From f30d9a1f8dafccb10fb57faa0d66f256a0e82068 Mon Sep 17 00:00:00 2001 From: Vincent Tran Date: Fri, 19 Jul 2024 11:38:37 -0700 Subject: [PATCH 01/22] Initial gen, Blobs-only changes, no retry in async yet --- .../blob/_generated/_azure_blob_storage.py | 3 +- .../storage/blob/_generated/_serialization.py | 2 + .../azure/storage/blob/_generated/_vendor.py | 16 ------ .../_generated/aio/_azure_blob_storage.py | 3 +- .../aio/operations/_append_blob_operations.py | 8 +-- .../aio/operations/_blob_operations.py | 50 +++++++------------ .../aio/operations/_block_blob_operations.py | 12 +---- .../aio/operations/_container_operations.py | 39 +++++---------- .../aio/operations/_page_blob_operations.py | 17 ++----- .../aio/operations/_service_operations.py | 31 ++++++------ .../operations/_append_blob_operations.py | 8 +-- .../_generated/operations/_blob_operations.py | 50 +++++++------------ .../operations/_block_blob_operations.py | 12 +---- .../operations/_container_operations.py | 39 +++++---------- .../operations/_page_blob_operations.py | 17 ++----- .../operations/_service_operations.py | 31 ++++++------ .../storage/blob/_shared/policies_async.py | 1 - .../azure/storage/blob/aio/_download_async.py | 3 +- .../_azure_data_lake_storage_restapi.py | 3 +- .../filedatalake/_generated/_serialization.py | 2 + .../filedatalake/_generated/_vendor.py | 16 ------ .../aio/_azure_data_lake_storage_restapi.py | 3 +- .../aio/operations/_file_system_operations.py | 14 ++---- .../aio/operations/_path_operations.py | 33 +++++------- .../aio/operations/_service_operations.py | 6 +-- .../_azure_data_lake_storage_restapi_enums.py | 3 +- .../operations/_file_system_operations.py | 14 ++---- .../_generated/operations/_path_operations.py | 33 +++++------- .../operations/_service_operations.py | 6 +-- .../_generated/_azure_file_storage.py | 3 +- .../fileshare/_generated/_serialization.py | 2 + .../storage/fileshare/_generated/_vendor.py | 16 ------ .../_generated/aio/_azure_file_storage.py | 3 
+- .../aio/operations/_directory_operations.py | 17 ++----- .../aio/operations/_file_operations.py | 37 +++++--------- .../aio/operations/_service_operations.py | 11 ++-- .../aio/operations/_share_operations.py | 27 ++-------- .../operations/_directory_operations.py | 17 ++----- .../_generated/operations/_file_operations.py | 37 +++++--------- .../operations/_service_operations.py | 11 ++-- .../operations/_share_operations.py | 27 ++-------- .../queue/_generated/_azure_queue_storage.py | 3 +- .../queue/_generated/_serialization.py | 2 + .../azure/storage/queue/_generated/_vendor.py | 16 ------ .../_generated/aio/_azure_queue_storage.py | 3 +- .../aio/operations/_message_id_operations.py | 6 +-- .../aio/operations/_messages_operations.py | 14 ++---- .../aio/operations/_queue_operations.py | 12 +---- .../aio/operations/_service_operations.py | 14 ++---- .../operations/_message_id_operations.py | 6 +-- .../operations/_messages_operations.py | 14 ++---- .../operations/_queue_operations.py | 12 +---- .../operations/_service_operations.py | 14 ++---- 53 files changed, 230 insertions(+), 569 deletions(-) delete mode 100644 sdk/storage/azure-storage-blob/azure/storage/blob/_generated/_vendor.py delete mode 100644 sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_vendor.py delete mode 100644 sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/_vendor.py delete mode 100644 sdk/storage/azure-storage-queue/azure/storage/queue/_generated/_vendor.py diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/_azure_blob_storage.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/_azure_blob_storage.py index ab930440b987..cabfed8f0666 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/_azure_blob_storage.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/_azure_blob_storage.py @@ -8,6 +8,7 @@ from copy import deepcopy from typing import Any +from 
typing_extensions import Self from azure.core import PipelineClient from azure.core.pipeline import policies @@ -110,7 +111,7 @@ def _send_request(self, request: HttpRequest, *, stream: bool = False, **kwargs: def close(self) -> None: self._client.close() - def __enter__(self) -> "AzureBlobStorage": + def __enter__(self) -> Self: self._client.__enter__() return self diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/_serialization.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/_serialization.py index f0c6180722c8..8139854b97bb 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/_serialization.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/_serialization.py @@ -144,6 +144,8 @@ def _json_attemp(data): # context otherwise. _LOGGER.critical("Wasn't XML not JSON, failing") raise DeserializationError("XML is invalid") from err + elif content_type.startswith("text/"): + return data_as_str raise DeserializationError("Cannot deserialize content-type: {}".format(content_type)) @classmethod diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/_vendor.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/_vendor.py deleted file mode 100644 index 0dafe0e287ff..000000000000 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/_vendor.py +++ /dev/null @@ -1,16 +0,0 @@ -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
-# -------------------------------------------------------------------------- - -from azure.core.pipeline.transport import HttpRequest - - -def _convert_request(request, files=None): - data = request.content if not files else None - request = HttpRequest(method=request.method, url=request.url, headers=request.headers, data=data) - if files: - request.set_formdata_body(files) - return request diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/_azure_blob_storage.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/_azure_blob_storage.py index ef97d289d0c9..c76a291f3c6a 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/_azure_blob_storage.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/_azure_blob_storage.py @@ -8,6 +8,7 @@ from copy import deepcopy from typing import Any, Awaitable +from typing_extensions import Self from azure.core import AsyncPipelineClient from azure.core.pipeline import policies @@ -112,7 +113,7 @@ def _send_request( async def close(self) -> None: await self._client.close() - async def __aenter__(self) -> "AzureBlobStorage": + async def __aenter__(self) -> Self: await self._client.__aenter__() return self diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_append_blob_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_append_blob_operations.py index 2840b96ee011..70b5d865e6d2 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_append_blob_operations.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_append_blob_operations.py @@ -19,13 +19,11 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from 
azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from ... import models as _models -from ..._vendor import _convert_request from ...operations._append_blob_operations import ( build_append_block_from_url_request, build_append_block_request, @@ -207,7 +205,6 @@ async def create( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -372,7 +369,6 @@ async def append_block( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -574,7 +570,6 @@ async def append_block_from_url( # pylint: disable=inconsistent-return-statemen headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -696,7 +691,6 @@ async def seal( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_blob_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_blob_operations.py index f71ffebc37b5..60a242d61915 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_blob_operations.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_blob_operations.py @@ -16,16 +16,16 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import 
HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from ... import models as _models -from ..._vendor import _convert_request from ...operations._blob_operations import ( build_abort_copy_from_url_request, build_acquire_lease_request, @@ -194,9 +194,9 @@ async def download( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) + _decompress = kwargs.pop("decompress", True) _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -205,6 +205,10 @@ async def download( response = pipeline_response.http_response if response.status_code not in [200, 206]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) @@ -289,7 +293,7 @@ async def download( ) response_headers["x-ms-legal-hold"] = self._deserialize("bool", response.headers.get("x-ms-legal-hold")) - deserialized = response.stream_download(self._client._pipeline) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if response.status_code == 206: response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) @@ -373,7 +377,7 @@ async def download( ) response_headers["x-ms-legal-hold"] = self._deserialize("bool", response.headers.get("x-ms-legal-hold")) - deserialized = response.stream_download(self._client._pipeline) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return 
cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -478,7 +482,6 @@ async def get_properties( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -691,7 +694,6 @@ async def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -759,7 +761,6 @@ async def undelete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -839,7 +840,6 @@ async def set_expiry( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -961,7 +961,6 @@ async def set_http_headers( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1055,7 +1054,6 @@ async def set_immutability_policy( # pylint: disable=inconsistent-return-statem headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1129,7 +1127,6 @@ async def delete_immutability_policy( # pylint: disable=inconsistent-return-sta headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1200,7 +1197,6 @@ async def set_legal_hold( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ 
-1330,7 +1326,6 @@ async def set_metadata( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1448,7 +1443,6 @@ async def acquire_lease( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1549,7 +1543,6 @@ async def release_lease( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1649,7 +1642,6 @@ async def renew_lease( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1756,7 +1748,6 @@ async def change_lease( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1863,7 +1854,6 @@ async def break_lease( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1994,7 +1984,6 @@ async def create_snapshot( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -2169,7 +2158,6 @@ async def start_copy_from_url( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -2357,7 +2345,6 @@ async def copy_from_url( # pylint: 
disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -2458,7 +2445,6 @@ async def abort_copy_from_url( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -2573,7 +2559,6 @@ async def set_tier( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -2650,7 +2635,6 @@ async def get_account_info( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -2783,9 +2767,9 @@ async def query( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) + _decompress = kwargs.pop("decompress", True) _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -2794,6 +2778,10 @@ async def query( response = pipeline_response.http_response if response.status_code not in [200, 206]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) @@ -2857,7 +2845,7 @@ async def query( "bytearray", response.headers.get("x-ms-blob-content-md5") ) - deserialized = response.stream_download(self._client._pipeline) + deserialized = 
response.stream_download(self._client._pipeline, decompress=_decompress) if response.status_code == 206: response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) @@ -2920,7 +2908,7 @@ async def query( "bytearray", response.headers.get("x-ms-blob-content-md5") ) - deserialized = response.stream_download(self._client._pipeline) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -3001,7 +2989,6 @@ async def get_tags( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -3024,7 +3011,7 @@ async def get_tags( response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - deserialized = self._deserialize("BlobTags", pipeline_response) + deserialized = self._deserialize("BlobTags", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -3117,7 +3104,6 @@ async def set_tags( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_block_blob_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_block_blob_operations.py index 5123996799f5..d833c25c0eec 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_block_blob_operations.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_block_blob_operations.py @@ -19,13 +19,11 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from 
azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from ... import models as _models -from ..._vendor import _convert_request from ...operations._block_blob_operations import ( build_commit_block_list_request, build_get_block_list_request, @@ -236,7 +234,6 @@ async def upload( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -471,7 +468,6 @@ async def put_blob_from_url( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -610,7 +606,6 @@ async def stage_block( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -773,7 +768,6 @@ async def stage_block_from_url( # pylint: disable=inconsistent-return-statement headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -984,7 +978,6 @@ async def commit_block_list( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1101,7 +1094,6 @@ async def get_block_list( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1130,7 +1122,7 @@ async def get_block_list( response_headers["x-ms-version"] = self._deserialize("str", 
response.headers.get("x-ms-version")) response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - deserialized = self._deserialize("BlockList", pipeline_response) + deserialized = self._deserialize("BlockList", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_container_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_container_operations.py index 9a5197df8829..48a1a14749c7 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_container_operations.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_container_operations.py @@ -15,16 +15,16 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from ... 
import models as _models -from ..._vendor import _convert_request from ...operations._container_operations import ( build_acquire_lease_request, build_break_lease_request, @@ -145,7 +145,6 @@ async def create( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -227,7 +226,6 @@ async def get_properties( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -339,7 +337,6 @@ async def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -437,7 +434,6 @@ async def set_metadata( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -521,7 +517,6 @@ async def get_access_policy( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -549,7 +544,7 @@ async def get_access_policy( response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - deserialized = self._deserialize("[SignedIdentifier]", pipeline_response) + deserialized = self._deserialize("[SignedIdentifier]", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -640,7 +635,6 @@ async def set_access_policy( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = 
self._client.format_url(_request.url) _stream = False @@ -725,7 +719,6 @@ async def restore( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -808,7 +801,6 @@ async def rename( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -895,9 +887,9 @@ async def submit_batch( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) + _decompress = kwargs.pop("decompress", True) _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -906,6 +898,10 @@ async def submit_batch( response = pipeline_response.http_response if response.status_code not in [202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) @@ -915,7 +911,7 @@ async def submit_batch( response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - deserialized = response.stream_download(self._client._pipeline) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -998,7 +994,6 @@ async def filter_blobs( headers=_headers, params=_params, ) - _request = _convert_request(_request) 
_request.url = self._client.format_url(_request.url) _stream = False @@ -1021,7 +1016,7 @@ async def filter_blobs( response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - deserialized = self._deserialize("FilterBlobSegment", pipeline_response) + deserialized = self._deserialize("FilterBlobSegment", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -1101,7 +1096,6 @@ async def acquire_lease( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1195,7 +1189,6 @@ async def release_lease( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1288,7 +1281,6 @@ async def renew_lease( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1388,7 +1380,6 @@ async def break_lease( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1488,7 +1479,6 @@ async def change_lease( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1592,7 +1582,6 @@ async def list_blob_flat_segment( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1616,7 +1605,7 @@ async def 
list_blob_flat_segment( response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - deserialized = self._deserialize("ListBlobsFlatSegmentResponse", pipeline_response) + deserialized = self._deserialize("ListBlobsFlatSegmentResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -1705,7 +1694,6 @@ async def list_blob_hierarchy_segment( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1729,7 +1717,7 @@ async def list_blob_hierarchy_segment( response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - deserialized = self._deserialize("ListBlobsHierarchySegmentResponse", pipeline_response) + deserialized = self._deserialize("ListBlobsHierarchySegmentResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -1780,7 +1768,6 @@ async def get_account_info( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_page_blob_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_page_blob_operations.py index 45d5c64de5fb..bf77639fe40e 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_page_blob_operations.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_page_blob_operations.py @@ -19,13 +19,11 @@ map_error, ) from azure.core.pipeline import 
PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from ... import models as _models -from ..._vendor import _convert_request from ...operations._page_blob_operations import ( build_clear_pages_request, build_copy_incremental_request, @@ -228,7 +226,6 @@ async def create( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -402,7 +399,6 @@ async def upload_pages( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -564,7 +560,6 @@ async def clear_pages( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -760,7 +755,6 @@ async def upload_pages_from_url( # pylint: disable=inconsistent-return-statemen headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -904,7 +898,6 @@ async def get_page_ranges( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -932,7 +925,7 @@ async def get_page_ranges( response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - deserialized = self._deserialize("PageList", pipeline_response) + deserialized = self._deserialize("PageList", 
pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -1058,7 +1051,6 @@ async def get_page_ranges_diff( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1086,7 +1078,7 @@ async def get_page_ranges_diff( response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - deserialized = self._deserialize("PageList", pipeline_response) + deserialized = self._deserialize("PageList", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -1190,7 +1182,6 @@ async def resize( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1306,7 +1297,6 @@ async def update_sequence_number( # pylint: disable=inconsistent-return-stateme headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1413,7 +1403,6 @@ async def copy_incremental( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_service_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_service_operations.py index 7a3c62fec53d..f9f8ff0be86c 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_service_operations.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_service_operations.py @@ -15,16 +15,16 @@ 
ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from ... import models as _models -from ..._vendor import _convert_request from ...operations._service_operations import ( build_filter_blobs_request, build_get_account_info_request, @@ -119,7 +119,6 @@ async def set_properties( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -189,7 +188,6 @@ async def get_properties( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -211,7 +209,7 @@ async def get_properties( response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - deserialized = self._deserialize("StorageServiceProperties", pipeline_response) + deserialized = self._deserialize("StorageServiceProperties", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -264,7 +262,6 @@ async def get_statistics( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -287,7 +284,7 @@ async def get_statistics( response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) response_headers["Date"] = self._deserialize("rfc-1123", 
response.headers.get("Date")) - deserialized = self._deserialize("StorageServiceStats", pipeline_response) + deserialized = self._deserialize("StorageServiceStats", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -368,7 +365,6 @@ async def list_containers_segment( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -390,7 +386,7 @@ async def list_containers_segment( response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - deserialized = self._deserialize("ListContainersSegmentResponse", pipeline_response) + deserialized = self._deserialize("ListContainersSegmentResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -453,7 +449,6 @@ async def get_user_delegation_key( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -476,7 +471,7 @@ async def get_user_delegation_key( response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - deserialized = self._deserialize("UserDelegationKey", pipeline_response) + deserialized = self._deserialize("UserDelegationKey", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -527,7 +522,6 @@ async def get_account_info( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -615,9 +609,9 @@ async def submit_batch( 
headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) + _decompress = kwargs.pop("decompress", True) _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -626,6 +620,10 @@ async def submit_batch( response = pipeline_response.http_response if response.status_code not in [200]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) @@ -635,7 +633,7 @@ async def submit_batch( response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - deserialized = response.stream_download(self._client._pipeline) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -717,7 +715,6 @@ async def filter_blobs( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -740,7 +737,7 @@ async def filter_blobs( response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - deserialized = self._deserialize("FilterBlobSegment", pipeline_response) + deserialized = self._deserialize("FilterBlobSegment", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore diff --git 
a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_append_blob_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_append_blob_operations.py index f950ba4053bc..91b8dd56c7f7 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_append_blob_operations.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_append_blob_operations.py @@ -19,14 +19,12 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -577,7 +575,6 @@ def create( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -742,7 +739,6 @@ def append_block( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -944,7 +940,6 @@ def append_block_from_url( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1066,7 +1061,6 @@ def seal( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False diff --git 
a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_blob_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_blob_operations.py index 385821f836a3..6796705afcc5 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_blob_operations.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_blob_operations.py @@ -16,17 +16,17 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -1601,9 +1601,9 @@ def download( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) + _decompress = kwargs.pop("decompress", True) _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -1612,6 +1612,10 @@ def download( response = pipeline_response.http_response if response.status_code not in [200, 206]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) @@ -1696,7 +1700,7 @@ def download( ) response_headers["x-ms-legal-hold"] = 
self._deserialize("bool", response.headers.get("x-ms-legal-hold")) - deserialized = response.stream_download(self._client._pipeline) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if response.status_code == 206: response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) @@ -1780,7 +1784,7 @@ def download( ) response_headers["x-ms-legal-hold"] = self._deserialize("bool", response.headers.get("x-ms-legal-hold")) - deserialized = response.stream_download(self._client._pipeline) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -1885,7 +1889,6 @@ def get_properties( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -2098,7 +2101,6 @@ def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -2166,7 +2168,6 @@ def undelete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -2246,7 +2247,6 @@ def set_expiry( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -2368,7 +2368,6 @@ def set_http_headers( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -2462,7 +2461,6 @@ def set_immutability_policy( # pylint: 
disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -2536,7 +2534,6 @@ def delete_immutability_policy( # pylint: disable=inconsistent-return-statement headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -2607,7 +2604,6 @@ def set_legal_hold( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -2737,7 +2733,6 @@ def set_metadata( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -2855,7 +2850,6 @@ def acquire_lease( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -2956,7 +2950,6 @@ def release_lease( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -3056,7 +3049,6 @@ def renew_lease( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -3163,7 +3155,6 @@ def change_lease( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -3270,7 +3261,6 @@ def break_lease( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = 
_convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -3401,7 +3391,6 @@ def create_snapshot( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -3576,7 +3565,6 @@ def start_copy_from_url( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -3764,7 +3752,6 @@ def copy_from_url( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -3865,7 +3852,6 @@ def abort_copy_from_url( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -3980,7 +3966,6 @@ def set_tier( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -4057,7 +4042,6 @@ def get_account_info( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -4190,9 +4174,9 @@ def query( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) + _decompress = kwargs.pop("decompress", True) _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -4201,6 +4185,10 @@ def query( response = pipeline_response.http_response if response.status_code not in [200, 206]: + 
try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) @@ -4264,7 +4252,7 @@ def query( "bytearray", response.headers.get("x-ms-blob-content-md5") ) - deserialized = response.stream_download(self._client._pipeline) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if response.status_code == 206: response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) @@ -4327,7 +4315,7 @@ def query( "bytearray", response.headers.get("x-ms-blob-content-md5") ) - deserialized = response.stream_download(self._client._pipeline) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -4408,7 +4396,6 @@ def get_tags( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -4431,7 +4418,7 @@ def get_tags( response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - deserialized = self._deserialize("BlobTags", pipeline_response) + deserialized = self._deserialize("BlobTags", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -4524,7 +4511,6 @@ def set_tags( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False diff --git 
a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_block_blob_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_block_blob_operations.py index 0a3083b16a7b..ab161913ceb0 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_block_blob_operations.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_block_blob_operations.py @@ -19,14 +19,12 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -844,7 +842,6 @@ def upload( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1079,7 +1076,6 @@ def put_blob_from_url( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1218,7 +1214,6 @@ def stage_block( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1381,7 +1376,6 @@ def stage_block_from_url( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1592,7 +1586,6 @@ def commit_block_list( # pylint: 
disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1709,7 +1702,6 @@ def get_block_list( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1738,7 +1730,7 @@ def get_block_list( response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - deserialized = self._deserialize("BlockList", pipeline_response) + deserialized = self._deserialize("BlockList", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_container_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_container_operations.py index 37fe1f75556b..719059977673 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_container_operations.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_container_operations.py @@ -16,17 +16,17 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -976,7 +976,6 @@ def create( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1058,7 +1057,6 @@ def get_properties( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1170,7 +1168,6 @@ def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1268,7 +1265,6 @@ def set_metadata( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1352,7 +1348,6 @@ def get_access_policy( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1380,7 +1375,7 @@ def get_access_policy( response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - deserialized = self._deserialize("[SignedIdentifier]", pipeline_response) + deserialized = self._deserialize("[SignedIdentifier]", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -1471,7 +1466,6 @@ def set_access_policy( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) 
_stream = False @@ -1556,7 +1550,6 @@ def restore( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1639,7 +1632,6 @@ def rename( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1726,9 +1718,9 @@ def submit_batch( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) + _decompress = kwargs.pop("decompress", True) _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -1737,6 +1729,10 @@ def submit_batch( response = pipeline_response.http_response if response.status_code not in [202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) @@ -1746,7 +1742,7 @@ def submit_batch( response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - deserialized = response.stream_download(self._client._pipeline) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -1829,7 +1825,6 @@ def filter_blobs( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1852,7 
+1847,7 @@ def filter_blobs( response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - deserialized = self._deserialize("FilterBlobSegment", pipeline_response) + deserialized = self._deserialize("FilterBlobSegment", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -1932,7 +1927,6 @@ def acquire_lease( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -2026,7 +2020,6 @@ def release_lease( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -2119,7 +2112,6 @@ def renew_lease( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -2219,7 +2211,6 @@ def break_lease( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -2319,7 +2310,6 @@ def change_lease( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -2423,7 +2413,6 @@ def list_blob_flat_segment( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -2447,7 +2436,7 @@ def list_blob_flat_segment( response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) 
response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - deserialized = self._deserialize("ListBlobsFlatSegmentResponse", pipeline_response) + deserialized = self._deserialize("ListBlobsFlatSegmentResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -2536,7 +2525,6 @@ def list_blob_hierarchy_segment( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -2560,7 +2548,7 @@ def list_blob_hierarchy_segment( response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - deserialized = self._deserialize("ListBlobsHierarchySegmentResponse", pipeline_response) + deserialized = self._deserialize("ListBlobsHierarchySegmentResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -2611,7 +2599,6 @@ def get_account_info( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_page_blob_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_page_blob_operations.py index e388063fc7fa..a280a9f3048d 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_page_blob_operations.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_page_blob_operations.py @@ -19,14 +19,12 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, 
HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -980,7 +978,6 @@ def create( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1154,7 +1151,6 @@ def upload_pages( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1316,7 +1312,6 @@ def clear_pages( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1512,7 +1507,6 @@ def upload_pages_from_url( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1656,7 +1650,6 @@ def get_page_ranges( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1684,7 +1677,7 @@ def get_page_ranges( response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - deserialized = self._deserialize("PageList", pipeline_response) + deserialized = self._deserialize("PageList", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -1810,7 +1803,6 @@ def get_page_ranges_diff( headers=_headers, params=_params, ) - _request = 
_convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1838,7 +1830,7 @@ def get_page_ranges_diff( response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - deserialized = self._deserialize("PageList", pipeline_response) + deserialized = self._deserialize("PageList", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -1942,7 +1934,6 @@ def resize( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -2058,7 +2049,6 @@ def update_sequence_number( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -2165,7 +2155,6 @@ def copy_incremental( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_service_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_service_operations.py index 0d1bc1509661..2e2a84dc524d 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_service_operations.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_service_operations.py @@ -15,17 +15,17 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import 
HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -427,7 +427,6 @@ def set_properties( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -497,7 +496,6 @@ def get_properties( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -519,7 +517,7 @@ def get_properties( response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - deserialized = self._deserialize("StorageServiceProperties", pipeline_response) + deserialized = self._deserialize("StorageServiceProperties", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -572,7 +570,6 @@ def get_statistics( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -595,7 +592,7 @@ def get_statistics( response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - deserialized = self._deserialize("StorageServiceStats", pipeline_response) + deserialized = self._deserialize("StorageServiceStats", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -676,7 +673,6 @@ def list_containers_segment( 
headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -698,7 +694,7 @@ def list_containers_segment( response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - deserialized = self._deserialize("ListContainersSegmentResponse", pipeline_response) + deserialized = self._deserialize("ListContainersSegmentResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -761,7 +757,6 @@ def get_user_delegation_key( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -784,7 +779,7 @@ def get_user_delegation_key( response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - deserialized = self._deserialize("UserDelegationKey", pipeline_response) + deserialized = self._deserialize("UserDelegationKey", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -835,7 +830,6 @@ def get_account_info( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -923,9 +917,9 @@ def submit_batch( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) + _decompress = kwargs.pop("decompress", True) _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -934,6 +928,10 @@ def submit_batch( response = 
pipeline_response.http_response if response.status_code not in [200]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) @@ -943,7 +941,7 @@ def submit_batch( response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - deserialized = response.stream_download(self._client._pipeline) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -1025,7 +1023,6 @@ def filter_blobs( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1048,7 +1045,7 @@ def filter_blobs( response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - deserialized = self._deserialize("FilterBlobSegment", pipeline_response) + deserialized = self._deserialize("FilterBlobSegment", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/policies_async.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/policies_async.py index bf03d3690598..0e651471e560 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/policies_async.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/policies_async.py @@ -64,7 +64,6 @@ async def send(self, request: 
"PipelineRequest") -> "PipelineResponse": request.context.options.pop('raw_response_hook', self._response_callback) response = await self.next.send(request) - await response.http_response.load_body() will_retry = is_retry(response, request.context.options.get('mode')) # Auth error could come from Bearer challenge, in which case this request will be made again diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_download_async.py b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_download_async.py index 1807c43ab5d3..c88b8fa905c9 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_download_async.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_download_async.py @@ -33,7 +33,8 @@ async def process_content(data, start_offset, end_offset, encryption): if data is None: raise ValueError("Response cannot be None.") - content = data.response.body() + await data.response.read() + content = data.response.content if encryption.get('key') is not None or encryption.get('resolver') is not None: try: return decrypt_blob( diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_azure_data_lake_storage_restapi.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_azure_data_lake_storage_restapi.py index b266181aabec..e1dec2bc80cd 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_azure_data_lake_storage_restapi.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_azure_data_lake_storage_restapi.py @@ -8,6 +8,7 @@ from copy import deepcopy from typing import Any, Optional +from typing_extensions import Self from azure.core import PipelineClient from azure.core.pipeline import policies @@ -103,7 +104,7 @@ def _send_request(self, request: HttpRequest, *, stream: bool = False, **kwargs: def close(self) -> None: self._client.close() - def __enter__(self) -> "AzureDataLakeStorageRESTAPI": + def 
__enter__(self) -> Self: self._client.__enter__() return self diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_serialization.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_serialization.py index f0c6180722c8..8139854b97bb 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_serialization.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_serialization.py @@ -144,6 +144,8 @@ def _json_attemp(data): # context otherwise. _LOGGER.critical("Wasn't XML not JSON, failing") raise DeserializationError("XML is invalid") from err + elif content_type.startswith("text/"): + return data_as_str raise DeserializationError("Cannot deserialize content-type: {}".format(content_type)) @classmethod diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_vendor.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_vendor.py deleted file mode 100644 index 0dafe0e287ff..000000000000 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_vendor.py +++ /dev/null @@ -1,16 +0,0 @@ -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
-# -------------------------------------------------------------------------- - -from azure.core.pipeline.transport import HttpRequest - - -def _convert_request(request, files=None): - data = request.content if not files else None - request = HttpRequest(method=request.method, url=request.url, headers=request.headers, data=data) - if files: - request.set_formdata_body(files) - return request diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/_azure_data_lake_storage_restapi.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/_azure_data_lake_storage_restapi.py index 74c4f0620bce..a16a5be74366 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/_azure_data_lake_storage_restapi.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/_azure_data_lake_storage_restapi.py @@ -8,6 +8,7 @@ from copy import deepcopy from typing import Any, Awaitable, Optional +from typing_extensions import Self from azure.core import AsyncPipelineClient from azure.core.pipeline import policies @@ -105,7 +106,7 @@ def _send_request( async def close(self) -> None: await self._client.close() - async def __aenter__(self) -> "AzureDataLakeStorageRESTAPI": + async def __aenter__(self) -> Self: await self._client.__aenter__() return self diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/_file_system_operations.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/_file_system_operations.py index f5c11c835fb5..43db73b3c9b5 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/_file_system_operations.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/_file_system_operations.py @@ -18,13 +18,11 @@ map_error, ) from azure.core.pipeline import 
PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from ... import models as _models -from ..._vendor import _convert_request from ...operations._file_system_operations import ( build_create_request, build_delete_request, @@ -119,7 +117,6 @@ async def create( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -217,7 +214,6 @@ async def set_properties( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -285,7 +281,6 @@ async def get_properties( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -379,7 +374,6 @@ async def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -480,7 +474,6 @@ async def list_paths( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -503,7 +496,7 @@ async def list_paths( response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) response_headers["x-ms-continuation"] = self._deserialize("str", response.headers.get("x-ms-continuation")) - deserialized = self._deserialize("PathList", pipeline_response) + deserialized = self._deserialize("PathList", pipeline_response.http_response) if 
cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -594,7 +587,6 @@ async def list_blob_hierarchy_segment( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -618,7 +610,7 @@ async def list_blob_hierarchy_segment( response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - deserialized = self._deserialize("ListBlobsHierarchySegmentResponse", pipeline_response) + deserialized = self._deserialize("ListBlobsHierarchySegmentResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/_path_operations.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/_path_operations.py index 89ac80e699c6..edd172f6b53d 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/_path_operations.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/_path_operations.py @@ -15,16 +15,16 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from ... 
import models as _models -from ..._vendor import _convert_request from ...operations._path_operations import ( build_append_data_request, build_create_request, @@ -292,7 +292,6 @@ async def create( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -541,7 +540,6 @@ async def update( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -578,7 +576,7 @@ async def update( response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - deserialized = self._deserialize("SetAccessControlRecursiveResponse", pipeline_response) + deserialized = self._deserialize("SetAccessControlRecursiveResponse", pipeline_response.http_response) if response.status_code == 202: response_headers["Content-MD5"] = self._deserialize("str", response.headers.get("Content-MD5")) @@ -619,7 +617,7 @@ async def lease( # pylint: disable=inconsistent-return-statements the current lease ID in "x-ms-lease-id" and the new lease ID in "x-ms-proposed-lease-id" to change the lease ID of an active lease. Use "renew" and specify the "x-ms-lease-id" to renew an existing lease. Use "release" and specify the "x-ms-lease-id" to release a lease. Known values - are: "acquire", "break", "change", "renew", and "release". Required. + are: "acquire", "break", "change", "renew", and "release". Required. :type x_ms_lease_action: str or ~azure.storage.filedatalake.models.PathLeaseAction :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled.
Default @@ -689,7 +687,6 @@ async def lease( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -830,9 +827,9 @@ async def read( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) + _decompress = kwargs.pop("decompress", True) _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -841,6 +838,10 @@ async def read( response = pipeline_response.http_response if response.status_code not in [200, 206]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) @@ -879,7 +880,7 @@ async def read( "str", response.headers.get("x-ms-encryption-key-sha256") ) - deserialized = response.stream_download(self._client._pipeline) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if response.status_code == 206: response_headers["Accept-Ranges"] = self._deserialize("str", response.headers.get("Accept-Ranges")) @@ -915,7 +916,7 @@ async def read( "str", response.headers.get("x-ms-encryption-key-sha256") ) - deserialized = response.stream_download(self._client._pipeline) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -1012,7 +1013,6 @@ async def get_properties( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) 
_request.url = self._client.format_url(_request.url) _stream = False @@ -1146,7 +1146,6 @@ async def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1270,7 +1269,6 @@ async def set_access_control( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1381,7 +1379,6 @@ async def set_access_control_recursive( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1405,7 +1402,7 @@ async def set_access_control_recursive( response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - deserialized = self._deserialize("SetAccessControlRecursiveResponse", pipeline_response) + deserialized = self._deserialize("SetAccessControlRecursiveResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -1573,7 +1570,6 @@ async def flush_data( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1734,7 +1730,6 @@ async def append_data( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1826,7 +1821,6 @@ async def set_expiry( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) 
_stream = False @@ -1904,7 +1898,6 @@ async def undelete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/_service_operations.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/_service_operations.py index 57032bf8b7dc..a6da031a1483 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/_service_operations.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/_service_operations.py @@ -19,13 +19,11 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from ... 
import models as _models -from ..._vendor import _convert_request from ...operations._service_operations import build_list_file_systems_request if sys.version_info >= (3, 9): @@ -124,12 +122,10 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: _request = HttpRequest("GET", next_link) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/models/_azure_data_lake_storage_restapi_enums.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/models/_azure_data_lake_storage_restapi_enums.py index acaa735596d2..c9bb43b5e4a0 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/models/_azure_data_lake_storage_restapi_enums.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/models/_azure_data_lake_storage_restapi_enums.py @@ -51,10 +51,11 @@ class PathLeaseAction(str, Enum, metaclass=CaseInsensitiveEnumMeta): """PathLeaseAction.""" ACQUIRE = "acquire" - BREAK_ENUM = "break" + BREAK = "break" CHANGE = "change" RENEW = "renew" RELEASE = "release" + BREAK_ENUM = "break" class PathRenameMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/_file_system_operations.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/_file_system_operations.py index 69efc186e612..35dbe9f3738b 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/_file_system_operations.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/_file_system_operations.py @@ -19,14 +19,12 @@ map_error, ) 
from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -380,7 +378,6 @@ def create( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -478,7 +475,6 @@ def set_properties( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -546,7 +542,6 @@ def get_properties( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -640,7 +635,6 @@ def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -741,7 +735,6 @@ def list_paths( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -764,7 +757,7 @@ def list_paths( response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) response_headers["x-ms-continuation"] = self._deserialize("str", response.headers.get("x-ms-continuation")) - deserialized = self._deserialize("PathList", pipeline_response) + deserialized = self._deserialize("PathList", pipeline_response.http_response) if cls: 
return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -855,7 +848,6 @@ def list_blob_hierarchy_segment( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -879,7 +871,7 @@ def list_blob_hierarchy_segment( response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - deserialized = self._deserialize("ListBlobsHierarchySegmentResponse", pipeline_response) + deserialized = self._deserialize("ListBlobsHierarchySegmentResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/_path_operations.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/_path_operations.py index e3d158497721..4c6d623ee029 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/_path_operations.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/_path_operations.py @@ -16,17 +16,17 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -1154,7 +1154,6 @@ def create( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1403,7 +1402,6 @@ def update( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1440,7 +1438,7 @@ def update( response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - deserialized = self._deserialize("SetAccessControlRecursiveResponse", pipeline_response) + deserialized = self._deserialize("SetAccessControlRecursiveResponse", pipeline_response.http_response) if response.status_code == 202: response_headers["Content-MD5"] = self._deserialize("str", response.headers.get("Content-MD5")) @@ -1481,7 +1479,7 @@ def lease( # pylint: disable=inconsistent-return-statements the current lease ID in "x-ms-lease-id" and the new lease ID in "x-ms-proposed-lease-id" to change the lease ID of an active lease. Use "renew" and specify the "x-ms-lease-id" to renew an existing lease. Use "release" and specify the "x-ms-lease-id" to release a lease. Known values - are: "acquire", "break", "change", "renew", and "release". Required. + are: "acquire", "break", "change", "renew", and "release". Required. :type x_ms_lease_action: str or ~azure.storage.filedatalake.models.PathLeaseAction :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled.
Default @@ -1551,7 +1549,6 @@ def lease( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1692,9 +1689,9 @@ def read( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) + _decompress = kwargs.pop("decompress", True) _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -1703,6 +1700,10 @@ def read( response = pipeline_response.http_response if response.status_code not in [200, 206]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) @@ -1741,7 +1742,7 @@ def read( "str", response.headers.get("x-ms-encryption-key-sha256") ) - deserialized = response.stream_download(self._client._pipeline) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if response.status_code == 206: response_headers["Accept-Ranges"] = self._deserialize("str", response.headers.get("Accept-Ranges")) @@ -1777,7 +1778,7 @@ def read( "str", response.headers.get("x-ms-encryption-key-sha256") ) - deserialized = response.stream_download(self._client._pipeline) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -1874,7 +1875,6 @@ def get_properties( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = 
self._client.format_url(_request.url) _stream = False @@ -2008,7 +2008,6 @@ def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -2132,7 +2131,6 @@ def set_access_control( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -2243,7 +2241,6 @@ def set_access_control_recursive( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -2267,7 +2264,7 @@ def set_access_control_recursive( response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - deserialized = self._deserialize("SetAccessControlRecursiveResponse", pipeline_response) + deserialized = self._deserialize("SetAccessControlRecursiveResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -2435,7 +2432,6 @@ def flush_data( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -2596,7 +2592,6 @@ def append_data( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -2688,7 +2683,6 @@ def set_expiry( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -2766,7 +2760,6 @@ def undelete( # 
pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/_service_operations.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/_service_operations.py index 5c3b01d7576c..e9bb654ff33a 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/_service_operations.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/_service_operations.py @@ -19,14 +19,12 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -172,12 +170,10 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: _request = HttpRequest("GET", next_link) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/_azure_file_storage.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/_azure_file_storage.py index aac0adcdceec..25b49ce0491c 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/_azure_file_storage.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/_azure_file_storage.py @@ -8,6 +8,7 @@ from copy import deepcopy from typing import Any, Optional, Union +from typing_extensions import Self from azure.core import PipelineClient from azure.core.pipeline import policies @@ -121,7 +122,7 @@ def _send_request(self, request: HttpRequest, *, stream: bool = False, **kwargs: def close(self) -> None: self._client.close() - def __enter__(self) -> "AzureFileStorage": + def __enter__(self) -> Self: self._client.__enter__() return self diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/_serialization.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/_serialization.py index f0c6180722c8..8139854b97bb 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/_serialization.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/_serialization.py @@ -144,6 +144,8 @@ def _json_attemp(data): # context otherwise. 
_LOGGER.critical("Wasn't XML not JSON, failing") raise DeserializationError("XML is invalid") from err + elif content_type.startswith("text/"): + return data_as_str raise DeserializationError("Cannot deserialize content-type: {}".format(content_type)) @classmethod diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/_vendor.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/_vendor.py deleted file mode 100644 index 0dafe0e287ff..000000000000 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/_vendor.py +++ /dev/null @@ -1,16 +0,0 @@ -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
-# -------------------------------------------------------------------------- - -from azure.core.pipeline.transport import HttpRequest - - -def _convert_request(request, files=None): - data = request.content if not files else None - request = HttpRequest(method=request.method, url=request.url, headers=request.headers, data=data) - if files: - request.set_formdata_body(files) - return request diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/_azure_file_storage.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/_azure_file_storage.py index 46b977e4d579..02bb3b901ebc 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/_azure_file_storage.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/_azure_file_storage.py @@ -8,6 +8,7 @@ from copy import deepcopy from typing import Any, Awaitable, Optional, Union +from typing_extensions import Self from azure.core import AsyncPipelineClient from azure.core.pipeline import policies @@ -123,7 +124,7 @@ def _send_request( async def close(self) -> None: await self._client.close() - async def __aenter__(self) -> "AzureFileStorage": + async def __aenter__(self) -> Self: await self._client.__aenter__() return self diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_directory_operations.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_directory_operations.py index 2877ddb3fa11..de3098fd9608 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_directory_operations.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_directory_operations.py @@ -18,13 +18,11 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import 
HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from ... import models as _models -from ..._vendor import _convert_request from ...operations._directory_operations import ( build_create_request, build_delete_request, @@ -145,7 +143,6 @@ async def create( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -235,7 +232,6 @@ async def get_properties( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -321,7 +317,6 @@ async def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -422,7 +417,6 @@ async def set_properties( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -512,7 +506,6 @@ async def set_metadata( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -615,7 +608,6 @@ async def list_files_and_directories_segment( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -636,7 +628,7 @@ async def list_files_and_directories_segment( response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) response_headers["Date"] = self._deserialize("rfc-1123", 
response.headers.get("Date")) - deserialized = self._deserialize("ListFilesAndDirectoriesSegmentResponse", pipeline_response) + deserialized = self._deserialize("ListFilesAndDirectoriesSegmentResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -707,7 +699,6 @@ async def list_handles( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -728,7 +719,7 @@ async def list_handles( response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - deserialized = self._deserialize("ListHandlesResponse", pipeline_response) + deserialized = self._deserialize("ListHandlesResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -798,7 +789,6 @@ async def force_close_handles( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -945,7 +935,6 @@ async def rename( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_file_operations.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_file_operations.py index 8aa0012fd84a..1a2c31f806c0 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_file_operations.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_file_operations.py @@ -15,16 +15,16 @@ 
ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from ... import models as _models -from ..._vendor import _convert_request from ...operations._file_operations import ( build_abort_copy_request, build_acquire_lease_request, @@ -188,7 +188,6 @@ async def create( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -290,9 +289,9 @@ async def download( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) + _decompress = kwargs.pop("decompress", True) _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -301,6 +300,10 @@ async def download( response = pipeline_response.http_response if response.status_code not in [200, 206]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) @@ -367,7 +370,7 @@ async def download( response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state")) response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status")) - deserialized 
= response.stream_download(self._client._pipeline) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if response.status_code == 206: response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) @@ -430,7 +433,7 @@ async def download( response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state")) response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status")) - deserialized = response.stream_download(self._client._pipeline) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -490,7 +493,6 @@ async def get_properties( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -604,7 +606,6 @@ async def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -739,7 +740,6 @@ async def set_http_headers( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -838,7 +838,6 @@ async def set_metadata( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -927,7 +926,6 @@ async def acquire_lease( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1006,7 +1004,6 @@ async def 
release_lease( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1094,7 +1091,6 @@ async def change_lease( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1181,7 +1177,6 @@ async def break_lease( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1301,7 +1296,6 @@ async def upload_range( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1434,7 +1428,6 @@ async def upload_range_from_url( # pylint: disable=inconsistent-return-statemen headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1541,7 +1534,6 @@ async def get_range_list( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1564,7 +1556,7 @@ async def get_range_list( response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - deserialized = self._deserialize("ShareFileRangeList", pipeline_response) + deserialized = self._deserialize("ShareFileRangeList", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -1673,7 +1665,6 @@ async def start_copy( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = 
_convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1759,7 +1750,6 @@ async def abort_copy( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1841,7 +1831,6 @@ async def list_handles( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1862,7 +1851,7 @@ async def list_handles( response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - deserialized = self._deserialize("ListHandlesResponse", pipeline_response) + deserialized = self._deserialize("ListHandlesResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -1927,7 +1916,6 @@ async def force_close_handles( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -2079,7 +2067,6 @@ async def rename( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_service_operations.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_service_operations.py index 82b5d5681cc2..8b94bb143ea1 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_service_operations.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_service_operations.py @@ 
-18,13 +18,11 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from ... import models as _models -from ..._vendor import _convert_request from ...operations._service_operations import ( build_get_properties_request, build_list_shares_segment_request, @@ -105,7 +103,6 @@ async def set_properties( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -165,7 +162,6 @@ async def get_properties(self, timeout: Optional[int] = None, **kwargs: Any) -> headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -184,7 +180,7 @@ async def get_properties(self, timeout: Optional[int] = None, **kwargs: Any) -> response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - deserialized = self._deserialize("StorageServiceProperties", pipeline_response) + deserialized = self._deserialize("StorageServiceProperties", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -254,7 +250,6 @@ async def list_shares_segment( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -273,7 +268,7 @@ async def list_shares_segment( response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) response_headers["x-ms-version"] = 
self._deserialize("str", response.headers.get("x-ms-version")) - deserialized = self._deserialize("ListSharesResponse", pipeline_response) + deserialized = self._deserialize("ListSharesResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_share_operations.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_share_operations.py index bb89d8fb19c1..6050eb7818ad 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_share_operations.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_share_operations.py @@ -19,13 +19,11 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from ... 
import models as _models -from ..._vendor import _convert_request from ...operations._share_operations import ( build_acquire_lease_request, build_break_lease_request, @@ -140,7 +138,6 @@ async def create( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -218,7 +215,6 @@ async def get_properties( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -335,7 +331,6 @@ async def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -425,7 +420,6 @@ async def acquire_lease( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -513,7 +507,6 @@ async def release_lease( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -606,7 +599,6 @@ async def change_lease( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -694,7 +686,6 @@ async def renew_lease( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -796,7 +787,6 @@ async def break_lease( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) 
_request.url = self._client.format_url(_request.url) _stream = False @@ -869,7 +859,6 @@ async def create_snapshot( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1002,7 +991,6 @@ async def create_permission( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1071,7 +1059,6 @@ async def get_permission( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1091,7 +1078,7 @@ async def get_permission( response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - deserialized = self._deserialize("SharePermission", pipeline_response) + deserialized = self._deserialize("SharePermission", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -1165,7 +1152,6 @@ async def set_properties( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1244,7 +1230,6 @@ async def set_metadata( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1318,7 +1303,6 @@ async def get_access_policy( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1340,7 +1324,7 @@ async def get_access_policy( response_headers["x-ms-version"] = 
self._deserialize("str", response.headers.get("x-ms-version")) response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - deserialized = self._deserialize("[SignedIdentifier]", pipeline_response) + deserialized = self._deserialize("[SignedIdentifier]", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -1409,7 +1393,6 @@ async def set_access_policy( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1483,7 +1466,6 @@ async def get_statistics( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1505,7 +1487,7 @@ async def get_statistics( response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - deserialized = self._deserialize("ShareStats", pipeline_response) + deserialized = self._deserialize("ShareStats", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -1569,7 +1551,6 @@ async def restore( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_directory_operations.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_directory_operations.py index 52e76948e026..544f4bf39fee 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_directory_operations.py +++ 
b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_directory_operations.py @@ -18,14 +18,12 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -623,7 +621,6 @@ def create( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -713,7 +710,6 @@ def get_properties( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -799,7 +795,6 @@ def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -900,7 +895,6 @@ def set_properties( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -990,7 +984,6 @@ def set_metadata( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1093,7 +1086,6 @@ def list_files_and_directories_segment( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False 
@@ -1114,7 +1106,7 @@ def list_files_and_directories_segment( response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - deserialized = self._deserialize("ListFilesAndDirectoriesSegmentResponse", pipeline_response) + deserialized = self._deserialize("ListFilesAndDirectoriesSegmentResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -1185,7 +1177,6 @@ def list_handles( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1206,7 +1197,7 @@ def list_handles( response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - deserialized = self._deserialize("ListHandlesResponse", pipeline_response) + deserialized = self._deserialize("ListHandlesResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -1276,7 +1267,6 @@ def force_close_handles( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1423,7 +1413,6 @@ def rename( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_file_operations.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_file_operations.py index a26b1206aa0a..47c85840dbdc 100644 --- 
a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_file_operations.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_file_operations.py @@ -15,17 +15,17 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -1213,7 +1213,6 @@ def create( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1315,9 +1314,9 @@ def download( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) + _decompress = kwargs.pop("decompress", True) _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -1326,6 +1325,10 @@ def download( response = pipeline_response.http_response if response.status_code not in [200, 206]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) @@ -1392,7 +1395,7 @@ def download( 
response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state")) response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status")) - deserialized = response.stream_download(self._client._pipeline) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if response.status_code == 206: response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) @@ -1455,7 +1458,7 @@ def download( response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state")) response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status")) - deserialized = response.stream_download(self._client._pipeline) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -1515,7 +1518,6 @@ def get_properties( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1629,7 +1631,6 @@ def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1764,7 +1765,6 @@ def set_http_headers( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1863,7 +1863,6 @@ def set_metadata( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1952,7 +1951,6 @@ def acquire_lease( # pylint: 
disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -2031,7 +2029,6 @@ def release_lease( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -2119,7 +2116,6 @@ def change_lease( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -2206,7 +2202,6 @@ def break_lease( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -2326,7 +2321,6 @@ def upload_range( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -2459,7 +2453,6 @@ def upload_range_from_url( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -2566,7 +2559,6 @@ def get_range_list( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -2589,7 +2581,7 @@ def get_range_list( response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - deserialized = self._deserialize("ShareFileRangeList", pipeline_response) + deserialized = self._deserialize("ShareFileRangeList", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, 
response_headers) # type: ignore @@ -2698,7 +2690,6 @@ def start_copy( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -2784,7 +2775,6 @@ def abort_copy( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -2866,7 +2856,6 @@ def list_handles( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -2887,7 +2876,7 @@ def list_handles( response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - deserialized = self._deserialize("ListHandlesResponse", pipeline_response) + deserialized = self._deserialize("ListHandlesResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -2952,7 +2941,6 @@ def force_close_handles( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -3104,7 +3092,6 @@ def rename( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_service_operations.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_service_operations.py index 7923f4cc5bcc..00cf74f636b3 100644 --- 
a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_service_operations.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_service_operations.py @@ -18,14 +18,12 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -214,7 +212,6 @@ def set_properties( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -274,7 +271,6 @@ def get_properties(self, timeout: Optional[int] = None, **kwargs: Any) -> _model headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -293,7 +289,7 @@ def get_properties(self, timeout: Optional[int] = None, **kwargs: Any) -> _model response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - deserialized = self._deserialize("StorageServiceProperties", pipeline_response) + deserialized = self._deserialize("StorageServiceProperties", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -363,7 +359,6 @@ def list_shares_segment( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -382,7 
+377,7 @@ def list_shares_segment( response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - deserialized = self._deserialize("ListSharesResponse", pipeline_response) + deserialized = self._deserialize("ListSharesResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_share_operations.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_share_operations.py index 4f78e8fd55ff..3c2a8739b608 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_share_operations.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_share_operations.py @@ -19,14 +19,12 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -854,7 +852,6 @@ def create( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -932,7 +929,6 @@ def get_properties( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1049,7 +1045,6 @@ def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1139,7 +1134,6 @@ def acquire_lease( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1227,7 +1221,6 @@ def release_lease( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1320,7 +1313,6 @@ def change_lease( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1408,7 +1400,6 @@ def renew_lease( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1510,7 +1501,6 @@ def break_lease( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = 
self._client.format_url(_request.url) _stream = False @@ -1583,7 +1573,6 @@ def create_snapshot( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1716,7 +1705,6 @@ def create_permission( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1785,7 +1773,6 @@ def get_permission( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1805,7 +1792,7 @@ def get_permission( response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - deserialized = self._deserialize("SharePermission", pipeline_response) + deserialized = self._deserialize("SharePermission", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -1879,7 +1866,6 @@ def set_properties( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1958,7 +1944,6 @@ def set_metadata( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -2032,7 +2017,6 @@ def get_access_policy( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -2054,7 +2038,7 @@ def get_access_policy( response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) 
response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - deserialized = self._deserialize("[SignedIdentifier]", pipeline_response) + deserialized = self._deserialize("[SignedIdentifier]", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -2123,7 +2107,6 @@ def set_access_policy( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -2197,7 +2180,6 @@ def get_statistics( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -2219,7 +2201,7 @@ def get_statistics( response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - deserialized = self._deserialize("ShareStats", pipeline_response) + deserialized = self._deserialize("ShareStats", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -2283,7 +2265,6 @@ def restore( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/_azure_queue_storage.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/_azure_queue_storage.py index 9b9522e6e364..ce3d7d165b43 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/_azure_queue_storage.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/_azure_queue_storage.py @@ -8,6 +8,7 @@ from copy import deepcopy from typing import Any +from typing_extensions import Self from azure.core import 
PipelineClient from azure.core.pipeline import policies @@ -97,7 +98,7 @@ def _send_request(self, request: HttpRequest, *, stream: bool = False, **kwargs: def close(self) -> None: self._client.close() - def __enter__(self) -> "AzureQueueStorage": + def __enter__(self) -> Self: self._client.__enter__() return self diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/_serialization.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/_serialization.py index f0c6180722c8..8139854b97bb 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/_serialization.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/_serialization.py @@ -144,6 +144,8 @@ def _json_attemp(data): # context otherwise. _LOGGER.critical("Wasn't XML not JSON, failing") raise DeserializationError("XML is invalid") from err + elif content_type.startswith("text/"): + return data_as_str raise DeserializationError("Cannot deserialize content-type: {}".format(content_type)) @classmethod diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/_vendor.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/_vendor.py deleted file mode 100644 index 0dafe0e287ff..000000000000 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/_vendor.py +++ /dev/null @@ -1,16 +0,0 @@ -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
-# -------------------------------------------------------------------------- - -from azure.core.pipeline.transport import HttpRequest - - -def _convert_request(request, files=None): - data = request.content if not files else None - request = HttpRequest(method=request.method, url=request.url, headers=request.headers, data=data) - if files: - request.set_formdata_body(files) - return request diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/_azure_queue_storage.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/_azure_queue_storage.py index a6e4460ecb1f..586e4c7d28cd 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/_azure_queue_storage.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/_azure_queue_storage.py @@ -8,6 +8,7 @@ from copy import deepcopy from typing import Any, Awaitable +from typing_extensions import Self from azure.core import AsyncPipelineClient from azure.core.pipeline import policies @@ -99,7 +100,7 @@ def _send_request( async def close(self) -> None: await self._client.close() - async def __aenter__(self) -> "AzureQueueStorage": + async def __aenter__(self) -> Self: await self._client.__aenter__() return self diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/operations/_message_id_operations.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/operations/_message_id_operations.py index dddee6d2daf2..fcd95f055e5c 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/operations/_message_id_operations.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/operations/_message_id_operations.py @@ -18,13 +18,11 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from 
azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from ... import models as _models -from ..._vendor import _convert_request from ...operations._message_id_operations import build_delete_request, build_update_request if sys.version_info >= (3, 9): @@ -124,7 +122,6 @@ async def update( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -194,7 +191,6 @@ async def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/operations/_messages_operations.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/operations/_messages_operations.py index 7be183ba87d7..72fbd1738bf2 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/operations/_messages_operations.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/operations/_messages_operations.py @@ -18,13 +18,11 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from ... 
import models as _models -from ..._vendor import _convert_request from ...operations._messages_operations import ( build_clear_request, build_dequeue_request, @@ -116,7 +114,6 @@ async def dequeue( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -136,7 +133,7 @@ async def dequeue( response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - deserialized = self._deserialize("[DequeuedMessageItem]", pipeline_response) + deserialized = self._deserialize("[DequeuedMessageItem]", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -182,7 +179,6 @@ async def clear( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -276,7 +272,6 @@ async def enqueue( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -296,7 +291,7 @@ async def enqueue( response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - deserialized = self._deserialize("[EnqueuedMessage]", pipeline_response) + deserialized = self._deserialize("[EnqueuedMessage]", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -355,7 +350,6 @@ async def peek( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -375,7 +369,7 @@ async def peek( response_headers["x-ms-version"] = self._deserialize("str", 
response.headers.get("x-ms-version")) response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - deserialized = self._deserialize("[PeekedMessageItem]", pipeline_response) + deserialized = self._deserialize("[PeekedMessageItem]", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/operations/_queue_operations.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/operations/_queue_operations.py index 796f71282102..405d2fed8015 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/operations/_queue_operations.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/operations/_queue_operations.py @@ -18,13 +18,11 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from ... 
import models as _models -from ..._vendor import _convert_request from ...operations._queue_operations import ( build_create_request, build_delete_request, @@ -111,7 +109,6 @@ async def create( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -179,7 +176,6 @@ async def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -244,7 +240,6 @@ async def get_properties( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -324,7 +319,6 @@ async def set_metadata( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -389,7 +383,6 @@ async def get_access_policy( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -409,7 +402,7 @@ async def get_access_policy( response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - deserialized = self._deserialize("[SignedIdentifier]", pipeline_response) + deserialized = self._deserialize("[SignedIdentifier]", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -474,7 +467,6 @@ async def set_access_policy( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = 
self._client.format_url(_request.url) _stream = False diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/operations/_service_operations.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/operations/_service_operations.py index ff256d797a87..809294124f93 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/operations/_service_operations.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/operations/_service_operations.py @@ -18,13 +18,11 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from ... import models as _models -from ..._vendor import _convert_request from ...operations._service_operations import ( build_get_properties_request, build_get_statistics_request, @@ -114,7 +112,6 @@ async def set_properties( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -180,7 +177,6 @@ async def get_properties( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -199,7 +195,7 @@ async def get_properties( response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - deserialized = self._deserialize("StorageServiceProperties", pipeline_response) + deserialized = self._deserialize("StorageServiceProperties", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, 
response_headers) # type: ignore @@ -251,7 +247,6 @@ async def get_statistics( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -271,7 +266,7 @@ async def get_statistics( response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - deserialized = self._deserialize("StorageServiceStats", pipeline_response) + deserialized = self._deserialize("StorageServiceStats", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -350,7 +345,6 @@ async def list_queues_segment( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -370,7 +364,7 @@ async def list_queues_segment( response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - deserialized = self._deserialize("ListQueuesSegmentResponse", pipeline_response) + deserialized = self._deserialize("ListQueuesSegmentResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/operations/_message_id_operations.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/operations/_message_id_operations.py index 32e90d88ef22..b4db4ab66666 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/operations/_message_id_operations.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/operations/_message_id_operations.py @@ -18,14 +18,12 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import 
HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -207,7 +205,6 @@ def update( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -277,7 +274,6 @@ def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/operations/_messages_operations.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/operations/_messages_operations.py index 96bfb1fb774c..61a581285efe 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/operations/_messages_operations.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/operations/_messages_operations.py @@ -18,14 +18,12 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -271,7 +269,6 @@ def dequeue( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -291,7 +288,7 @@ def dequeue( response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - deserialized = self._deserialize("[DequeuedMessageItem]", pipeline_response) + deserialized = self._deserialize("[DequeuedMessageItem]", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -337,7 +334,6 @@ def clear( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -431,7 +427,6 @@ def enqueue( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -451,7 +446,7 @@ def enqueue( response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - deserialized = self._deserialize("[EnqueuedMessage]", pipeline_response) + deserialized = self._deserialize("[EnqueuedMessage]", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -510,7 +505,6 @@ def peek( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -530,7 +524,7 @@ def peek( response_headers["x-ms-version"] = self._deserialize("str", 
response.headers.get("x-ms-version")) response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - deserialized = self._deserialize("[PeekedMessageItem]", pipeline_response) + deserialized = self._deserialize("[PeekedMessageItem]", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/operations/_queue_operations.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/operations/_queue_operations.py index 8de6bf6679f3..ed09febc29cd 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/operations/_queue_operations.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/operations/_queue_operations.py @@ -18,14 +18,12 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -317,7 +315,6 @@ def create( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -385,7 +382,6 @@ def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -450,7 +446,6 @@ def get_properties( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -530,7 +525,6 @@ def set_metadata( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -595,7 +589,6 @@ def get_access_policy( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -615,7 +608,7 @@ def get_access_policy( response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - deserialized = self._deserialize("[SignedIdentifier]", pipeline_response) + deserialized = self._deserialize("[SignedIdentifier]", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -680,7 +673,6 @@ def set_access_policy( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = 
False diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/operations/_service_operations.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/operations/_service_operations.py index f9eea8cde861..99f097bebcf5 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/operations/_service_operations.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/operations/_service_operations.py @@ -18,14 +18,12 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -265,7 +263,6 @@ def set_properties( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -331,7 +328,6 @@ def get_properties( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -350,7 +346,7 @@ def get_properties( response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - deserialized = self._deserialize("StorageServiceProperties", pipeline_response) + deserialized = self._deserialize("StorageServiceProperties", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -402,7 +398,6 @@ def get_statistics( headers=_headers, params=_params, ) - 
_request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -422,7 +417,7 @@ def get_statistics( response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - deserialized = self._deserialize("StorageServiceStats", pipeline_response) + deserialized = self._deserialize("StorageServiceStats", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -501,7 +496,6 @@ def list_queues_segment( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -521,7 +515,7 @@ def list_queues_segment( response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - deserialized = self._deserialize("ListQueuesSegmentResponse", pipeline_response) + deserialized = self._deserialize("ListQueuesSegmentResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore From fdb7d2677e248da7efbeef2307015f13406a7409 Mon Sep 17 00:00:00 2001 From: Vincent Tran Date: Fri, 19 Jul 2024 15:17:17 -0700 Subject: [PATCH 02/22] Matching _shared --- .../azure/storage/filedatalake/_shared/policies_async.py | 1 - .../azure/storage/fileshare/_shared/policies_async.py | 1 - .../azure/storage/queue/_shared/policies_async.py | 1 - 3 files changed, 3 deletions(-) diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/policies_async.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/policies_async.py index c41637ee432b..85afa1bc5754 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/policies_async.py +++ 
b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/policies_async.py @@ -64,7 +64,6 @@ async def send(self, request: "PipelineRequest") -> "PipelineResponse": request.context.options.pop('raw_response_hook', self._response_callback) response = await self.next.send(request) - await response.http_response.load_body() will_retry = is_retry(response, request.context.options.get('mode')) # Auth error could come from Bearer challenge, in which case this request will be made again diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared/policies_async.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared/policies_async.py index bf03d3690598..0e651471e560 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared/policies_async.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared/policies_async.py @@ -64,7 +64,6 @@ async def send(self, request: "PipelineRequest") -> "PipelineResponse": request.context.options.pop('raw_response_hook', self._response_callback) response = await self.next.send(request) - await response.http_response.load_body() will_retry = is_retry(response, request.context.options.get('mode')) # Auth error could come from Bearer challenge, in which case this request will be made again diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/policies_async.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/policies_async.py index bf03d3690598..0e651471e560 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/policies_async.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/policies_async.py @@ -64,7 +64,6 @@ async def send(self, request: "PipelineRequest") -> "PipelineResponse": request.context.options.pop('raw_response_hook', self._response_callback) response = await self.next.send(request) - await response.http_response.load_body() will_retry = is_retry(response, 
request.context.options.get('mode')) # Auth error could come from Bearer challenge, in which case this request will be made again From 543d93bf59e7b34093eee4db9e45cedbf3953508 Mon Sep 17 00:00:00 2001 From: Vincent Tran Date: Mon, 22 Jul 2024 13:50:18 -0700 Subject: [PATCH 03/22] Decouple is_retry in policies_async, test in blob then propagate to other packages --- .../storage/blob/_shared/policies_async.py | 59 +++++++++++++++++-- 1 file changed, 55 insertions(+), 4 deletions(-) diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/policies_async.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/policies_async.py index 0e651471e560..5997116a1d28 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/policies_async.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/policies_async.py @@ -6,16 +6,25 @@ # pylint: disable=invalid-overridden-method import asyncio +import base64 +import hashlib import logging import random +from io import SEEK_SET from typing import Any, Dict, TYPE_CHECKING -from azure.core.exceptions import AzureError +from azure.core.exceptions import AzureError, StreamClosedError, StreamConsumedError from azure.core.pipeline.policies import AsyncBearerTokenCredentialPolicy, AsyncHTTPPolicy from .authentication import StorageHttpChallenge from .constants import DEFAULT_OAUTH_SCOPE -from .policies import is_retry, StorageRetryPolicy +from .models import LocationMode +from .policies import StorageRetryPolicy, StorageContentValidation + +try: + _unicode_type = unicode # type: ignore +except NameError: + _unicode_type = str if TYPE_CHECKING: from azure.core.credentials_async import AsyncTokenCredential @@ -28,6 +37,48 @@ _LOGGER = logging.getLogger(__name__) +def encode_base64(data): + if isinstance(data, _unicode_type): + data = data.encode('utf-8') + encoded = base64.b64encode(data) + return encoded.decode('utf-8') + + +# Is this method/status code retryable? 
(Based on allowlists and control +# variables such as the number of total retries to allow, whether to +# respect the Retry-After header, whether this header is present, and +# whether the returned status code is on the list of status codes to +# be retried upon on the presence of the aforementioned header) +async def is_retry(response, mode): # pylint: disable=too-many-return-statements + status = response.http_response.status_code + if 300 <= status < 500: + # An exception occurred, but in most cases it was expected. Examples could + # include a 309 Conflict or 412 Precondition Failed. + if status == 404 and mode == LocationMode.SECONDARY: + # Response code 404 should be retried if secondary was used. + return True + if status == 408: + # Response code 408 is a timeout and should be retried. + return True + return False + if status >= 500: + # Response codes above 500 with the exception of 501 Not Implemented and + # 505 Version Not Supported indicate a server issue and should be retried. 
+ if status in [501, 505]: + return False + return True + # retry if invalid content md5 + if response.context.get('validate_content', False) and response.http_response.headers.get('content-md5'): + try: + await response.http_response.read() # Load the body in memory and close the socket + except (StreamClosedError, StreamConsumedError): + pass + computed_md5 = response.http_request.headers.get('content-md5', None) or \ + encode_base64(StorageContentValidation.get_content_md5(response.http_response.content)) + if response.http_response.headers['content-md5'] != computed_md5: + return True + return False + async def retry_hook(settings, **kwargs): if settings['hook']: if asyncio.iscoroutine(settings['hook']): @@ -65,7 +116,7 @@ async def send(self, request: "PipelineRequest") -> "PipelineResponse": response = await self.next.send(request) - will_retry = is_retry(response, request.context.options.get('mode')) + will_retry = await is_retry(response, request.context.options.get('mode')) # Auth error could come from Bearer challenge, in which case this request will be made again is_auth_error = response.http_response.status_code == 401 should_update_counts = not (will_retry or is_auth_error) @@ -111,7 +162,7 @@ async def send(self, request): while retries_remaining: try: response = await self.next.send(request) - if is_retry(response, retry_settings['mode']): + if await is_retry(response, retry_settings['mode']): retries_remaining = self.increment( retry_settings, request=request.http_request, From 87fe7fb04c396c21c0748e8ced5023d135e96086 Mon Sep 17 00:00:00 2001 From: Vincent Tran Date: Mon, 22 Jul 2024 15:16:36 -0700 Subject: [PATCH 04/22] Fix file_share one-off async body loading --- .../azure/storage/fileshare/aio/_download_async.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_download_async.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_download_async.py 
index 4bead0ebf43f..22b5121b13cb 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_download_async.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_download_async.py @@ -22,7 +22,8 @@ async def process_content(data): raise ValueError("Response cannot be None.") try: - return data.response.body() + await data.response.read() + return data.response.content except Exception as error: raise HttpResponseError(message="Download stream interrupted.", response=data.response, error=error) from error From f2ea4c10cd2eef9a15da769434602b95a4393d4b Mon Sep 17 00:00:00 2001 From: Vincent Tran Date: Tue, 23 Jul 2024 14:25:57 -0700 Subject: [PATCH 05/22] Fileshare's policies_async --- .../fileshare/_shared/policies_async.py | 59 +++++++++++++++++-- 1 file changed, 55 insertions(+), 4 deletions(-) diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared/policies_async.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared/policies_async.py index 0e651471e560..5997116a1d28 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared/policies_async.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared/policies_async.py @@ -6,16 +6,25 @@ # pylint: disable=invalid-overridden-method import asyncio +import base64 +import hashlib import logging import random +from io import SEEK_SET from typing import Any, Dict, TYPE_CHECKING -from azure.core.exceptions import AzureError +from azure.core.exceptions import AzureError, StreamClosedError, StreamConsumedError from azure.core.pipeline.policies import AsyncBearerTokenCredentialPolicy, AsyncHTTPPolicy from .authentication import StorageHttpChallenge from .constants import DEFAULT_OAUTH_SCOPE -from .policies import is_retry, StorageRetryPolicy +from .models import LocationMode +from .policies import StorageRetryPolicy, StorageContentValidation + +try: + _unicode_type = unicode # type: ignore +except 
NameError: + _unicode_type = str if TYPE_CHECKING: from azure.core.credentials_async import AsyncTokenCredential @@ -28,6 +37,48 @@ _LOGGER = logging.getLogger(__name__) +def encode_base64(data): + if isinstance(data, _unicode_type): + data = data.encode('utf-8') + encoded = base64.b64encode(data) + return encoded.decode('utf-8') + + +# Is this method/status code retryable? (Based on allowlists and control +# variables such as the number of total retries to allow, whether to +# respect the Retry-After header, whether this header is present, and +# whether the returned status code is on the list of status codes to +# be retried upon on the presence of the aforementioned header) +async def is_retry(response, mode): # pylint: disable=too-many-return-statements + status = response.http_response.status_code + if 300 <= status < 500: + # An exception occurred, but in most cases it was expected. Examples could + # include a 309 Conflict or 412 Precondition Failed. + if status == 404 and mode == LocationMode.SECONDARY: + # Response code 404 should be retried if secondary was used. + return True + if status == 408: + # Response code 408 is a timeout and should be retried. + return True + return False + if status >= 500: + # Response codes above 500 with the exception of 501 Not Implemented and + # 505 Version Not Supported indicate a server issue and should be retried. 
+ if status in [501, 505]: + return False + return True + # retry if invalid content md5 + if response.context.get('validate_content', False) and response.http_response.headers.get('content-md5'): + try: + await response.http_response.read() # Load the body in memory and close the socket + except (StreamClosedError, StreamConsumedError): + pass + computed_md5 = response.http_request.headers.get('content-md5', None) or \ + encode_base64(StorageContentValidation.get_content_md5(response.http_response.content)) + if response.http_response.headers['content-md5'] != computed_md5: + return True + return False + async def retry_hook(settings, **kwargs): if settings['hook']: if asyncio.iscoroutine(settings['hook']): @@ -65,7 +116,7 @@ async def send(self, request: "PipelineRequest") -> "PipelineResponse": response = await self.next.send(request) - will_retry = is_retry(response, request.context.options.get('mode')) + will_retry = await is_retry(response, request.context.options.get('mode')) # Auth error could come from Bearer challenge, in which case this request will be made again is_auth_error = response.http_response.status_code == 401 should_update_counts = not (will_retry or is_auth_error) @@ -111,7 +162,7 @@ async def send(self, request): while retries_remaining: try: response = await self.next.send(request) - if is_retry(response, retry_settings['mode']): + if await is_retry(response, retry_settings['mode']): retries_remaining = self.increment( retry_settings, request=request.http_request, From 6228d1dd55b7270a2bab9b3f4ca9081e843a898e Mon Sep 17 00:00:00 2001 From: vincenttran-msft Date: Tue, 23 Jul 2024 16:09:36 -0700 Subject: [PATCH 06/22] Lint + retry, next step is consistent _shared --- .../storage/blob/_shared/policies_async.py | 2 - .../azure/storage/blob/aio/_download_async.py | 141 ++++++++++-------- .../fileshare/_shared/policies_async.py | 2 - 3 files changed, 81 insertions(+), 64 deletions(-) diff --git 
a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/policies_async.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/policies_async.py index 5997116a1d28..9a44110992f9 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/policies_async.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/policies_async.py @@ -7,10 +7,8 @@ import asyncio import base64 -import hashlib import logging import random -from io import SEEK_SET from typing import Any, Dict, TYPE_CHECKING from azure.core.exceptions import AzureError, StreamClosedError, StreamConsumedError diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_download_async.py b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_download_async.py index 92fd724c95df..704deaf57712 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_download_async.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_download_async.py @@ -10,6 +10,7 @@ import codecs import sys import warnings +from aiohttp import ClientPayloadError from io import BytesIO, StringIO from itertools import islice from typing import ( @@ -19,7 +20,7 @@ Tuple, TypeVar, Union, TYPE_CHECKING ) -from azure.core.exceptions import HttpResponseError +from azure.core.exceptions import HttpResponseError, ServiceResponseError from .._shared.request_handlers import validate_and_format_range_headers from .._shared.response_handlers import parse_length_from_content_range, process_storage_error @@ -121,18 +122,27 @@ async def _download_chunk(self, chunk_start: int, chunk_end: int) -> Tuple[bytes download_range[1], check_content_md5=self.validate_content ) - try: - _, response = await cast(Awaitable[Any], self.client.download( - range=range_header, - range_get_content_md5=range_validation, - validate_content=self.validate_content, - data_stream_total=self.total_size, - download_stream_current=self.progress_total, - **self.request_options - )) - - except HttpResponseError as error: - 
process_storage_error(error) + retry_active = True + retry_total = 3 + while retry_active: + try: + _, response = await cast(Awaitable[Any], self.client.download( + range=range_header, + range_get_content_md5=range_validation, + validate_content=self.validate_content, + data_stream_total=self.total_size, + download_stream_current=self.progress_total, + **self.request_options + )) + retry_active = False + + except HttpResponseError as error: + process_storage_error(error) + except ClientPayloadError as error: + retry_total -= 1 + if retry_total <= 0: + raise ServiceResponseError(error, error=error) + await asyncio.sleep(1) chunk_data = await process_content(response, offset[0], offset[1], self.encryption_options) content_length = response.content_length @@ -345,54 +355,64 @@ async def _initial_request(self): end_range_required=False, check_content_md5=self._validate_content) - try: - location_mode, response = cast(Tuple[Optional[str], Any], await self._clients.blob.download( - range=range_header, - range_get_content_md5=range_validation, - validate_content=self._validate_content, - data_stream_total=None, - download_stream_current=0, - **self._request_options)) - - # Check the location we read from to ensure we use the same one - # for subsequent requests. 
- self._location_mode = location_mode - - # Parse the total file size and adjust the download size if ranges - # were specified - self._file_size = parse_length_from_content_range(response.properties.content_range) - if self._file_size is None: - raise ValueError("Required Content-Range response header is missing or malformed.") - # Remove any extra encryption data size from blob size - self._file_size = adjust_blob_size_for_encryption(self._file_size, self._encryption_data) - - if self._end_range is not None and self._start_range is not None: - # Use the length unless it is over the end of the file - self.size = min(self._file_size - self._start_range, self._end_range - self._start_range + 1) - elif self._start_range is not None: - self.size = self._file_size - self._start_range - else: - self.size = self._file_size + retry_active = True + retry_total = 3 + while retry_active: + try: + location_mode, response = cast(Tuple[Optional[str], Any], await self._clients.blob.download( + range=range_header, + range_get_content_md5=range_validation, + validate_content=self._validate_content, + data_stream_total=None, + download_stream_current=0, + **self._request_options)) + + # Check the location we read from to ensure we use the same one + # for subsequent requests. 
+ self._location_mode = location_mode + + # Parse the total file size and adjust the download size if ranges + # were specified + self._file_size = parse_length_from_content_range(response.properties.content_range) + if self._file_size is None: + raise ValueError("Required Content-Range response header is missing or malformed.") + # Remove any extra encryption data size from blob size + self._file_size = adjust_blob_size_for_encryption(self._file_size, self._encryption_data) + + if self._end_range is not None and self._start_range is not None: + # Use the length unless it is over the end of the file + self.size = min(self._file_size - self._start_range, self._end_range - self._start_range + 1) + elif self._start_range is not None: + self.size = self._file_size - self._start_range + else: + self.size = self._file_size + retry_active = False - except HttpResponseError as error: - if self._start_range is None and error.response and error.status_code == 416: - # Get range will fail on an empty file. If the user did not - # request a range, do a regular get request in order to get - # any properties. - try: - _, response = cast(Tuple[Optional[Any], Any], await self._clients.blob.download( - validate_content=self._validate_content, - data_stream_total=0, - download_stream_current=0, - **self._request_options)) - except HttpResponseError as e: - process_storage_error(e) - - # Set the download size to empty - self.size = 0 - self._file_size = 0 - else: - process_storage_error(error) + except HttpResponseError as error: + if self._start_range is None and error.response and error.status_code == 416: + # Get range will fail on an empty file. If the user did not + # request a range, do a regular get request in order to get + # any properties. 
+ try: + _, response = cast(Tuple[Optional[Any], Any], await self._clients.blob.download( + validate_content=self._validate_content, + data_stream_total=0, + download_stream_current=0, + **self._request_options)) + except HttpResponseError as e: + process_storage_error(e) + + # Set the download size to empty + self.size = 0 + self._file_size = 0 + else: + process_storage_error(error) + + except ClientPayloadError as error: + retry_total -= 1 + if retry_total <= 0: + raise ServiceResponseError(error, error=error) + await asyncio.sleep(1) if self.size == 0: self._current_content = b"" @@ -848,3 +868,4 @@ async def download_to_stream(self, stream, max_concurrency=1): self._max_concurrency = max_concurrency await self.readinto(stream) return self.properties + diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared/policies_async.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared/policies_async.py index 5997116a1d28..9a44110992f9 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared/policies_async.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared/policies_async.py @@ -7,10 +7,8 @@ import asyncio import base64 -import hashlib import logging import random -from io import SEEK_SET from typing import Any, Dict, TYPE_CHECKING from azure.core.exceptions import AzureError, StreamClosedError, StreamConsumedError From 513f1f23d5ffb7a4b88b4aa898c9887c545c2c1e Mon Sep 17 00:00:00 2001 From: Vincent Tran Date: Tue, 23 Jul 2024 18:24:06 -0700 Subject: [PATCH 07/22] Fix wrong translation mapping --- .../azure/storage/blob/aio/_download_async.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_download_async.py b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_download_async.py index 704deaf57712..32b61337bd38 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_download_async.py +++ 
b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_download_async.py @@ -20,7 +20,7 @@ Tuple, TypeVar, Union, TYPE_CHECKING ) -from azure.core.exceptions import HttpResponseError, ServiceResponseError +from azure.core.exceptions import HttpResponseError, IncompleteReadError, ServiceResponseError from .._shared.request_handlers import validate_and_format_range_headers from .._shared.response_handlers import parse_length_from_content_range, process_storage_error @@ -138,7 +138,7 @@ async def _download_chunk(self, chunk_start: int, chunk_end: int) -> Tuple[bytes except HttpResponseError as error: process_storage_error(error) - except ClientPayloadError as error: + except IncompleteReadError as error: retry_total -= 1 if retry_total <= 0: raise ServiceResponseError(error, error=error) From f8bedfa162e6dd220adb71a5c7f7ca805da3c82f Mon Sep 17 00:00:00 2001 From: Vincent Tran Date: Wed, 24 Jul 2024 13:09:32 -0700 Subject: [PATCH 08/22] Make _download_async match sync, passing locally --- .../azure/storage/blob/aio/_download_async.py | 50 ++++++++++--------- 1 file changed, 26 insertions(+), 24 deletions(-) diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_download_async.py b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_download_async.py index 32b61337bd38..5acb28810dde 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_download_async.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_download_async.py @@ -10,7 +10,6 @@ import codecs import sys import warnings -from aiohttp import ClientPayloadError from io import BytesIO, StringIO from itertools import islice from typing import ( @@ -20,7 +19,7 @@ Tuple, TypeVar, Union, TYPE_CHECKING ) -from azure.core.exceptions import HttpResponseError, IncompleteReadError, ServiceResponseError +from azure.core.exceptions import DecodeError, HttpResponseError, IncompleteReadError from .._shared.request_handlers import validate_and_format_range_headers from 
.._shared.response_handlers import parse_length_from_content_range, process_storage_error @@ -122,6 +121,7 @@ async def _download_chunk(self, chunk_start: int, chunk_end: int) -> Tuple[bytes download_range[1], check_content_md5=self.validate_content ) + retry_active = True retry_total = 3 while retry_active: @@ -134,17 +134,17 @@ async def _download_chunk(self, chunk_start: int, chunk_end: int) -> Tuple[bytes download_stream_current=self.progress_total, **self.request_options )) - retry_active = False - except HttpResponseError as error: process_storage_error(error) - except IncompleteReadError as error: + + try: + chunk_data = await process_content(response, offset[0], offset[1], self.encryption_options) + retry_active = False + except (IncompleteReadError, HttpResponseError, DecodeError) as error: retry_total -= 1 if retry_total <= 0: - raise ServiceResponseError(error, error=error) + raise HttpResponseError(error, error=error) from error await asyncio.sleep(1) - - chunk_data = await process_content(response, offset[0], offset[1], self.encryption_options) content_length = response.content_length # This makes sure that if_match is set so that we can validate @@ -353,7 +353,8 @@ async def _initial_request(self): self._initial_range[1], start_range_required=False, end_range_required=False, - check_content_md5=self._validate_content) + check_content_md5=self._validate_content + ) retry_active = True retry_total = 3 @@ -365,7 +366,8 @@ async def _initial_request(self): validate_content=self._validate_content, data_stream_total=None, download_stream_current=0, - **self._request_options)) + **self._request_options + )) # Check the location we read from to ensure we use the same one # for subsequent requests. 
@@ -386,7 +388,6 @@ async def _initial_request(self): self.size = self._file_size - self._start_range else: self.size = self._file_size - retry_active = False except HttpResponseError as error: if self._start_range is None and error.response and error.status_code == 416: @@ -407,22 +408,23 @@ async def _initial_request(self): self._file_size = 0 else: process_storage_error(error) - - except ClientPayloadError as error: + + try: + if self.size == 0: + self._current_content = b"" + else: + self._current_content = await process_content( + response, + self._initial_offset[0], + self._initial_offset[1], + self._encryption_options + ) + retry_active = False + except (IncompleteReadError, HttpResponseError, DecodeError) as error: retry_total -= 1 if retry_total <= 0: - raise ServiceResponseError(error, error=error) + raise HttpResponseError(error, error=error) from error await asyncio.sleep(1) - - if self.size == 0: - self._current_content = b"" - else: - self._current_content = await process_content( - response, - self._initial_offset[0], - self._initial_offset[1], - self._encryption_options - ) self._download_offset += len(self._current_content) self._raw_download_offset += response.content_length From 33c9f5f57ef029b5047b4a60c71fad048129b32c Mon Sep 17 00:00:00 2001 From: vincenttran-msft Date: Wed, 24 Jul 2024 15:35:42 -0700 Subject: [PATCH 09/22] Re-record since tests seem to be hitting new readall logic and some encodings changed --- sdk/storage/azure-storage-blob/assets.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sdk/storage/azure-storage-blob/assets.json b/sdk/storage/azure-storage-blob/assets.json index 8ef282904b09..f09e04f00aad 100644 --- a/sdk/storage/azure-storage-blob/assets.json +++ b/sdk/storage/azure-storage-blob/assets.json @@ -2,5 +2,5 @@ "AssetsRepo": "Azure/azure-sdk-assets", "AssetsRepoPrefixPath": "python", "TagPrefix": "python/storage/azure-storage-blob", - "Tag": "python/storage/azure-storage-blob_4bb162f320" + "Tag": 
"python/storage/azure-storage-blob_c054c9a5ee" } From 4ac2cc4cd352cafef8b8f3b2ba4bbdbfd5d34eec Mon Sep 17 00:00:00 2001 From: vincenttran-msft Date: Wed, 24 Jul 2024 16:54:12 -0700 Subject: [PATCH 10/22] Async encryption v1 --- sdk/storage/azure-storage-blob/assets.json | 2 +- sdk/storage/azure-storage-blob/tests/test_blob_encryption.py | 4 ++-- .../azure-storage-blob/tests/test_blob_encryption_async.py | 4 ++-- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/sdk/storage/azure-storage-blob/assets.json b/sdk/storage/azure-storage-blob/assets.json index f09e04f00aad..84d61cd19419 100644 --- a/sdk/storage/azure-storage-blob/assets.json +++ b/sdk/storage/azure-storage-blob/assets.json @@ -2,5 +2,5 @@ "AssetsRepo": "Azure/azure-sdk-assets", "AssetsRepoPrefixPath": "python", "TagPrefix": "python/storage/azure-storage-blob", - "Tag": "python/storage/azure-storage-blob_c054c9a5ee" + "Tag": "python/storage/azure-storage-blob_fad0e99de3" } diff --git a/sdk/storage/azure-storage-blob/tests/test_blob_encryption.py b/sdk/storage/azure-storage-blob/tests/test_blob_encryption.py index 600f28efc470..f7eeadde11de 100644 --- a/sdk/storage/azure-storage-blob/tests/test_blob_encryption.py +++ b/sdk/storage/azure-storage-blob/tests/test_blob_encryption.py @@ -174,7 +174,7 @@ def test_invalid_value_kek_unwrap(self, **kwargs): self._setup(storage_account_name, storage_account_key) self.bsc.require_encryption = True self.bsc.key_encryption_key = KeyWrapper('key1') - blob = self._create_small_blob(BlobType.BlockBlob) + blob = self._create_small_blob(BlobType.BLOCKBLOB) # Act blob.key_encryption_key = KeyWrapper('key1') @@ -250,7 +250,7 @@ def test_get_blob_nonmatching_kid(self, **kwargs): self._setup(storage_account_name, storage_account_key) self.bsc.require_encryption = True self.bsc.key_encryption_key = KeyWrapper('key1') - blob = self._create_small_blob(BlobType.BlockBlob) + blob = self._create_small_blob(BlobType.BLOCKBLOB) # Act self.bsc.key_encryption_key.kid = 
'Invalid' diff --git a/sdk/storage/azure-storage-blob/tests/test_blob_encryption_async.py b/sdk/storage/azure-storage-blob/tests/test_blob_encryption_async.py index c155b9cd97fd..d715060ef6e8 100644 --- a/sdk/storage/azure-storage-blob/tests/test_blob_encryption_async.py +++ b/sdk/storage/azure-storage-blob/tests/test_blob_encryption_async.py @@ -178,7 +178,7 @@ async def test_invalid_value_kek_unwrap(self, **kwargs): await self._setup(storage_account_name, storage_account_key) self.bsc.require_encryption = True self.bsc.key_encryption_key = KeyWrapper('key1') - blob = await self._create_small_blob(BlobType.BlockBlob) + blob = await self._create_small_blob(BlobType.BLOCKBLOB) # Act blob.key_encryption_key = KeyWrapper('key1') @@ -257,7 +257,7 @@ async def test_get_blob_nonmatching_kid(self, **kwargs): await self._setup(storage_account_name, storage_account_key) self.bsc.require_encryption = True self.bsc.key_encryption_key = KeyWrapper('key1') - blob = await self._create_small_blob(BlobType.BlockBlob) + blob = await self._create_small_blob(BlobType.BLOCKBLOB) # Act self.bsc.key_encryption_key.kid = 'Invalid' From c0ebfec77db551b4c62bb7fa07df43f1ce72d1d2 Mon Sep 17 00:00:00 2001 From: vincenttran-msft Date: Wed, 24 Jul 2024 18:12:16 -0700 Subject: [PATCH 11/22] Propagate policies_async to datalake & queue --- .../filedatalake/_shared/policies_async.py | 57 +++++++++++++++++-- .../storage/queue/_shared/policies_async.py | 57 +++++++++++++++++-- 2 files changed, 106 insertions(+), 8 deletions(-) diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/policies_async.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/policies_async.py index 85afa1bc5754..a848cb42c773 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/policies_async.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/policies_async.py @@ -6,16 +6,23 @@ # pylint: 
disable=invalid-overridden-method import asyncio +import base64 import logging import random from typing import Any, Dict, TYPE_CHECKING -from azure.core.exceptions import AzureError +from azure.core.exceptions import AzureError, StreamClosedError, StreamConsumedError from azure.core.pipeline.policies import AsyncBearerTokenCredentialPolicy, AsyncHTTPPolicy from .authentication import StorageHttpChallenge from .constants import DEFAULT_OAUTH_SCOPE -from .policies import is_retry, StorageRetryPolicy +from .models import LocationMode +from .policies import StorageRetryPolicy, StorageContentValidation + +try: + _unicode_type = unicode # type: ignore +except NameError: + _unicode_type = str if TYPE_CHECKING: from azure.core.credentials_async import AsyncTokenCredential @@ -28,6 +35,48 @@ _LOGGER = logging.getLogger(__name__) +def encode_base64(data): + if isinstance(data, _unicode_type): + data = data.encode('utf-8') + encoded = base64.b64encode(data) + return encoded.decode('utf-8') + + +# Is this method/status code retryable? (Based on allowlists and control +# variables such as the number of total retries to allow, whether to +# respect the Retry-After header, whether this header is present, and +# whether the returned status code is on the list of status codes to +# be retried upon on the presence of the aforementioned header) +async def is_retry(response, mode): # pylint: disable=too-many-return-statements + status = response.http_response.status_code + if 300 <= status < 500: + # An exception occurred, but in most cases it was expected. Examples could + # include a 309 Conflict or 412 Precondition Failed. + if status == 404 and mode == LocationMode.SECONDARY: + # Response code 404 should be retried if secondary was used. + return True + if status == 408: + # Response code 408 is a timeout and should be retried. 
+ return True + return False + if status >= 500: + # Response codes above 500 with the exception of 501 Not Implemented and + # 505 Version Not Supported indicate a server issue and should be retried. + if status in [501, 505]: + return False + return True + # retry if invalid content md5 + if response.context.get('validate_content', False) and response.http_response.headers.get('content-md5'): + try: + await response.http_response.read() # Load the body in memory and close the socket + except (StreamClosedError, StreamConsumedError): + pass + computed_md5 = response.http_request.headers.get('content-md5', None) or \ + encode_base64(StorageContentValidation.get_content_md5(response.http_response.content)) + if response.http_response.headers['content-md5'] != computed_md5: + return True + return False + async def retry_hook(settings, **kwargs): if settings['hook']: if asyncio.iscoroutine(settings['hook']): @@ -65,7 +114,7 @@ async def send(self, request: "PipelineRequest") -> "PipelineResponse": response = await self.next.send(request) - will_retry = is_retry(response, request.context.options.get('mode')) + will_retry = await is_retry(response, request.context.options.get('mode')) # Auth error could come from Bearer challenge, in which case this request will be made again is_auth_error = response.http_response.status_code == 401 should_update_counts = not (will_retry or is_auth_error) @@ -111,7 +160,7 @@ async def send(self, request): while retries_remaining: try: response = await self.next.send(request) - if is_retry(response, retry_settings['mode']): + if await is_retry(response, retry_settings['mode']): retries_remaining = self.increment( retry_settings, request=request.http_request, diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/policies_async.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/policies_async.py index 0e651471e560..9a44110992f9 100644 --- 
a/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/policies_async.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/policies_async.py @@ -6,16 +6,23 @@ # pylint: disable=invalid-overridden-method import asyncio +import base64 import logging import random from typing import Any, Dict, TYPE_CHECKING -from azure.core.exceptions import AzureError +from azure.core.exceptions import AzureError, StreamClosedError, StreamConsumedError from azure.core.pipeline.policies import AsyncBearerTokenCredentialPolicy, AsyncHTTPPolicy from .authentication import StorageHttpChallenge from .constants import DEFAULT_OAUTH_SCOPE -from .policies import is_retry, StorageRetryPolicy +from .models import LocationMode +from .policies import StorageRetryPolicy, StorageContentValidation + +try: + _unicode_type = unicode # type: ignore +except NameError: + _unicode_type = str if TYPE_CHECKING: from azure.core.credentials_async import AsyncTokenCredential @@ -28,6 +35,48 @@ _LOGGER = logging.getLogger(__name__) +def encode_base64(data): + if isinstance(data, _unicode_type): + data = data.encode('utf-8') + encoded = base64.b64encode(data) + return encoded.decode('utf-8') + + +# Is this method/status code retryable? (Based on allowlists and control +# variables such as the number of total retries to allow, whether to +# respect the Retry-After header, whether this header is present, and +# whether the returned status code is on the list of status codes to +# be retried upon on the presence of the aforementioned header) +async def is_retry(response, mode): # pylint: disable=too-many-return-statements + status = response.http_response.status_code + if 300 <= status < 500: + # An exception occurred, but in most cases it was expected. Examples could + # include a 309 Conflict or 412 Precondition Failed. + if status == 404 and mode == LocationMode.SECONDARY: + # Response code 404 should be retried if secondary was used. 
+ return True + if status == 408: + # Response code 408 is a timeout and should be retried. + return True + return False + if status >= 500: + # Response codes above 500 with the exception of 501 Not Implemented and + # 505 Version Not Supported indicate a server issue and should be retried. + if status in [501, 505]: + return False + return True + # retry if invalid content md5 + if response.context.get('validate_content', False) and response.http_response.headers.get('content-md5'): + try: + await response.http_response.read() # Load the body in memory and close the socket + except (StreamClosedError, StreamConsumedError): + pass + computed_md5 = response.http_request.headers.get('content-md5', None) or \ + encode_base64(StorageContentValidation.get_content_md5(response.http_response.content)) + if response.http_response.headers['content-md5'] != computed_md5: + return True + return False + async def retry_hook(settings, **kwargs): if settings['hook']: if asyncio.iscoroutine(settings['hook']): @@ -65,7 +114,7 @@ async def send(self, request: "PipelineRequest") -> "PipelineResponse": response = await self.next.send(request) - will_retry = is_retry(response, request.context.options.get('mode')) + will_retry = await is_retry(response, request.context.options.get('mode')) # Auth error could come from Bearer challenge, in which case this request will be made again is_auth_error = response.http_response.status_code == 401 should_update_counts = not (will_retry or is_auth_error) @@ -111,7 +160,7 @@ async def send(self, request): while retries_remaining: try: response = await self.next.send(request) - if is_retry(response, retry_settings['mode']): + if await is_retry(response, retry_settings['mode']): retries_remaining = self.increment( retry_settings, request=request.http_request, From 34ed49bd3b03636571c27121f54e6637661103d5 Mon Sep 17 00:00:00 2001 From: vincenttran-msft Date: Wed, 24 Jul 2024 18:28:50 -0700 Subject: [PATCH 12/22] Whitespace --- 
.../azure/storage/blob/aio/_download_async.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_download_async.py b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_download_async.py index 5acb28810dde..77649b7e0dd1 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_download_async.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_download_async.py @@ -136,7 +136,7 @@ async def _download_chunk(self, chunk_start: int, chunk_end: int) -> Tuple[bytes )) except HttpResponseError as error: process_storage_error(error) - + try: chunk_data = await process_content(response, offset[0], offset[1], self.encryption_options) retry_active = False @@ -408,7 +408,7 @@ async def _initial_request(self): self._file_size = 0 else: process_storage_error(error) - + try: if self.size == 0: self._current_content = b"" @@ -870,4 +870,4 @@ async def download_to_stream(self, stream, max_concurrency=1): self._max_concurrency = max_concurrency await self.readinto(stream) return self.properties - + From 55e0a88731eb50a224d1db4258823f237907866f Mon Sep 17 00:00:00 2001 From: vincenttran-msft Date: Thu, 25 Jul 2024 11:05:32 -0700 Subject: [PATCH 13/22] Nit again --- .../azure-storage-blob/azure/storage/blob/aio/_download_async.py | 1 - 1 file changed, 1 deletion(-) diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_download_async.py b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_download_async.py index 77649b7e0dd1..a48e60dc236a 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_download_async.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_download_async.py @@ -870,4 +870,3 @@ async def download_to_stream(self, stream, max_concurrency=1): self._max_concurrency = max_concurrency await self.readinto(stream) return self.properties - From f9703e5ec1f06cb0c8f289df7777c771f8658859 Mon Sep 17 00:00:00 2001 From: vincenttran-msft 
Date: Thu, 25 Jul 2024 12:33:44 -0700 Subject: [PATCH 14/22] Mypy now wants a piece --- .../azure/storage/blob/aio/_download_async.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_download_async.py b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_download_async.py index a48e60dc236a..dab5afdca85d 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_download_async.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_download_async.py @@ -47,7 +47,7 @@ async def process_content(data: Any, start_offset: int, end_offset: int, encrypt if data is None: raise ValueError("Response cannot be None.") await data.response.read() - content = data.response.content + content = cast(bytes, data.response.content) if encryption.get('key') is not None or encryption.get('resolver') is not None: try: return decrypt_blob( From 1f6afe01aa4388a2e15a3ec442737a3177bb4cb0 Mon Sep 17 00:00:00 2001 From: vincenttran-msft Date: Thu, 25 Jul 2024 17:36:59 -0700 Subject: [PATCH 15/22] Adjust test case to match our new retry logic --- sdk/storage/azure-storage-blob/tests/test_retry_async.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/sdk/storage/azure-storage-blob/tests/test_retry_async.py b/sdk/storage/azure-storage-blob/tests/test_retry_async.py index f19e092d6956..3b676f5489f3 100644 --- a/sdk/storage/azure-storage-blob/tests/test_retry_async.py +++ b/sdk/storage/azure-storage-blob/tests/test_retry_async.py @@ -8,11 +8,12 @@ import pytest from unittest import mock -from aiohttp.client_exceptions import ClientPayloadError, ServerTimeoutError +from aiohttp.client_exceptions import ServerTimeoutError from aiohttp.streams import StreamReader from azure.core.exceptions import ( AzureError, ClientAuthenticationError, + IncompleteReadError, HttpResponseError, ResourceExistsError, ServiceResponseError @@ -520,13 +521,13 @@ async def test_streaming_retry(self, 
**kwargs): stream_reader_read_mock = mock.MagicMock() future = asyncio.Future() - future.set_exception(ClientPayloadError()) + future.set_exception(IncompleteReadError()) stream_reader_read_mock.return_value = future with mock.patch.object(StreamReader, "read", stream_reader_read_mock), pytest.raises(HttpResponseError): blob = container.get_blob_client(blob=blob_name) count = [0] blob._pipeline._transport.send = self._count_wrapper(count, blob._pipeline._transport.send) await blob.download_blob() - assert stream_reader_read_mock.call_count == count[0] == 4 + assert stream_reader_read_mock.call_count == count[0] == 3 # ------------------------------------------------------------------------------ From 47c95e987a9aebf8e7543cfdec10adb62eaa8c19 Mon Sep 17 00:00:00 2001 From: vincenttran-msft Date: Mon, 12 Aug 2024 14:22:32 -0700 Subject: [PATCH 16/22] Regen, no pinning --- .../aio/operations/_blob_operations.py | 26 +-- .../_generated/operations/_blob_operations.py | 26 +-- .../aio/operations/_path_operations.py | 4 +- .../_generated/operations/_path_operations.py | 4 +- .../aio/operations/_file_operations.py | 176 +++++------------- .../_generated/operations/_file_operations.py | 176 +++++------------- .../aio/operations/_queue_operations.py | 12 +- .../operations/_queue_operations.py | 12 +- 8 files changed, 124 insertions(+), 312 deletions(-) diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_blob_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_blob_operations.py index 60a242d61915..9cf0b47f035d 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_blob_operations.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_blob_operations.py @@ -293,8 +293,6 @@ async def download( ) response_headers["x-ms-legal-hold"] = self._deserialize("bool", response.headers.get("x-ms-legal-hold")) - deserialized = 
response.stream_download(self._client._pipeline, decompress=_decompress) - if response.status_code == 206: response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) response_headers["x-ms-creation-time"] = self._deserialize( @@ -377,7 +375,7 @@ async def download( ) response_headers["x-ms-legal-hold"] = self._deserialize("bool", response.headers.get("x-ms-legal-hold")) - deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -2574,19 +2572,11 @@ async def set_tier( # pylint: disable=inconsistent-return-statements raise HttpResponseError(response=response, model=error) response_headers = {} - if response.status_code == 200: - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - - if response.status_code == 202: - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) if cls: return cls(pipeline_response, None, response_headers) # type: ignore 
@@ -2845,8 +2835,6 @@ async def query( "bytearray", response.headers.get("x-ms-blob-content-md5") ) - deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) - if response.status_code == 206: response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) response_headers["x-ms-meta"] = self._deserialize("{str}", response.headers.get("x-ms-meta")) @@ -2908,7 +2896,7 @@ async def query( "bytearray", response.headers.get("x-ms-blob-content-md5") ) - deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_blob_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_blob_operations.py index 6796705afcc5..83033f6b9f4c 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_blob_operations.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_blob_operations.py @@ -1700,8 +1700,6 @@ def download( ) response_headers["x-ms-legal-hold"] = self._deserialize("bool", response.headers.get("x-ms-legal-hold")) - deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) - if response.status_code == 206: response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) response_headers["x-ms-creation-time"] = self._deserialize( @@ -1784,7 +1782,7 @@ def download( ) response_headers["x-ms-legal-hold"] = self._deserialize("bool", response.headers.get("x-ms-legal-hold")) - deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return 
cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -3981,19 +3979,11 @@ def set_tier( # pylint: disable=inconsistent-return-statements raise HttpResponseError(response=response, model=error) response_headers = {} - if response.status_code == 200: - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - - if response.status_code == 202: - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) if cls: return cls(pipeline_response, None, response_headers) # type: ignore @@ -4252,8 +4242,6 @@ def query( "bytearray", response.headers.get("x-ms-blob-content-md5") ) - deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) - if response.status_code == 206: response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) response_headers["x-ms-meta"] = self._deserialize("{str}", response.headers.get("x-ms-meta")) @@ -4315,7 +4303,7 @@ def query( "bytearray", response.headers.get("x-ms-blob-content-md5") ) - deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + deserialized = 
response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/_path_operations.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/_path_operations.py index edd172f6b53d..ad9a7a7294ea 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/_path_operations.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/_path_operations.py @@ -880,8 +880,6 @@ async def read( "str", response.headers.get("x-ms-encryption-key-sha256") ) - deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) - if response.status_code == 206: response_headers["Accept-Ranges"] = self._deserialize("str", response.headers.get("Accept-Ranges")) response_headers["Cache-Control"] = self._deserialize("str", response.headers.get("Cache-Control")) @@ -916,7 +914,7 @@ async def read( "str", response.headers.get("x-ms-encryption-key-sha256") ) - deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/_path_operations.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/_path_operations.py index 4c6d623ee029..eb2d08acfa37 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/_path_operations.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/_path_operations.py @@ -1742,8 +1742,6 @@ def read( "str", 
response.headers.get("x-ms-encryption-key-sha256") ) - deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) - if response.status_code == 206: response_headers["Accept-Ranges"] = self._deserialize("str", response.headers.get("Accept-Ranges")) response_headers["Cache-Control"] = self._deserialize("str", response.headers.get("Cache-Control")) @@ -1778,7 +1776,7 @@ def read( "str", response.headers.get("x-ms-encryption-key-sha256") ) - deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_file_operations.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_file_operations.py index 95d41bc650cf..35bc561a6c07 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_file_operations.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_file_operations.py @@ -318,131 +318,57 @@ async def download( raise HttpResponseError(response=response, model=error) response_headers = {} - if response.status_code == 200: - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-meta"] = self._deserialize("{str}", response.headers.get("x-ms-meta")) - response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length")) - response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) - response_headers["Content-Range"] = self._deserialize("str", response.headers.get("Content-Range")) - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - 
response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) - response_headers["Content-Encoding"] = self._deserialize("str", response.headers.get("Content-Encoding")) - response_headers["Cache-Control"] = self._deserialize("str", response.headers.get("Cache-Control")) - response_headers["Content-Disposition"] = self._deserialize( - "str", response.headers.get("Content-Disposition") - ) - response_headers["Content-Language"] = self._deserialize("str", response.headers.get("Content-Language")) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Accept-Ranges"] = self._deserialize("str", response.headers.get("Accept-Ranges")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["x-ms-copy-completion-time"] = self._deserialize( - "rfc-1123", response.headers.get("x-ms-copy-completion-time") - ) - response_headers["x-ms-copy-status-description"] = self._deserialize( - "str", response.headers.get("x-ms-copy-status-description") - ) - response_headers["x-ms-copy-id"] = self._deserialize("str", response.headers.get("x-ms-copy-id")) - response_headers["x-ms-copy-progress"] = self._deserialize( - "str", response.headers.get("x-ms-copy-progress") - ) - response_headers["x-ms-copy-source"] = self._deserialize("str", response.headers.get("x-ms-copy-source")) - response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status")) - response_headers["x-ms-content-md5"] = self._deserialize( - "bytearray", response.headers.get("x-ms-content-md5") - ) - response_headers["x-ms-server-encrypted"] = self._deserialize( - "bool", response.headers.get("x-ms-server-encrypted") - ) - response_headers["x-ms-file-attributes"] = self._deserialize( - "str", 
response.headers.get("x-ms-file-attributes") - ) - response_headers["x-ms-file-creation-time"] = self._deserialize( - "str", response.headers.get("x-ms-file-creation-time") - ) - response_headers["x-ms-file-last-write-time"] = self._deserialize( - "str", response.headers.get("x-ms-file-last-write-time") - ) - response_headers["x-ms-file-change-time"] = self._deserialize( - "str", response.headers.get("x-ms-file-change-time") - ) - response_headers["x-ms-file-permission-key"] = self._deserialize( - "str", response.headers.get("x-ms-file-permission-key") - ) - response_headers["x-ms-file-id"] = self._deserialize("str", response.headers.get("x-ms-file-id")) - response_headers["x-ms-file-parent-id"] = self._deserialize( - "str", response.headers.get("x-ms-file-parent-id") - ) - response_headers["x-ms-lease-duration"] = self._deserialize( - "str", response.headers.get("x-ms-lease-duration") - ) - response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state")) - response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status")) - - deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) - - if response.status_code == 206: - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-meta"] = self._deserialize("{str}", response.headers.get("x-ms-meta")) - response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length")) - response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) - response_headers["Content-Range"] = self._deserialize("str", response.headers.get("Content-Range")) - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) - response_headers["Content-Encoding"] = 
self._deserialize("str", response.headers.get("Content-Encoding")) - response_headers["Cache-Control"] = self._deserialize("str", response.headers.get("Cache-Control")) - response_headers["Content-Disposition"] = self._deserialize( - "str", response.headers.get("Content-Disposition") - ) - response_headers["Content-Language"] = self._deserialize("str", response.headers.get("Content-Language")) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Accept-Ranges"] = self._deserialize("str", response.headers.get("Accept-Ranges")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["x-ms-copy-completion-time"] = self._deserialize( - "rfc-1123", response.headers.get("x-ms-copy-completion-time") - ) - response_headers["x-ms-copy-status-description"] = self._deserialize( - "str", response.headers.get("x-ms-copy-status-description") - ) - response_headers["x-ms-copy-id"] = self._deserialize("str", response.headers.get("x-ms-copy-id")) - response_headers["x-ms-copy-progress"] = self._deserialize( - "str", response.headers.get("x-ms-copy-progress") - ) - response_headers["x-ms-copy-source"] = self._deserialize("str", response.headers.get("x-ms-copy-source")) - response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status")) - response_headers["x-ms-content-md5"] = self._deserialize( - "bytearray", response.headers.get("x-ms-content-md5") - ) - response_headers["x-ms-server-encrypted"] = self._deserialize( - "bool", response.headers.get("x-ms-server-encrypted") - ) - response_headers["x-ms-file-attributes"] = self._deserialize( - "str", response.headers.get("x-ms-file-attributes") - ) - response_headers["x-ms-file-creation-time"] = self._deserialize( - "str", response.headers.get("x-ms-file-creation-time") - ) - 
response_headers["x-ms-file-last-write-time"] = self._deserialize( - "str", response.headers.get("x-ms-file-last-write-time") - ) - response_headers["x-ms-file-change-time"] = self._deserialize( - "str", response.headers.get("x-ms-file-change-time") - ) - response_headers["x-ms-file-permission-key"] = self._deserialize( - "str", response.headers.get("x-ms-file-permission-key") - ) - response_headers["x-ms-file-id"] = self._deserialize("str", response.headers.get("x-ms-file-id")) - response_headers["x-ms-file-parent-id"] = self._deserialize( - "str", response.headers.get("x-ms-file-parent-id") - ) - response_headers["x-ms-lease-duration"] = self._deserialize( - "str", response.headers.get("x-ms-lease-duration") - ) - response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state")) - response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status")) - - deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-meta"] = self._deserialize("{str}", response.headers.get("x-ms-meta")) + response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length")) + response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) + response_headers["Content-Range"] = self._deserialize("str", response.headers.get("Content-Range")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) + response_headers["Content-Encoding"] = self._deserialize("str", response.headers.get("Content-Encoding")) + response_headers["Cache-Control"] = self._deserialize("str", response.headers.get("Cache-Control")) + response_headers["Content-Disposition"] = 
self._deserialize("str", response.headers.get("Content-Disposition")) + response_headers["Content-Language"] = self._deserialize("str", response.headers.get("Content-Language")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Accept-Ranges"] = self._deserialize("str", response.headers.get("Accept-Ranges")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-copy-completion-time"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-copy-completion-time") + ) + response_headers["x-ms-copy-status-description"] = self._deserialize( + "str", response.headers.get("x-ms-copy-status-description") + ) + response_headers["x-ms-copy-id"] = self._deserialize("str", response.headers.get("x-ms-copy-id")) + response_headers["x-ms-copy-progress"] = self._deserialize("str", response.headers.get("x-ms-copy-progress")) + response_headers["x-ms-copy-source"] = self._deserialize("str", response.headers.get("x-ms-copy-source")) + response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status")) + response_headers["x-ms-content-md5"] = self._deserialize("bytearray", response.headers.get("x-ms-content-md5")) + response_headers["x-ms-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-server-encrypted") + ) + response_headers["x-ms-file-attributes"] = self._deserialize( + "str", response.headers.get("x-ms-file-attributes") + ) + response_headers["x-ms-file-creation-time"] = self._deserialize( + "str", response.headers.get("x-ms-file-creation-time") + ) + response_headers["x-ms-file-last-write-time"] = self._deserialize( + "str", response.headers.get("x-ms-file-last-write-time") + ) + response_headers["x-ms-file-change-time"] = self._deserialize( + "str", 
response.headers.get("x-ms-file-change-time") + ) + response_headers["x-ms-file-permission-key"] = self._deserialize( + "str", response.headers.get("x-ms-file-permission-key") + ) + response_headers["x-ms-file-id"] = self._deserialize("str", response.headers.get("x-ms-file-id")) + response_headers["x-ms-file-parent-id"] = self._deserialize("str", response.headers.get("x-ms-file-parent-id")) + response_headers["x-ms-lease-duration"] = self._deserialize("str", response.headers.get("x-ms-lease-duration")) + response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state")) + response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status")) + + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_file_operations.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_file_operations.py index 471d62823c7a..c07fbd141e66 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_file_operations.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_file_operations.py @@ -1358,131 +1358,57 @@ def download( raise HttpResponseError(response=response, model=error) response_headers = {} - if response.status_code == 200: - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-meta"] = self._deserialize("{str}", response.headers.get("x-ms-meta")) - response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length")) - response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) - response_headers["Content-Range"] = 
self._deserialize("str", response.headers.get("Content-Range")) - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) - response_headers["Content-Encoding"] = self._deserialize("str", response.headers.get("Content-Encoding")) - response_headers["Cache-Control"] = self._deserialize("str", response.headers.get("Cache-Control")) - response_headers["Content-Disposition"] = self._deserialize( - "str", response.headers.get("Content-Disposition") - ) - response_headers["Content-Language"] = self._deserialize("str", response.headers.get("Content-Language")) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Accept-Ranges"] = self._deserialize("str", response.headers.get("Accept-Ranges")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["x-ms-copy-completion-time"] = self._deserialize( - "rfc-1123", response.headers.get("x-ms-copy-completion-time") - ) - response_headers["x-ms-copy-status-description"] = self._deserialize( - "str", response.headers.get("x-ms-copy-status-description") - ) - response_headers["x-ms-copy-id"] = self._deserialize("str", response.headers.get("x-ms-copy-id")) - response_headers["x-ms-copy-progress"] = self._deserialize( - "str", response.headers.get("x-ms-copy-progress") - ) - response_headers["x-ms-copy-source"] = self._deserialize("str", response.headers.get("x-ms-copy-source")) - response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status")) - response_headers["x-ms-content-md5"] = self._deserialize( - "bytearray", response.headers.get("x-ms-content-md5") - ) - response_headers["x-ms-server-encrypted"] = self._deserialize( - "bool", 
response.headers.get("x-ms-server-encrypted") - ) - response_headers["x-ms-file-attributes"] = self._deserialize( - "str", response.headers.get("x-ms-file-attributes") - ) - response_headers["x-ms-file-creation-time"] = self._deserialize( - "str", response.headers.get("x-ms-file-creation-time") - ) - response_headers["x-ms-file-last-write-time"] = self._deserialize( - "str", response.headers.get("x-ms-file-last-write-time") - ) - response_headers["x-ms-file-change-time"] = self._deserialize( - "str", response.headers.get("x-ms-file-change-time") - ) - response_headers["x-ms-file-permission-key"] = self._deserialize( - "str", response.headers.get("x-ms-file-permission-key") - ) - response_headers["x-ms-file-id"] = self._deserialize("str", response.headers.get("x-ms-file-id")) - response_headers["x-ms-file-parent-id"] = self._deserialize( - "str", response.headers.get("x-ms-file-parent-id") - ) - response_headers["x-ms-lease-duration"] = self._deserialize( - "str", response.headers.get("x-ms-lease-duration") - ) - response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state")) - response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status")) - - deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) - - if response.status_code == 206: - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-meta"] = self._deserialize("{str}", response.headers.get("x-ms-meta")) - response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length")) - response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) - response_headers["Content-Range"] = self._deserialize("str", response.headers.get("Content-Range")) - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Content-MD5"] 
= self._deserialize("bytearray", response.headers.get("Content-MD5")) - response_headers["Content-Encoding"] = self._deserialize("str", response.headers.get("Content-Encoding")) - response_headers["Cache-Control"] = self._deserialize("str", response.headers.get("Cache-Control")) - response_headers["Content-Disposition"] = self._deserialize( - "str", response.headers.get("Content-Disposition") - ) - response_headers["Content-Language"] = self._deserialize("str", response.headers.get("Content-Language")) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Accept-Ranges"] = self._deserialize("str", response.headers.get("Accept-Ranges")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["x-ms-copy-completion-time"] = self._deserialize( - "rfc-1123", response.headers.get("x-ms-copy-completion-time") - ) - response_headers["x-ms-copy-status-description"] = self._deserialize( - "str", response.headers.get("x-ms-copy-status-description") - ) - response_headers["x-ms-copy-id"] = self._deserialize("str", response.headers.get("x-ms-copy-id")) - response_headers["x-ms-copy-progress"] = self._deserialize( - "str", response.headers.get("x-ms-copy-progress") - ) - response_headers["x-ms-copy-source"] = self._deserialize("str", response.headers.get("x-ms-copy-source")) - response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status")) - response_headers["x-ms-content-md5"] = self._deserialize( - "bytearray", response.headers.get("x-ms-content-md5") - ) - response_headers["x-ms-server-encrypted"] = self._deserialize( - "bool", response.headers.get("x-ms-server-encrypted") - ) - response_headers["x-ms-file-attributes"] = self._deserialize( - "str", response.headers.get("x-ms-file-attributes") - ) - 
response_headers["x-ms-file-creation-time"] = self._deserialize( - "str", response.headers.get("x-ms-file-creation-time") - ) - response_headers["x-ms-file-last-write-time"] = self._deserialize( - "str", response.headers.get("x-ms-file-last-write-time") - ) - response_headers["x-ms-file-change-time"] = self._deserialize( - "str", response.headers.get("x-ms-file-change-time") - ) - response_headers["x-ms-file-permission-key"] = self._deserialize( - "str", response.headers.get("x-ms-file-permission-key") - ) - response_headers["x-ms-file-id"] = self._deserialize("str", response.headers.get("x-ms-file-id")) - response_headers["x-ms-file-parent-id"] = self._deserialize( - "str", response.headers.get("x-ms-file-parent-id") - ) - response_headers["x-ms-lease-duration"] = self._deserialize( - "str", response.headers.get("x-ms-lease-duration") - ) - response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state")) - response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status")) - - deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-meta"] = self._deserialize("{str}", response.headers.get("x-ms-meta")) + response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length")) + response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) + response_headers["Content-Range"] = self._deserialize("str", response.headers.get("Content-Range")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) + response_headers["Content-Encoding"] = self._deserialize("str", response.headers.get("Content-Encoding")) + 
response_headers["Cache-Control"] = self._deserialize("str", response.headers.get("Cache-Control")) + response_headers["Content-Disposition"] = self._deserialize("str", response.headers.get("Content-Disposition")) + response_headers["Content-Language"] = self._deserialize("str", response.headers.get("Content-Language")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Accept-Ranges"] = self._deserialize("str", response.headers.get("Accept-Ranges")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-copy-completion-time"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-copy-completion-time") + ) + response_headers["x-ms-copy-status-description"] = self._deserialize( + "str", response.headers.get("x-ms-copy-status-description") + ) + response_headers["x-ms-copy-id"] = self._deserialize("str", response.headers.get("x-ms-copy-id")) + response_headers["x-ms-copy-progress"] = self._deserialize("str", response.headers.get("x-ms-copy-progress")) + response_headers["x-ms-copy-source"] = self._deserialize("str", response.headers.get("x-ms-copy-source")) + response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status")) + response_headers["x-ms-content-md5"] = self._deserialize("bytearray", response.headers.get("x-ms-content-md5")) + response_headers["x-ms-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-server-encrypted") + ) + response_headers["x-ms-file-attributes"] = self._deserialize( + "str", response.headers.get("x-ms-file-attributes") + ) + response_headers["x-ms-file-creation-time"] = self._deserialize( + "str", response.headers.get("x-ms-file-creation-time") + ) + response_headers["x-ms-file-last-write-time"] = self._deserialize( + "str", 
response.headers.get("x-ms-file-last-write-time") + ) + response_headers["x-ms-file-change-time"] = self._deserialize( + "str", response.headers.get("x-ms-file-change-time") + ) + response_headers["x-ms-file-permission-key"] = self._deserialize( + "str", response.headers.get("x-ms-file-permission-key") + ) + response_headers["x-ms-file-id"] = self._deserialize("str", response.headers.get("x-ms-file-id")) + response_headers["x-ms-file-parent-id"] = self._deserialize("str", response.headers.get("x-ms-file-parent-id")) + response_headers["x-ms-lease-duration"] = self._deserialize("str", response.headers.get("x-ms-lease-duration")) + response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state")) + response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status")) + + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/operations/_queue_operations.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/operations/_queue_operations.py index 405d2fed8015..981c09a9943c 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/operations/_queue_operations.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/operations/_queue_operations.py @@ -124,15 +124,9 @@ async def create( # pylint: disable=inconsistent-return-statements raise HttpResponseError(response=response, model=error) response_headers = {} - if response.status_code == 201: - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - if 
response.status_code == 204: - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: return cls(pipeline_response, None, response_headers) # type: ignore diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/operations/_queue_operations.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/operations/_queue_operations.py index ed09febc29cd..56069f4acb6a 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/operations/_queue_operations.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/operations/_queue_operations.py @@ -330,15 +330,9 @@ def create( # pylint: disable=inconsistent-return-statements raise HttpResponseError(response=response, model=error) response_headers = {} - if response.status_code == 201: - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - if response.status_code == 204: - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + 
response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: return cls(pipeline_response, None, response_headers) # type: ignore From 85f8790a90e89754b5a40084c34c48544745fc37 Mon Sep 17 00:00:00 2001 From: Vincent Tran Date: Fri, 16 Aug 2024 13:20:39 -0700 Subject: [PATCH 17/22] Blob refactor --- .../storage/blob/_shared/policies_async.py | 65 +++++-------------- .../filedatalake/_shared/policies_async.py | 12 ---- .../fileshare/_shared/policies_async.py | 12 ---- .../storage/queue/_shared/policies_async.py | 12 ---- 4 files changed, 15 insertions(+), 86 deletions(-) diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/policies_async.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/policies_async.py index 9ba9f748da4a..29d404b3a1af 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/policies_async.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/policies_async.py @@ -17,12 +17,7 @@ from .authentication import AzureSigningError, StorageHttpChallenge from .constants import DEFAULT_OAUTH_SCOPE from .models import LocationMode -from .policies import StorageRetryPolicy, StorageContentValidation - -try: - _unicode_type = unicode # type: ignore -except NameError: - _unicode_type = str +from .policies import StorageRetryPolicy, StorageContentValidation, is_retry if TYPE_CHECKING: from azure.core.credentials_async import AsyncTokenCredential @@ -35,48 +30,6 @@ _LOGGER = logging.getLogger(__name__) -def encode_base64(data): - if isinstance(data, _unicode_type): - data = data.encode('utf-8') - encoded = base64.b64encode(data) - return encoded.decode('utf-8') - - -# Is this method/status code retryable? 
(Based on allowlists and control -# variables such as the number of total retries to allow, whether to -# respect the Retry-After header, whether this header is present, and -# whether the returned status code is on the list of status codes to -# be retried upon on the presence of the aforementioned header) -async def is_retry(response, mode): # pylint: disable=too-many-return-statements - status = response.http_response.status_code - if 300 <= status < 500: - # An exception occurred, but in most cases it was expected. Examples could - # include a 309 Conflict or 412 Precondition Failed. - if status == 404 and mode == LocationMode.SECONDARY: - # Response code 404 should be retried if secondary was used. - return True - if status == 408: - # Response code 408 is a timeout and should be retried. - return True - return False - if status >= 500: - # Response codes above 500 with the exception of 501 Not Implemented and - # 505 Version Not Supported indicate a server issue and should be retried. 
- if status in [501, 505]: - return False - return True - # retry if invalid content md5 - if response.context.get('validate_content', False) and response.http_response.headers.get('content-md5'): - try: - await response.http_response.read() # Load the body in memory and close the socket - except (StreamClosedError, StreamConsumedError): - pass - computed_md5 = response.http_request.headers.get('content-md5', None) or \ - encode_base64(StorageContentValidation.get_content_md5(response.http_response.content)) - if response.http_response.headers['content-md5'] != computed_md5: - return True - return False - async def retry_hook(settings, **kwargs): if settings['hook']: if asyncio.iscoroutine(settings['hook']): @@ -114,7 +67,13 @@ async def send(self, request: "PipelineRequest") -> "PipelineResponse": response = await self.next.send(request) - will_retry = await is_retry(response, request.context.options.get('mode')) + if response.context.get('validate_content', False) and response.http_response.headers.get('content-md5'): + try: + await response.http_response.read() # Load the body in memory and close the socket + except (StreamClosedError, StreamConsumedError): + pass + will_retry = is_retry(response, request.context.options.get('mode')) + # Auth error could come from Bearer challenge, in which case this request will be made again is_auth_error = response.http_response.status_code == 401 should_update_counts = not (will_retry or is_auth_error) @@ -160,7 +119,13 @@ async def send(self, request): while retries_remaining: try: response = await self.next.send(request) - if await is_retry(response, retry_settings['mode']): + + if response.context.get('validate_content', False) and response.http_response.headers.get('content-md5'): + try: + await response.http_response.read() # Load the body in memory and close the socket + except (StreamClosedError, StreamConsumedError): + pass + if is_retry(response, retry_settings['mode']): retries_remaining = self.increment( 
retry_settings, request=request.http_request, diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/policies_async.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/policies_async.py index d48792e294c9..065cbf7c63d6 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/policies_async.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/policies_async.py @@ -19,11 +19,6 @@ from .models import LocationMode from .policies import StorageRetryPolicy, StorageContentValidation -try: - _unicode_type = unicode # type: ignore -except NameError: - _unicode_type = str - if TYPE_CHECKING: from azure.core.credentials_async import AsyncTokenCredential from azure.core.pipeline import ( # pylint: disable=non-abstract-transport-import @@ -35,13 +30,6 @@ _LOGGER = logging.getLogger(__name__) -def encode_base64(data): - if isinstance(data, _unicode_type): - data = data.encode('utf-8') - encoded = base64.b64encode(data) - return encoded.decode('utf-8') - - # Is this method/status code retryable? 
(Based on allowlists and control # variables such as the number of total retries to allow, whether to # respect the Retry-After header, whether this header is present, and diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared/policies_async.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared/policies_async.py index 9ba9f748da4a..6b5420590748 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared/policies_async.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared/policies_async.py @@ -19,11 +19,6 @@ from .models import LocationMode from .policies import StorageRetryPolicy, StorageContentValidation -try: - _unicode_type = unicode # type: ignore -except NameError: - _unicode_type = str - if TYPE_CHECKING: from azure.core.credentials_async import AsyncTokenCredential from azure.core.pipeline.transport import ( # pylint: disable=non-abstract-transport-import @@ -35,13 +30,6 @@ _LOGGER = logging.getLogger(__name__) -def encode_base64(data): - if isinstance(data, _unicode_type): - data = data.encode('utf-8') - encoded = base64.b64encode(data) - return encoded.decode('utf-8') - - # Is this method/status code retryable? 
(Based on allowlists and control # variables such as the number of total retries to allow, whether to # respect the Retry-After header, whether this header is present, and diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/policies_async.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/policies_async.py index 9ba9f748da4a..6b5420590748 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/policies_async.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/policies_async.py @@ -19,11 +19,6 @@ from .models import LocationMode from .policies import StorageRetryPolicy, StorageContentValidation -try: - _unicode_type = unicode # type: ignore -except NameError: - _unicode_type = str - if TYPE_CHECKING: from azure.core.credentials_async import AsyncTokenCredential from azure.core.pipeline.transport import ( # pylint: disable=non-abstract-transport-import @@ -35,13 +30,6 @@ _LOGGER = logging.getLogger(__name__) -def encode_base64(data): - if isinstance(data, _unicode_type): - data = data.encode('utf-8') - encoded = base64.b64encode(data) - return encoded.decode('utf-8') - - # Is this method/status code retryable? 
(Based on allowlists and control # variables such as the number of total retries to allow, whether to # respect the Retry-After header, whether this header is present, and From f5b0dd0f92b98fa0fc716a6255156e4277e4082b Mon Sep 17 00:00:00 2001 From: Vincent Tran Date: Fri, 16 Aug 2024 14:34:58 -0700 Subject: [PATCH 18/22] Refactor pt2 --- .../azure/storage/blob/_shared/policies.py | 13 ++++++++++-- .../storage/blob/_shared/policies_async.py | 20 +++++++++++++++---- 2 files changed, 27 insertions(+), 6 deletions(-) diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/policies.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/policies.py index 803515717797..e085383fb528 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/policies.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/policies.py @@ -104,6 +104,15 @@ def is_retry(response, mode): # pylint: disable=too-many-return-statements return False +def is_checksum_retry(response): + if response.context.get('validate_content', False) and response.http_response.headers.get('content-md5'): + computed_md5 = response.http_request.headers.get('content-md5', None) or \ + encode_base64(StorageContentValidation.get_content_md5(response.http_response.body())) + if response.http_response.headers['content-md5'] != computed_md5: + return True + return False + + def urljoin(base_url, stub_url): parsed = urlparse(base_url) parsed = parsed._replace(path=parsed.path + '/' + stub_url) @@ -301,7 +310,7 @@ def send(self, request: "PipelineRequest") -> "PipelineResponse": response = self.next.send(request) - will_retry = is_retry(response, request.context.options.get('mode')) + will_retry = is_retry(response, request.context.options.get('mode')) or is_checksum_retry(response) # Auth error could come from Bearer challenge, in which case this request will be made again is_auth_error = response.http_response.status_code == 401 should_update_counts = not (will_retry or 
is_auth_error) @@ -527,7 +536,7 @@ def send(self, request): while retries_remaining: try: response = self.next.send(request) - if is_retry(response, retry_settings['mode']): + if is_retry(response, request.context.options.get('mode')) or is_checksum_retry(response): retries_remaining = self.increment( retry_settings, request=request.http_request, diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/policies_async.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/policies_async.py index 29d404b3a1af..bfa9e0a5f6be 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/policies_async.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/policies_async.py @@ -16,8 +16,7 @@ from .authentication import AzureSigningError, StorageHttpChallenge from .constants import DEFAULT_OAUTH_SCOPE -from .models import LocationMode -from .policies import StorageRetryPolicy, StorageContentValidation, is_retry +from .policies import encode_base64, is_retry, StorageContentValidation, StorageRetryPolicy if TYPE_CHECKING: from azure.core.credentials_async import AsyncTokenCredential @@ -44,6 +43,19 @@ async def retry_hook(settings, **kwargs): **kwargs) +async def is_checksum_retry(response): + if response.context.get('validate_content', False) and response.http_response.headers.get('content-md5'): + try: + await response.http_response.read() # Load the body in memory and close the socket + except (StreamClosedError, StreamConsumedError): + pass + computed_md5 = response.http_request.headers.get('content-md5', None) or \ + encode_base64(StorageContentValidation.get_content_md5(response.http_response.content)) + if response.http_response.headers['content-md5'] != computed_md5: + return True + return False + + class AsyncStorageResponseHook(AsyncHTTPPolicy): def __init__(self, **kwargs): # pylint: disable=unused-argument @@ -72,7 +84,7 @@ async def send(self, request: "PipelineRequest") -> "PipelineResponse": await 
response.http_response.read() # Load the body in memory and close the socket except (StreamClosedError, StreamConsumedError): pass - will_retry = is_retry(response, request.context.options.get('mode')) + will_retry = is_retry(response, request.context.options.get('mode')) or await is_checksum_retry(response) # Auth error could come from Bearer challenge, in which case this request will be made again is_auth_error = response.http_response.status_code == 401 @@ -125,7 +137,7 @@ async def send(self, request): await response.http_response.read() # Load the body in memory and close the socket except (StreamClosedError, StreamConsumedError): pass - if is_retry(response, retry_settings['mode']): + if is_retry(response, request.context.options.get('mode')) or await is_checksum_retry(response): retries_remaining = self.increment( retry_settings, request=request.http_request, From 3afb6579c095df37fedd18b75c652659b166574f Mon Sep 17 00:00:00 2001 From: Vincent Tran Date: Fri, 16 Aug 2024 14:36:07 -0700 Subject: [PATCH 19/22] Refactor nit --- .../azure/storage/blob/_shared/policies.py | 8 +------- .../azure/storage/blob/_shared/policies_async.py | 1 + 2 files changed, 2 insertions(+), 7 deletions(-) diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/policies.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/policies.py index e085383fb528..4c933035a13d 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/policies.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/policies.py @@ -95,16 +95,10 @@ def is_retry(response, mode): # pylint: disable=too-many-return-statements if status in [501, 505]: return False return True - # retry if invalid content md5 - if response.context.get('validate_content', False) and response.http_response.headers.get('content-md5'): - computed_md5 = response.http_request.headers.get('content-md5', None) or \ - 
encode_base64(StorageContentValidation.get_content_md5(response.http_response.body())) - if response.http_response.headers['content-md5'] != computed_md5: - return True - return False def is_checksum_retry(response): + # retry if invalid content md5 if response.context.get('validate_content', False) and response.http_response.headers.get('content-md5'): computed_md5 = response.http_request.headers.get('content-md5', None) or \ encode_base64(StorageContentValidation.get_content_md5(response.http_response.body())) diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/policies_async.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/policies_async.py index bfa9e0a5f6be..66b132e37029 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/policies_async.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/policies_async.py @@ -44,6 +44,7 @@ async def retry_hook(settings, **kwargs): async def is_checksum_retry(response): + # retry if invalid content md5 if response.context.get('validate_content', False) and response.http_response.headers.get('content-md5'): try: await response.http_response.read() # Load the body in memory and close the socket From f5d523eb737a317fa71b068cca081c03a1605c55 Mon Sep 17 00:00:00 2001 From: Vincent Tran Date: Fri, 16 Aug 2024 14:50:40 -0700 Subject: [PATCH 20/22] Full refactor --- .../azure/storage/blob/_shared/policies.py | 9 +-- .../storage/filedatalake/_shared/policies.py | 16 +++--- .../filedatalake/_shared/policies_async.py | 56 ++++++------------- .../storage/fileshare/_shared/policies.py | 16 +++--- .../fileshare/_shared/policies_async.py | 56 ++++++------------- .../azure/storage/queue/_shared/policies.py | 16 +++--- .../storage/queue/_shared/policies_async.py | 56 ++++++------------- 7 files changed, 74 insertions(+), 151 deletions(-) diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/policies.py 
b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/policies.py index 4c933035a13d..bdf6f70123ca 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/policies.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/policies.py @@ -35,11 +35,6 @@ from .constants import DEFAULT_OAUTH_SCOPE from .models import LocationMode -try: - _unicode_type = unicode # type: ignore -except NameError: - _unicode_type = str - if TYPE_CHECKING: from azure.core.credentials import TokenCredential from azure.core.pipeline.transport import ( # pylint: disable=non-abstract-transport-import @@ -52,7 +47,7 @@ def encode_base64(data): - if isinstance(data, _unicode_type): + if isinstance(data, str): data = data.encode('utf-8') encoded = base64.b64encode(data) return encoded.decode('utf-8') @@ -101,7 +96,7 @@ def is_checksum_retry(response): # retry if invalid content md5 if response.context.get('validate_content', False) and response.http_response.headers.get('content-md5'): computed_md5 = response.http_request.headers.get('content-md5', None) or \ - encode_base64(StorageContentValidation.get_content_md5(response.http_response.body())) + encode_base64(StorageContentValidation.get_content_md5(response.http_response.body())) if response.http_response.headers['content-md5'] != computed_md5: return True return False diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/policies.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/policies.py index 09600f0bafb1..111f7628ef14 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/policies.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/policies.py @@ -35,11 +35,6 @@ from .constants import DEFAULT_OAUTH_SCOPE from .models import LocationMode -try: - _unicode_type = unicode # type: ignore -except NameError: - _unicode_type = str - if TYPE_CHECKING: from azure.core.credentials import 
TokenCredential from azure.core.pipeline import ( # pylint: disable=non-abstract-transport-import @@ -52,7 +47,7 @@ def encode_base64(data): - if isinstance(data, _unicode_type): + if isinstance(data, str): data = data.encode('utf-8') encoded = base64.b64encode(data) return encoded.decode('utf-8') @@ -95,10 +90,13 @@ def is_retry(response, mode): # pylint: disable=too-many-return-statements if status in [501, 505]: return False return True + + +def is_checksum_retry(response): # retry if invalid content md5 if response.context.get('validate_content', False) and response.http_response.headers.get('content-md5'): computed_md5 = response.http_request.headers.get('content-md5', None) or \ - encode_base64(StorageContentValidation.get_content_md5(response.http_response.body())) + encode_base64(StorageContentValidation.get_content_md5(response.http_response.body())) if response.http_response.headers['content-md5'] != computed_md5: return True return False @@ -301,7 +299,7 @@ def send(self, request: "PipelineRequest") -> "PipelineResponse": response = self.next.send(request) - will_retry = is_retry(response, request.context.options.get('mode')) + will_retry = is_retry(response, request.context.options.get('mode')) or is_checksum_retry(response) # Auth error could come from Bearer challenge, in which case this request will be made again is_auth_error = response.http_response.status_code == 401 should_update_counts = not (will_retry or is_auth_error) @@ -529,7 +527,7 @@ def send(self, request): while retries_remaining: try: response = self.next.send(request) - if is_retry(response, retry_settings['mode']): + if is_retry(response, request.context.options.get('mode')) or is_checksum_retry(response): retries_remaining = self.increment( retry_settings, request=request.http_request, diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/policies_async.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/policies_async.py 
index 065cbf7c63d6..de9af9621e8b 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/policies_async.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/policies_async.py @@ -16,8 +16,7 @@ from .authentication import AzureSigningError, StorageHttpChallenge from .constants import DEFAULT_OAUTH_SCOPE -from .models import LocationMode -from .policies import StorageRetryPolicy, StorageContentValidation +from .policies import encode_base64, is_retry, StorageContentValidation, StorageRetryPolicy if TYPE_CHECKING: from azure.core.credentials_async import AsyncTokenCredential @@ -30,41 +29,6 @@ _LOGGER = logging.getLogger(__name__) -# Is this method/status code retryable? (Based on allowlists and control -# variables such as the number of total retries to allow, whether to -# respect the Retry-After header, whether this header is present, and -# whether the returned status code is on the list of status codes to -# be retried upon on the presence of the aforementioned header) -async def is_retry(response, mode): # pylint: disable=too-many-return-statements - status = response.http_response.status_code - if 300 <= status < 500: - # An exception occurred, but in most cases it was expected. Examples could - # include a 309 Conflict or 412 Precondition Failed. - if status == 404 and mode == LocationMode.SECONDARY: - # Response code 404 should be retried if secondary was used. - return True - if status == 408: - # Response code 408 is a timeout and should be retried. - return True - return False - if status >= 500: - # Response codes above 500 with the exception of 501 Not Implemented and - # 505 Version Not Supported indicate a server issue and should be retried. 
- if status in [501, 505]: - return False - return True - # retry if invalid content md5 - if response.context.get('validate_content', False) and response.http_response.headers.get('content-md5'): - try: - await response.http_response.read() # Load the body in memory and close the socket - except (StreamClosedError, StreamConsumedError): - pass - computed_md5 = response.http_request.headers.get('content-md5', None) or \ - encode_base64(StorageContentValidation.get_content_md5(response.http_response.content)) - if response.http_response.headers['content-md5'] != computed_md5: - return True - return False - async def retry_hook(settings, **kwargs): if settings['hook']: if asyncio.iscoroutine(settings['hook']): @@ -79,6 +43,20 @@ async def retry_hook(settings, **kwargs): **kwargs) +async def is_checksum_retry(response): + # retry if invalid content md5 + if response.context.get('validate_content', False) and response.http_response.headers.get('content-md5'): + try: + await response.http_response.read() # Load the body in memory and close the socket + except (StreamClosedError, StreamConsumedError): + pass + computed_md5 = response.http_request.headers.get('content-md5', None) or \ + encode_base64(StorageContentValidation.get_content_md5(response.http_response.content)) + if response.http_response.headers['content-md5'] != computed_md5: + return True + return False + + class AsyncStorageResponseHook(AsyncHTTPPolicy): def __init__(self, **kwargs): # pylint: disable=unused-argument @@ -102,7 +80,7 @@ async def send(self, request: "PipelineRequest") -> "PipelineResponse": response = await self.next.send(request) - will_retry = await is_retry(response, request.context.options.get('mode')) + will_retry = is_retry(response, request.context.options.get('mode')) or await is_checksum_retry(response) # Auth error could come from Bearer challenge, in which case this request will be made again is_auth_error = response.http_response.status_code == 401 should_update_counts = not 
(will_retry or is_auth_error) @@ -148,7 +126,7 @@ async def send(self, request): while retries_remaining: try: response = await self.next.send(request) - if await is_retry(response, retry_settings['mode']): + if is_retry(response, request.context.options.get('mode')) or await is_checksum_retry(response): retries_remaining = self.increment( retry_settings, request=request.http_request, diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared/policies.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared/policies.py index 01383d8af468..f2d7a17993aa 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared/policies.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared/policies.py @@ -35,11 +35,6 @@ from .constants import DEFAULT_OAUTH_SCOPE from .models import LocationMode -try: - _unicode_type = unicode # type: ignore -except NameError: - _unicode_type = str - if TYPE_CHECKING: from azure.core.credentials import TokenCredential from azure.core.pipeline.transport import ( # pylint: disable=non-abstract-transport-import @@ -52,7 +47,7 @@ def encode_base64(data): - if isinstance(data, _unicode_type): + if isinstance(data, str): data = data.encode('utf-8') encoded = base64.b64encode(data) return encoded.decode('utf-8') @@ -95,10 +90,13 @@ def is_retry(response, mode): # pylint: disable=too-many-return-statements if status in [501, 505]: return False return True + + +def is_checksum_retry(response): # retry if invalid content md5 if response.context.get('validate_content', False) and response.http_response.headers.get('content-md5'): computed_md5 = response.http_request.headers.get('content-md5', None) or \ - encode_base64(StorageContentValidation.get_content_md5(response.http_response.body())) + encode_base64(StorageContentValidation.get_content_md5(response.http_response.body())) if response.http_response.headers['content-md5'] != computed_md5: return True return False @@ -301,7 
+299,7 @@ def send(self, request: "PipelineRequest") -> "PipelineResponse": response = self.next.send(request) - will_retry = is_retry(response, request.context.options.get('mode')) + will_retry = is_retry(response, request.context.options.get('mode')) or is_checksum_retry(response) # Auth error could come from Bearer challenge, in which case this request will be made again is_auth_error = response.http_response.status_code == 401 should_update_counts = not (will_retry or is_auth_error) @@ -526,7 +524,7 @@ def send(self, request): while retries_remaining: try: response = self.next.send(request) - if is_retry(response, retry_settings['mode']): + if is_retry(response, request.context.options.get('mode')) or is_checksum_retry(response): retries_remaining = self.increment( retry_settings, request=request.http_request, diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared/policies_async.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared/policies_async.py index 6b5420590748..eea951fc3111 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared/policies_async.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared/policies_async.py @@ -16,8 +16,7 @@ from .authentication import AzureSigningError, StorageHttpChallenge from .constants import DEFAULT_OAUTH_SCOPE -from .models import LocationMode -from .policies import StorageRetryPolicy, StorageContentValidation +from .policies import encode_base64, is_retry, StorageContentValidation, StorageRetryPolicy if TYPE_CHECKING: from azure.core.credentials_async import AsyncTokenCredential @@ -30,41 +29,6 @@ _LOGGER = logging.getLogger(__name__) -# Is this method/status code retryable? 
(Based on allowlists and control -# variables such as the number of total retries to allow, whether to -# respect the Retry-After header, whether this header is present, and -# whether the returned status code is on the list of status codes to -# be retried upon on the presence of the aforementioned header) -async def is_retry(response, mode): # pylint: disable=too-many-return-statements - status = response.http_response.status_code - if 300 <= status < 500: - # An exception occurred, but in most cases it was expected. Examples could - # include a 309 Conflict or 412 Precondition Failed. - if status == 404 and mode == LocationMode.SECONDARY: - # Response code 404 should be retried if secondary was used. - return True - if status == 408: - # Response code 408 is a timeout and should be retried. - return True - return False - if status >= 500: - # Response codes above 500 with the exception of 501 Not Implemented and - # 505 Version Not Supported indicate a server issue and should be retried. 
- if status in [501, 505]: - return False - return True - # retry if invalid content md5 - if response.context.get('validate_content', False) and response.http_response.headers.get('content-md5'): - try: - await response.http_response.read() # Load the body in memory and close the socket - except (StreamClosedError, StreamConsumedError): - pass - computed_md5 = response.http_request.headers.get('content-md5', None) or \ - encode_base64(StorageContentValidation.get_content_md5(response.http_response.content)) - if response.http_response.headers['content-md5'] != computed_md5: - return True - return False - async def retry_hook(settings, **kwargs): if settings['hook']: if asyncio.iscoroutine(settings['hook']): @@ -79,6 +43,20 @@ async def retry_hook(settings, **kwargs): **kwargs) +async def is_checksum_retry(response): + # retry if invalid content md5 + if response.context.get('validate_content', False) and response.http_response.headers.get('content-md5'): + try: + await response.http_response.read() # Load the body in memory and close the socket + except (StreamClosedError, StreamConsumedError): + pass + computed_md5 = response.http_request.headers.get('content-md5', None) or \ + encode_base64(StorageContentValidation.get_content_md5(response.http_response.content)) + if response.http_response.headers['content-md5'] != computed_md5: + return True + return False + + class AsyncStorageResponseHook(AsyncHTTPPolicy): def __init__(self, **kwargs): # pylint: disable=unused-argument @@ -102,7 +80,7 @@ async def send(self, request: "PipelineRequest") -> "PipelineResponse": response = await self.next.send(request) - will_retry = await is_retry(response, request.context.options.get('mode')) + will_retry = is_retry(response, request.context.options.get('mode')) or await is_checksum_retry(response) # Auth error could come from Bearer challenge, in which case this request will be made again is_auth_error = response.http_response.status_code == 401 should_update_counts = not 
(will_retry or is_auth_error) @@ -148,7 +126,7 @@ async def send(self, request): while retries_remaining: try: response = await self.next.send(request) - if await is_retry(response, retry_settings['mode']): + if is_retry(response, request.context.options.get('mode')) or await is_checksum_retry(response): retries_remaining = self.increment( retry_settings, request=request.http_request, diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/policies.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/policies.py index 4f22c181bb2e..0fb1aa0a7d05 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/policies.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/policies.py @@ -35,11 +35,6 @@ from .constants import DEFAULT_OAUTH_SCOPE from .models import LocationMode -try: - _unicode_type = unicode # type: ignore -except NameError: - _unicode_type = str - if TYPE_CHECKING: from azure.core.credentials import TokenCredential from azure.core.pipeline.transport import ( # pylint: disable=non-abstract-transport-import @@ -52,7 +47,7 @@ def encode_base64(data): - if isinstance(data, _unicode_type): + if isinstance(data, str): data = data.encode('utf-8') encoded = base64.b64encode(data) return encoded.decode('utf-8') @@ -95,10 +90,13 @@ def is_retry(response, mode): # pylint: disable=too-many-return-statements if status in [501, 505]: return False return True + + +def is_checksum_retry(response): # retry if invalid content md5 if response.context.get('validate_content', False) and response.http_response.headers.get('content-md5'): computed_md5 = response.http_request.headers.get('content-md5', None) or \ - encode_base64(StorageContentValidation.get_content_md5(response.http_response.body())) + encode_base64(StorageContentValidation.get_content_md5(response.http_response.body())) if response.http_response.headers['content-md5'] != computed_md5: return True return False @@ -307,7 +305,7 @@ def send(self, request: 
"PipelineRequest") -> "PipelineResponse": response = self.next.send(request) - will_retry = is_retry(response, request.context.options.get('mode')) + will_retry = is_retry(response, request.context.options.get('mode')) or is_checksum_retry(response) # Auth error could come from Bearer challenge, in which case this request will be made again is_auth_error = response.http_response.status_code == 401 should_update_counts = not (will_retry or is_auth_error) @@ -532,7 +530,7 @@ def send(self, request): while retries_remaining: try: response = self.next.send(request) - if is_retry(response, retry_settings['mode']): + if is_retry(response, request.context.options.get('mode')) or is_checksum_retry(response): retries_remaining = self.increment( retry_settings, request=request.http_request, diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/policies_async.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/policies_async.py index 6b5420590748..eea951fc3111 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/policies_async.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/policies_async.py @@ -16,8 +16,7 @@ from .authentication import AzureSigningError, StorageHttpChallenge from .constants import DEFAULT_OAUTH_SCOPE -from .models import LocationMode -from .policies import StorageRetryPolicy, StorageContentValidation +from .policies import encode_base64, is_retry, StorageContentValidation, StorageRetryPolicy if TYPE_CHECKING: from azure.core.credentials_async import AsyncTokenCredential @@ -30,41 +29,6 @@ _LOGGER = logging.getLogger(__name__) -# Is this method/status code retryable? 
(Based on allowlists and control -# variables such as the number of total retries to allow, whether to -# respect the Retry-After header, whether this header is present, and -# whether the returned status code is on the list of status codes to -# be retried upon on the presence of the aforementioned header) -async def is_retry(response, mode): # pylint: disable=too-many-return-statements - status = response.http_response.status_code - if 300 <= status < 500: - # An exception occurred, but in most cases it was expected. Examples could - # include a 309 Conflict or 412 Precondition Failed. - if status == 404 and mode == LocationMode.SECONDARY: - # Response code 404 should be retried if secondary was used. - return True - if status == 408: - # Response code 408 is a timeout and should be retried. - return True - return False - if status >= 500: - # Response codes above 500 with the exception of 501 Not Implemented and - # 505 Version Not Supported indicate a server issue and should be retried. 
- if status in [501, 505]: - return False - return True - # retry if invalid content md5 - if response.context.get('validate_content', False) and response.http_response.headers.get('content-md5'): - try: - await response.http_response.read() # Load the body in memory and close the socket - except (StreamClosedError, StreamConsumedError): - pass - computed_md5 = response.http_request.headers.get('content-md5', None) or \ - encode_base64(StorageContentValidation.get_content_md5(response.http_response.content)) - if response.http_response.headers['content-md5'] != computed_md5: - return True - return False - async def retry_hook(settings, **kwargs): if settings['hook']: if asyncio.iscoroutine(settings['hook']): @@ -79,6 +43,20 @@ async def retry_hook(settings, **kwargs): **kwargs) +async def is_checksum_retry(response): + # retry if invalid content md5 + if response.context.get('validate_content', False) and response.http_response.headers.get('content-md5'): + try: + await response.http_response.read() # Load the body in memory and close the socket + except (StreamClosedError, StreamConsumedError): + pass + computed_md5 = response.http_request.headers.get('content-md5', None) or \ + encode_base64(StorageContentValidation.get_content_md5(response.http_response.content)) + if response.http_response.headers['content-md5'] != computed_md5: + return True + return False + + class AsyncStorageResponseHook(AsyncHTTPPolicy): def __init__(self, **kwargs): # pylint: disable=unused-argument @@ -102,7 +80,7 @@ async def send(self, request: "PipelineRequest") -> "PipelineResponse": response = await self.next.send(request) - will_retry = await is_retry(response, request.context.options.get('mode')) + will_retry = is_retry(response, request.context.options.get('mode')) or await is_checksum_retry(response) # Auth error could come from Bearer challenge, in which case this request will be made again is_auth_error = response.http_response.status_code == 401 should_update_counts = not 
(will_retry or is_auth_error) @@ -148,7 +126,7 @@ async def send(self, request): while retries_remaining: try: response = await self.next.send(request) - if await is_retry(response, retry_settings['mode']): + if is_retry(response, request.context.options.get('mode')) or await is_checksum_retry(response): retries_remaining = self.increment( retry_settings, request=request.http_request, From 50ecd1551749654455318abdeffd46d38c6451dd Mon Sep 17 00:00:00 2001 From: Vincent Tran Date: Fri, 16 Aug 2024 15:31:43 -0700 Subject: [PATCH 21/22] Fix bad C+P in refactor --- .../azure/storage/blob/_shared/policies.py | 2 +- .../azure/storage/blob/_shared/policies_async.py | 14 +------------- .../azure/storage/filedatalake/_shared/policies.py | 2 +- .../storage/filedatalake/_shared/policies_async.py | 2 +- .../azure/storage/fileshare/_shared/policies.py | 2 +- .../storage/fileshare/_shared/policies_async.py | 2 +- .../azure/storage/queue/_shared/policies.py | 2 +- .../azure/storage/queue/_shared/policies_async.py | 2 +- 8 files changed, 8 insertions(+), 20 deletions(-) diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/policies.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/policies.py index bdf6f70123ca..21793bf46ef8 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/policies.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/policies.py @@ -525,7 +525,7 @@ def send(self, request): while retries_remaining: try: response = self.next.send(request) - if is_retry(response, request.context.options.get('mode')) or is_checksum_retry(response): + if is_retry(response, retry_settings['mode']) or is_checksum_retry(response): retries_remaining = self.increment( retry_settings, request=request.http_request, diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/policies_async.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/policies_async.py index 66b132e37029..ea35c39b1ce9 100644 --- 
a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/policies_async.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/policies_async.py @@ -79,12 +79,6 @@ async def send(self, request: "PipelineRequest") -> "PipelineResponse": request.context.options.pop('raw_response_hook', self._response_callback) response = await self.next.send(request) - - if response.context.get('validate_content', False) and response.http_response.headers.get('content-md5'): - try: - await response.http_response.read() # Load the body in memory and close the socket - except (StreamClosedError, StreamConsumedError): - pass will_retry = is_retry(response, request.context.options.get('mode')) or await is_checksum_retry(response) # Auth error could come from Bearer challenge, in which case this request will be made again @@ -132,13 +126,7 @@ async def send(self, request): while retries_remaining: try: response = await self.next.send(request) - - if response.context.get('validate_content', False) and response.http_response.headers.get('content-md5'): - try: - await response.http_response.read() # Load the body in memory and close the socket - except (StreamClosedError, StreamConsumedError): - pass - if is_retry(response, request.context.options.get('mode')) or await is_checksum_retry(response): + if is_retry(response, retry_settings['mode']) or await is_checksum_retry(response): retries_remaining = self.increment( retry_settings, request=request.http_request, diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/policies.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/policies.py index 111f7628ef14..7d9ab5c1d76b 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/policies.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/policies.py @@ -527,7 +527,7 @@ def send(self, request): while retries_remaining: try: response = self.next.send(request) 
- if is_retry(response, request.context.options.get('mode')) or is_checksum_retry(response): + if is_retry(response, retry_settings['mode']) or is_checksum_retry(response): retries_remaining = self.increment( retry_settings, request=request.http_request, diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/policies_async.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/policies_async.py index de9af9621e8b..ab6d488e2bea 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/policies_async.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/policies_async.py @@ -126,7 +126,7 @@ async def send(self, request): while retries_remaining: try: response = await self.next.send(request) - if is_retry(response, request.context.options.get('mode')) or await is_checksum_retry(response): + if is_retry(response, retry_settings['mode']) or await is_checksum_retry(response): retries_remaining = self.increment( retry_settings, request=request.http_request, diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared/policies.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared/policies.py index f2d7a17993aa..f2c951be89c1 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared/policies.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared/policies.py @@ -524,7 +524,7 @@ def send(self, request): while retries_remaining: try: response = self.next.send(request) - if is_retry(response, request.context.options.get('mode')) or is_checksum_retry(response): + if is_retry(response, retry_settings['mode']) or is_checksum_retry(response): retries_remaining = self.increment( retry_settings, request=request.http_request, diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared/policies_async.py 
b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared/policies_async.py index eea951fc3111..4f01a43a9a4f 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared/policies_async.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared/policies_async.py @@ -126,7 +126,7 @@ async def send(self, request): while retries_remaining: try: response = await self.next.send(request) - if is_retry(response, request.context.options.get('mode')) or await is_checksum_retry(response): + if is_retry(response, retry_settings['mode']) or await is_checksum_retry(response): retries_remaining = self.increment( retry_settings, request=request.http_request, diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/policies.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/policies.py index 0fb1aa0a7d05..4d527cf68330 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/policies.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/policies.py @@ -530,7 +530,7 @@ def send(self, request): while retries_remaining: try: response = self.next.send(request) - if is_retry(response, request.context.options.get('mode')) or is_checksum_retry(response): + if is_retry(response, retry_settings['mode']) or is_checksum_retry(response): retries_remaining = self.increment( retry_settings, request=request.http_request, diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/policies_async.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/policies_async.py index eea951fc3111..4f01a43a9a4f 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/policies_async.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/policies_async.py @@ -126,7 +126,7 @@ async def send(self, request): while retries_remaining: try: response = await self.next.send(request) - if is_retry(response, request.context.options.get('mode')) or await 
is_checksum_retry(response): + if is_retry(response, retry_settings['mode']) or await is_checksum_retry(response): retries_remaining = self.increment( retry_settings, request=request.http_request, From e0c7465e252b17d875c158b5a6ae1d8a3eb53eaf Mon Sep 17 00:00:00 2001 From: Vincent Tran Date: Fri, 16 Aug 2024 17:51:50 -0700 Subject: [PATCH 22/22] Pylint --- .../azure-storage-blob/azure/storage/blob/_shared/policies.py | 1 + .../azure/storage/blob/_shared/policies_async.py | 3 +-- .../azure/storage/filedatalake/_shared/policies.py | 1 + .../azure/storage/filedatalake/_shared/policies_async.py | 1 - .../azure/storage/fileshare/_shared/policies.py | 1 + .../azure/storage/fileshare/_shared/policies_async.py | 1 - .../azure/storage/queue/_shared/policies.py | 2 +- .../azure/storage/queue/_shared/policies_async.py | 1 - 8 files changed, 5 insertions(+), 6 deletions(-) diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/policies.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/policies.py index 21793bf46ef8..07de6ad5579e 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/policies.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/policies.py @@ -90,6 +90,7 @@ def is_retry(response, mode): # pylint: disable=too-many-return-statements if status in [501, 505]: return False return True + return False def is_checksum_retry(response): diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/policies_async.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/policies_async.py index ea35c39b1ce9..67987a090f33 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/policies_async.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/policies_async.py @@ -6,7 +6,6 @@ # pylint: disable=invalid-overridden-method import asyncio -import base64 import logging import random from typing import Any, Dict, TYPE_CHECKING @@ -80,7 +79,7 @@ async def send(self, request: 
"PipelineRequest") -> "PipelineResponse": response = await self.next.send(request) will_retry = is_retry(response, request.context.options.get('mode')) or await is_checksum_retry(response) - + # Auth error could come from Bearer challenge, in which case this request will be made again is_auth_error = response.http_response.status_code == 401 should_update_counts = not (will_retry or is_auth_error) diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/policies.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/policies.py index 7d9ab5c1d76b..6e07fae56e70 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/policies.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/policies.py @@ -90,6 +90,7 @@ def is_retry(response, mode): # pylint: disable=too-many-return-statements if status in [501, 505]: return False return True + return False def is_checksum_retry(response): diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/policies_async.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/policies_async.py index ab6d488e2bea..66faaf7ce894 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/policies_async.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/policies_async.py @@ -6,7 +6,6 @@ # pylint: disable=invalid-overridden-method import asyncio -import base64 import logging import random from typing import Any, Dict, TYPE_CHECKING diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared/policies.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared/policies.py index f2c951be89c1..dc04463a5835 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared/policies.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared/policies.py 
@@ -90,6 +90,7 @@ def is_retry(response, mode): # pylint: disable=too-many-return-statements if status in [501, 505]: return False return True + return False def is_checksum_retry(response): diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared/policies_async.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared/policies_async.py index 4f01a43a9a4f..374077bdd4d9 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared/policies_async.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared/policies_async.py @@ -6,7 +6,6 @@ # pylint: disable=invalid-overridden-method import asyncio -import base64 import logging import random from typing import Any, Dict, TYPE_CHECKING diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/policies.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/policies.py index 4d527cf68330..d6f3daab5c7a 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/policies.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/policies.py @@ -90,7 +90,7 @@ def is_retry(response, mode): # pylint: disable=too-many-return-statements if status in [501, 505]: return False return True - + return False def is_checksum_retry(response): # retry if invalid content md5 diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/policies_async.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/policies_async.py index 4f01a43a9a4f..374077bdd4d9 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/policies_async.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/policies_async.py @@ -6,7 +6,6 @@ # pylint: disable=invalid-overridden-method import asyncio -import base64 import logging import random from typing import Any, Dict, TYPE_CHECKING