[Storage] Prepare hotfix release (#34608)
jalauzon-msft authored Mar 4, 2024
1 parent 9979486 commit 18e691a
Showing 7 changed files with 90 additions and 4 deletions.
6 changes: 6 additions & 0 deletions sdk/storage/azure-storage-blob/CHANGELOG.md
@@ -1,5 +1,11 @@
 # Release History

+## 12.19.1 (2024-03-04)
+
+### Bugs Fixed
+- Fixed an issue where, under rare circumstances, full downloads of sparse Page Blobs could result in the
+  downloaded content containing up to one "chunk" of extra `\x00` bytes at the end due to an optimization error.
+
 ## 12.19.0 (2023-11-07)

 ### Features Added
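To illustrate the bug described in the changelog entry above, here is a minimal standalone sketch (not code from this commit; the helper name and values are illustrative). When a blob's size is not a multiple of the configured chunk size, the final chunk covers fewer bytes than the chunk size, so padding it with a full chunk of zero bytes inflates the downloaded content:

# Illustrative sketch only -- not SDK code. It shows why the final chunk of a
# download can be shorter than the configured chunk size.
def chunk_ranges(blob_size: int, chunk_size: int):
    """Yield inclusive (start, end) byte ranges that cover the blob."""
    for start in range(0, blob_size, chunk_size):
        end = min(start + chunk_size, blob_size) - 1
        yield start, end

# A 10 KiB blob downloaded in 4 KiB chunks: the last chunk is only 2 KiB.
for start, end in chunk_ranges(10 * 1024, 4 * 1024):
    print(f"range {start}-{end} -> {end - start + 1} bytes")
# Emitting chunk_size zero bytes for an empty final chunk (the old behavior)
# would append 4096 bytes where only 2048 belong.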
2 changes: 1 addition & 1 deletion sdk/storage/azure-storage-blob/assets.json
@@ -2,5 +2,5 @@
   "AssetsRepo": "Azure/azure-sdk-assets",
   "AssetsRepoPrefixPath": "python",
   "TagPrefix": "python/storage/azure-storage-blob",
-  "Tag": "python/storage/azure-storage-blob_12c8154ae2"
+  "Tag": "python/storage/azure-storage-blob_1b66da54e8"
 }
@@ -186,7 +186,8 @@ def _download_chunk(self, chunk_start, chunk_end):
         # No need to download the empty chunk from server if there's no data in the chunk to be downloaded.
         # Do optimize and create empty chunk locally if condition is met.
         if self._do_optimize(download_range[0], download_range[1]):
-            chunk_data = b"\x00" * self.chunk_size
+            data_size = download_range[1] - download_range[0] + 1
+            chunk_data = b"\x00" * data_size
         else:
             range_header, range_validation = validate_and_format_range_headers(
                 download_range[0],
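To make the corrected arithmetic in the synchronous chunk downloader concrete, a short worked example follows; the values mirror the scenario in the new tests added below, and `download_range` endpoints are inclusive, as in the surrounding code:

# Worked example of the fixed size calculation for a final, shorter chunk.
# Scenario from the new tests: a 10 KiB page blob downloaded with a 4 KiB
# chunk size, so the last chunk spans bytes 8192-10239.
download_range = (8 * 1024, 10 * 1024 - 1)   # inclusive range: (8192, 10239)
chunk_size = 4 * 1024                        # configured max_chunk_get_size

data_size = download_range[1] - download_range[0] + 1
assert data_size == 2048
chunk_data = b"\x00" * data_size             # 2048 zero bytes; the old code
                                             # emitted chunk_size (4096) instead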
@@ -4,4 +4,4 @@
 # license information.
 # --------------------------------------------------------------------------

-VERSION = "12.19.0"
+VERSION = "12.19.1"
@@ -95,7 +95,8 @@ async def _download_chunk(self, chunk_start, chunk_end):
         # No need to download the empty chunk from server if there's no data in the chunk to be downloaded.
         # Do optimize and create empty chunk locally if condition is met.
         if self._do_optimize(download_range[0], download_range[1]):
-            chunk_data = b"\x00" * self.chunk_size
+            data_size = download_range[1] - download_range[0] + 1
+            chunk_data = b"\x00" * data_size
         else:
             range_header, range_validation = validate_and_format_range_headers(
                 download_range[0],
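The asynchronous chunk downloader receives the same one-line size calculation. As a rough usage sketch of the behavior the fix restores (the account URL, credential, container, and blob names are placeholders, not values from this commit), a full async download of a sparse page blob should return exactly the blob's size in bytes:

# Usage sketch only; endpoint, credential, and names below are placeholders.
import asyncio
from azure.storage.blob.aio import BlobServiceClient

async def main():
    service = BlobServiceClient(
        "https://<account>.blob.core.windows.net", credential="<account-key>"
    )
    async with service:
        blob = service.get_blob_client("my-container", "sparse-page-blob")
        downloader = await blob.download_blob()
        content = await downloader.readall()
        # With the fix, len(content) equals the blob size even when the final
        # chunk only partially overlaps the blob's written page ranges.
        print(len(content))

asyncio.run(main())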
39 changes: 39 additions & 0 deletions sdk/storage/azure-storage-blob/tests/test_page_blob.py
@@ -2269,6 +2269,45 @@ def test_download_sparse_page_blob_parallel(self, **kwargs):

         content = blob_client.download_blob(max_concurrency=3).readall()

+    @BlobPreparer()
+    @recorded_by_proxy
+    def test_download_sparse_page_blob_uneven_chunks(self, **kwargs):
+        storage_account_name = kwargs.pop("storage_account_name")
+        storage_account_key = kwargs.pop("storage_account_key")
+
+        # Arrange
+        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key)
+        self._setup(bsc)
+
+        # Choose an initial size, chunk size, and blob size so that the last chunk spills over the end of the blob
+        self.config.max_single_get_size = 4 * 1024
+        self.config.max_chunk_get_size = 4 * 1024
+        sparse_page_blob_size = 10 * 1024
+
+        blob_client = self._get_blob_reference(bsc)
+        blob_client.create_page_blob(sparse_page_blob_size)
+
+        data = b'12345678' * 128  # 1024 bytes
+        range_start = 2 * 1024 + 512
+        blob_client.upload_page(data, offset=range_start, length=len(data))
+
+        # Act
+        content = blob_client.download_blob().readall()
+
+        # Assert
+        assert sparse_page_blob_size == len(content)
+        start = end = 0
+        for r in blob_client.list_page_ranges():
+            if not r.cleared:
+                start = r.start
+                end = r.end
+
+        assert data == content[start: end + 1]
+        for byte in content[:start - 1]:
+            assert byte == 0
+        for byte in content[end + 1:]:
+            assert byte == 0
+
     @BlobPreparer()
     @recorded_by_proxy
     def test_upload_progress_chunked_non_parallel(self, **kwargs):
39 changes: 39 additions & 0 deletions sdk/storage/azure-storage-blob/tests/test_page_blob_async.py
@@ -2238,6 +2238,45 @@ async def test_download_sparse_page_blob(self, storage_account_name, storage_acc
             except:
                 assert byte == 0

+    @BlobPreparer()
+    @recorded_by_proxy_async
+    async def test_download_sparse_page_blob_uneven_chunks(self, **kwargs):
+        storage_account_name = kwargs.pop("storage_account_name")
+        storage_account_key = kwargs.pop("storage_account_key")
+
+        # Arrange
+        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key)
+        await self._setup(bsc)
+
+        # Choose an initial size, chunk size, and blob size so that the last chunk spills over the end of the blob
+        self.config.max_single_get_size = 4 * 1024
+        self.config.max_chunk_get_size = 4 * 1024
+        sparse_page_blob_size = 10 * 1024
+
+        blob_client = self._get_blob_reference(bsc)
+        await blob_client.create_page_blob(sparse_page_blob_size)
+
+        data = b'12345678' * 128  # 1024 bytes
+        range_start = 2 * 1024 + 512
+        await blob_client.upload_page(data, offset=range_start, length=len(data))
+
+        # Act
+        content = await (await blob_client.download_blob()).readall()
+
+        # Assert
+        assert sparse_page_blob_size == len(content)
+        start = end = 0
+        async for r in blob_client.list_page_ranges():
+            if not r.cleared:
+                start = r.start
+                end = r.end
+
+        assert data == content[start: end + 1]
+        for byte in content[:start - 1]:
+            assert byte == 0
+        for byte in content[end + 1:]:
+            assert byte == 0
+
     @BlobPreparer()
     @recorded_by_proxy_async
     async def test_upload_progress_chunked_non_parallel(self, **kwargs):
