[Storage] az storage blob copy start: Fix issue in copy from different account #18730

Merged (3 commits) on Jul 13, 2021
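For context, the scenario this PR fixes is `az storage blob copy start` when the source blob lives in a different storage account than the destination. The destination account's key cannot read the source, so the CLI authorizes the source side itself by generating a short-lived read SAS for the source blob and appending it to the source URL. Below is a minimal sketch of the underlying copy call with the track 2 azure-storage-blob SDK, assuming a source URL that already carries a SAS token; it is illustrative only, not the CLI's own code path, and every account name, container, and credential in it is a placeholder.

# Hedged sketch: starting a blob copy in the destination account from a
# source URL in another account. The source URL must already be readable,
# e.g. because a short-lived read SAS has been appended to it.
from azure.storage.blob import BlobClient

destination = BlobClient(account_url='https://dstaccount.blob.core.windows.net',  # placeholder account
                         container_name='dst-container', blob_name='dst',
                         credential='<destination-account-key>')                  # placeholder credential

source_url_with_sas = 'https://srcaccount.blob.core.windows.net/src-container/src?<sas-token>'

# requires_sync mirrors the CLI's --requires-sync flag: the call returns only
# once the copy has completed on the service side.
destination.start_copy_from_url(source_url_with_sas, requires_sync=True)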
13 changes: 10 additions & 3 deletions src/azure-cli/azure/cli/command_modules/storage/_validators.py
@@ -387,8 +387,8 @@ def validate_source_uri(cmd, namespace):  # pylint: disable=too-many-statements
    namespace.copy_source = uri


-def validate_source_url(cmd, namespace):  # pylint: disable=too-many-statements
-    from .util import create_short_lived_blob_sas, create_short_lived_file_sas
+def validate_source_url(cmd, namespace):  # pylint: disable=too-many-statements, too-many-locals
+    from .util import create_short_lived_blob_sas, create_short_lived_blob_sas_v2, create_short_lived_file_sas
    from azure.cli.core.azclierror import InvalidArgumentValueError, RequiredArgumentMissingError, \
        MutuallyExclusiveArgumentError
    usage_string = \
@@ -472,7 +472,14 @@ def validate_source_url(cmd, namespace):  # pylint: disable=too-many-statements
            source_sas = create_short_lived_file_sas(cmd, source_account_name, source_account_key, share,
                                                     dir_name, file_name)
        elif valid_blob_source and (ns.get('share_name', None) or not same_account):
-            source_sas = create_short_lived_blob_sas(cmd, source_account_name, source_account_key, container, blob)
+            prefix = cmd.command_kwargs['resource_type'].value[0]
+            # is_storagev2() is used to distinguish if the command is in track2 SDK
+            # If yes, we will use get_login_credentials() as token credential
+            if is_storagev2(prefix):
+                source_sas = create_short_lived_blob_sas_v2(cmd, source_account_name, source_account_key, container,
+                                                            blob)
+            else:
+                source_sas = create_short_lived_blob_sas(cmd, source_account_name, source_account_key, container, blob)

    query_params = []
    if source_sas:
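Downstream of this branch (not shown in the diff), validate_source_url folds the generated SAS into the source blob URL as a query parameter before the copy starts. The following sketch illustrates that kind of assembly only; the function and variable names are hypothetical and it is not the actual continuation of the azure-cli code.

# Illustrative only: how a short-lived SAS and an optional snapshot parameter
# are typically appended to a copy-source URL as query parameters.
def build_copy_source(account_name, container, blob, sas_token=None, snapshot=None):
    uri = 'https://{}.blob.core.windows.net/{}/{}'.format(account_name, container, blob)
    query_params = []
    if sas_token:
        query_params.append(sas_token.lstrip('?'))
    if snapshot:
        query_params.append('snapshot={}'.format(snapshot))
    return '{}?{}'.format(uri, '&'.join(query_params)) if query_params else uri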
@@ -134,23 +134,36 @@ def test_storage_blob_copy_same_account_sas(self, resource_group, storage_account):
        self.assertEqual(expect_content, actual_content)

    @ResourceGroupPreparer()
-    @StorageAccountPreparer(kind='storageV2')
-    def test_storage_blob_copy_requires_sync(self, resource_group, storage_account):
+    @StorageAccountPreparer(parameter_name='account1', kind='storageV2')
+    @StorageAccountPreparer(parameter_name='account2', kind='storageV2')
+    def test_storage_blob_copy_requires_sync(self, resource_group, account1, account2):
        source_file = self.create_temp_file(16, full_random=True)
-        account_info = self.get_account_info(resource_group, storage_account)
+        account1_info = self.get_account_info(resource_group, account1)
+        account2_info = self.get_account_info(resource_group, account2)

-        source_container = self.create_container(account_info)
-        target_container = self.create_container(account_info)
+        # Prepare
+        source_container = self.create_container(account1_info)
+        target_container1 = self.create_container(account1_info)
+        target_container2 = self.create_container(account2_info)

-        self.storage_cmd('storage blob upload -c {} -f "{}" -n src', account_info,
+        self.storage_cmd('storage blob upload -c {} -f "{}" -n src', account1_info,
                         source_container, source_file)
-        source_uri = self.storage_cmd('storage blob url -c {} -n src', account_info, source_container).output
-        self.storage_cmd('storage blob copy start -b dst -c {} --source-uri {}', account_info,
-                         target_container, source_uri)

-        self.storage_cmd('storage blob upload -c {} -f "{}" -n pagesrc --type page', account_info,
+        # with different account name and account key
+        self.storage_cmd('storage blob copy start --destination-blob dst --destination-container {} '
+                         '--source-account-name {} --source-container {} --source-blob src --requires-sync true',
+                         account2_info, target_container2, account1, source_container)
+
+        # with source uri in the same account
+        source_uri = self.storage_cmd('storage blob url -c {} -n src', account1_info, source_container).output
+        self.storage_cmd('storage blob copy start -b dst -c {} --source-uri {}', account1_info,
+                         target_container1, source_uri)
+
+        self.storage_cmd('storage blob upload -c {} -f "{}" -n pagesrc --type page', account1_info,
                         source_container, source_file)
-        source_uri = self.storage_cmd('storage blob url -c {} -n pagesrc', account_info, source_container).output
+        source_uri = self.storage_cmd('storage blob url -c {} -n pagesrc', account1_info, source_container).output
        # expect failure with page blob
-        self.storage_cmd_negative('storage blob copy start -b dst -c {} --source-uri {} --requires-sync', account_info,
-                                  target_container, source_uri)
+        self.storage_cmd_negative('storage blob copy start -b dst -c {} --source-uri {} --requires-sync', account1_info,
+                                  target_container1, source_uri)
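The updated test prepares two storage accounts and drives each command against the right one through the account_info tuples. In this test framework, storage_cmd roughly formats the command template and attaches the credentials from the given account_info; the sketch below is a hedged illustration of that behavior, not the actual StorageScenarioMixin implementation.

# Hedged sketch of the helper the test relies on: passing account2_info makes
# the copy run against the destination account, while --source-account-name
# tells the validator to generate a SAS for the source account.
def storage_cmd_sketch(command_template, account_info, *args):
    account_name, account_key = account_info
    command = command_template.format(*args)
    return '{} --account-name {} --account-key {}'.format(command, account_name, account_key)

So the cross-account call above effectively runs `storage blob copy start` against account2, with account1 named only through --source-account-name and --source-container.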


13 changes: 13 additions & 0 deletions src/azure-cli/azure/cli/command_modules/storage/util.py
@@ -5,6 +5,7 @@


 import os
+from azure.cli.core.profiles import ResourceType


def collect_blobs(blob_service, container, pattern=None):
@@ -116,6 +117,18 @@ def create_short_lived_blob_sas(cmd, account_name, account_key, container, blob):
    return sas.generate_blob(container, blob, permission=t_blob_permissions(read=True), expiry=expiry, protocol='https')


+def create_short_lived_blob_sas_v2(cmd, account_name, account_key, container, blob):
+    from datetime import datetime, timedelta
+
+    t_sas = cmd.get_models('_shared_access_signature#BlobSharedAccessSignature',
+                           resource_type=ResourceType.DATA_STORAGE_BLOB)
+
+    t_blob_permissions = cmd.get_models('_models#BlobSasPermissions', resource_type=ResourceType.DATA_STORAGE_BLOB)
+    expiry = (datetime.utcnow() + timedelta(days=1)).strftime('%Y-%m-%dT%H:%M:%SZ')
+    sas = t_sas(account_name, account_key)
+    return sas.generate_blob(container, blob, permission=t_blob_permissions(read=True), expiry=expiry, protocol='https')
+
+
def create_short_lived_file_sas(cmd, account_name, account_key, share, directory_name, file_name):
    from datetime import datetime, timedelta
    if cmd.supported_api_version(min_api='2017-04-17'):
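The new create_short_lived_blob_sas_v2 helper resolves the track 2 SDK's BlobSharedAccessSignature through cmd.get_models and issues a read-only SAS that expires after one day. Outside the CLI, the closest public equivalent in azure-storage-blob is generate_blob_sas; the sketch below is an assumption-laden illustration with placeholder credentials, not the CLI's own code path.

# Hedged sketch: a one-day, read-only blob SAS via the public track 2 helper,
# comparable in effect to create_short_lived_blob_sas_v2.
# Account name, container, blob, and key are placeholders.
from datetime import datetime, timedelta

from azure.storage.blob import BlobSasPermissions, generate_blob_sas

sas_token = generate_blob_sas(account_name='srcaccount',
                              container_name='src-container',
                              blob_name='src',
                              account_key='<source-account-key>',
                              permission=BlobSasPermissions(read=True),
                              expiry=datetime.utcnow() + timedelta(days=1))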