diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client.py index 7b97d2aa3153..1ea349af5129 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client.py @@ -576,7 +576,7 @@ def _download_blob_options(self, offset=None, length=None, **kwargs): 'lease_access_conditions': access_conditions, 'modified_access_conditions': mod_conditions, 'cpk_info': cpk_info, - 'cls': deserialize_blob_stream, + 'cls': kwargs.pop('cls', None) or deserialize_blob_stream, 'max_concurrency':kwargs.pop('max_concurrency', 1), 'encoding': kwargs.pop('encoding', None), 'timeout': kwargs.pop('timeout', None), @@ -1038,14 +1038,15 @@ def get_blob_properties(self, **kwargs): snapshot=self.snapshot, lease_access_conditions=access_conditions, modified_access_conditions=mod_conditions, - cls=deserialize_blob_properties, + cls=kwargs.pop('cls', None) or deserialize_blob_properties, cpk_info=cpk_info, **kwargs) except StorageErrorException as error: process_storage_error(error) blob_props.name = self.blob_name - blob_props.snapshot = self.snapshot - blob_props.container = self.container_name + if isinstance(blob_props, BlobProperties): + blob_props.container = self.container_name + blob_props.snapshot = self.snapshot return blob_props # type: ignore def _set_http_headers_options(self, content_settings=None, **kwargs): diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations_async/_blob_operations_async.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations_async/_blob_operations_async.py index 21750b333fcd..54d6dab2a31b 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations_async/_blob_operations_async.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations_async/_blob_operations_async.py @@ -1119,7 +1119,7 
@@ async def set_expiry(self, expiry_options, timeout=None, request_id=None, expire header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id", request_id, 'str') header_parameters['x-ms-expiry-option'] = self._serialize.header("expiry_options", expiry_options, 'str') if expires_on is not None: - header_parameters['x-ms-expiry-time'] = self._serialize.header("expires_on", expires_on, 'str') + header_parameters['x-ms-expiry-time'] = self._serialize.header("expires_on", expires_on, 'rfc-1123') # Construct and send request request = self._client.put(url, query_parameters, header_parameters) diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_blob_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_blob_operations.py index 66b079abbd07..394a519856a6 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_blob_operations.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_blob_operations.py @@ -1118,7 +1118,7 @@ def set_expiry(self, expiry_options, timeout=None, request_id=None, expires_on=N header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id", request_id, 'str') header_parameters['x-ms-expiry-option'] = self._serialize.header("expiry_options", expiry_options, 'str') if expires_on is not None: - header_parameters['x-ms-expiry-time'] = self._serialize.header("expires_on", expires_on, 'str') + header_parameters['x-ms-expiry-time'] = self._serialize.header("expires_on", expires_on, 'rfc-1123') # Construct and send request request = self._client.put(url, query_parameters, header_parameters) diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_client_async.py b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_client_async.py index 28d60fc5b6d4..c7639f891e54 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_client_async.py +++ 
b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_client_async.py @@ -561,14 +561,15 @@ async def get_blob_properties(self, **kwargs): snapshot=self.snapshot, lease_access_conditions=access_conditions, modified_access_conditions=mod_conditions, - cls=deserialize_blob_properties, + cls=kwargs.pop('cls', None) or deserialize_blob_properties, cpk_info=cpk_info, **kwargs) except StorageErrorException as error: process_storage_error(error) blob_props.name = self.blob_name - blob_props.snapshot = self.snapshot - blob_props.container = self.container_name + if isinstance(blob_props, BlobProperties): + blob_props.container = self.container_name + blob_props.snapshot = self.snapshot return blob_props # type: ignore @distributed_trace_async diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/__init__.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/__init__.py index c97dc0915e67..82b0e5531821 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/__init__.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/__init__.py @@ -18,7 +18,6 @@ DirectoryProperties, FileProperties, PathProperties, - PathPropertiesPaged, LeaseProperties, ContentSettings, AccountSasPermissions, @@ -38,6 +37,7 @@ AccessControlChangeFailure, AccessControlChanges, ) + from ._shared_access_signature import generate_account_sas, generate_file_system_sas, generate_directory_sas, \ generate_file_sas @@ -66,7 +66,6 @@ 'DirectoryProperties', 'FileProperties', 'PathProperties', - 'PathPropertiesPaged', 'LeaseProperties', 'ContentSettings', 'AccessControlChangeResult', diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_directory_client.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_directory_client.py index 0584bb7ad9a7..b8c7ff22de27 100644 --- 
a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_directory_client.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_directory_client.py @@ -3,12 +3,11 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. # -------------------------------------------------------------------------- - try: from urllib.parse import quote, unquote except ImportError: from urllib2 import quote, unquote # type: ignore - +from ._deserialize import deserialize_dir_properties from ._shared.base_client import parse_connection_str from ._data_lake_file_client import DataLakeFileClient from ._models import DirectoryProperties @@ -236,8 +235,7 @@ def get_directory_properties(self, **kwargs): :dedent: 4 :caption: Getting the properties for a file/directory. """ - blob_properties = self._get_path_properties(**kwargs) - return DirectoryProperties._from_blob_properties(blob_properties) # pylint: disable=protected-access + return self._get_path_properties(cls=deserialize_dir_properties, **kwargs) # pylint: disable=protected-access def rename_directory(self, new_name, # type: str **kwargs): diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_file_client.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_file_client.py index 1d09ef6c793c..f39cd8fab5f3 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_file_client.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_file_client.py @@ -21,8 +21,9 @@ from ._generated.models import StorageErrorException from ._download import StorageStreamDownloader from ._path_client import PathClient -from ._serialize import get_mod_conditions, get_path_http_headers, get_access_conditions, add_metadata_headers -from ._deserialize import process_storage_error +from ._serialize import get_mod_conditions, 
get_path_http_headers, get_access_conditions, add_metadata_headers, \ + convert_datetime_to_rfc1123 +from ._deserialize import process_storage_error, deserialize_file_properties from ._models import FileProperties, DataLakeFileQueryError @@ -246,8 +247,31 @@ def get_file_properties(self, **kwargs): :dedent: 4 :caption: Getting the properties for a file. """ - blob_properties = self._get_path_properties(**kwargs) - return FileProperties._from_blob_properties(blob_properties) # pylint: disable=protected-access + return self._get_path_properties(cls=deserialize_file_properties, **kwargs) # pylint: disable=protected-access + + def set_file_expiry(self, expiry_options, # type: str + expires_on=None, # type: Optional[Union[datetime, int]] + **kwargs): + # type: (str, Optional[Union[datetime, int]], **Any) -> None + """Sets the time a file will expire and be deleted. + + :param str expiry_options: + Required. Indicates mode of the expiry time. + Possible values include: 'NeverExpire', 'RelativeToCreation', 'RelativeToNow', 'Absolute' + :param datetime or int expires_on: + The time to set the file to expiry. + When expiry_options is RelativeTo*, expires_on should be an int in milliseconds. + If the type of expires_on is datetime, it should be in UTC time. + :keyword int timeout: + The timeout parameter is expressed in seconds. 
+ :rtype: None + """ + try: + expires_on = convert_datetime_to_rfc1123(expires_on) + except AttributeError: + expires_on = str(expires_on) + self._datalake_client_for_blob_operation.path \ + .set_expiry(expiry_options, expires_on=expires_on, **kwargs) # pylint: disable=protected-access def _upload_options( # pylint:disable=too-many-statements self, data, # type: Union[Iterable[AnyStr], IO[AnyStr]] diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_deserialize.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_deserialize.py index 9d0881a7229e..f54a82bd0d67 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_deserialize.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_deserialize.py @@ -12,6 +12,7 @@ from azure.core.pipeline.policies import ContentDecodePolicy from azure.core.exceptions import HttpResponseError, DecodeError, ResourceModifiedError, ClientAuthenticationError, \ ResourceNotFoundError, ResourceExistsError +from ._models import FileProperties, DirectoryProperties, LeaseProperties from ._shared.models import StorageErrorCode if TYPE_CHECKING: @@ -20,6 +21,45 @@ _LOGGER = logging.getLogger(__name__) +def deserialize_dir_properties(response, obj, headers): + metadata = deserialize_metadata(response, obj, headers) + dir_properties = DirectoryProperties( + metadata=metadata, + **headers + ) + return dir_properties + + +def deserialize_file_properties(response, obj, headers): + metadata = deserialize_metadata(response, obj, headers) + file_properties = FileProperties( + metadata=metadata, + **headers + ) + if 'Content-Range' in headers: + if 'x-ms-blob-content-md5' in headers: + file_properties.content_settings.content_md5 = headers['x-ms-blob-content-md5'] + else: + file_properties.content_settings.content_md5 = None + return file_properties + + +def from_blob_properties(blob_properties): + file_props = FileProperties() + file_props.name = 
blob_properties.name + file_props.etag = blob_properties.etag + file_props.deleted = blob_properties.deleted + file_props.metadata = blob_properties.metadata + file_props.lease = blob_properties.lease + file_props.lease.__class__ = LeaseProperties + file_props.last_modified = blob_properties.last_modified + file_props.creation_time = blob_properties.creation_time + file_props.size = blob_properties.size + file_props.deleted_time = blob_properties.deleted_time + file_props.remaining_retention_days = blob_properties.remaining_retention_days + file_props.content_settings = blob_properties.content_settings + return file_props + def normalize_headers(headers): normalized = {} for key, value in headers.items(): diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_download.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_download.py index 181b503d8c4a..e4efd8c23dba 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_download.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_download.py @@ -3,8 +3,7 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. 
# -------------------------------------------------------------------------- - -from ._models import FileProperties +from ._deserialize import from_blob_properties class StorageStreamDownloader(object): @@ -23,7 +22,7 @@ class StorageStreamDownloader(object): def __init__(self, downloader): self._downloader = downloader self.name = self._downloader.name - self.properties = FileProperties._from_blob_properties(self._downloader.properties) # pylint: disable=protected-access + self.properties = from_blob_properties(self._downloader.properties) # pylint: disable=protected-access self.size = self._downloader.size def __len__(self): diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_file_system_client.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_file_system_client.py index c29ae03ab2b0..5a8221b99dd5 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_file_system_client.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_file_system_client.py @@ -16,7 +16,8 @@ from azure.storage.blob import ContainerClient from ._shared.base_client import StorageAccountHostsMixin, parse_query, parse_connection_str from ._serialize import convert_dfs_url_to_blob_url -from ._models import LocationMode, FileSystemProperties, PathPropertiesPaged, PublicAccess +from ._models import LocationMode, FileSystemProperties, PublicAccess +from ._list_paths_helper import PathPropertiesPaged from ._data_lake_file_client import DataLakeFileClient from ._data_lake_directory_client import DataLakeDirectoryClient from ._data_lake_lease import DataLakeLeaseClient diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_list_paths_helper.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_list_paths_helper.py new file mode 100644 index 000000000000..1e4b19e2767a --- /dev/null +++ 
b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_list_paths_helper.py @@ -0,0 +1,73 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +from azure.core.paging import PageIterator +from ._generated.models import StorageErrorException +from ._models import PathProperties +from ._deserialize import return_headers_and_deserialized_path_list +from ._generated.models import Path +from ._shared.response_handlers import process_storage_error + + +class PathPropertiesPaged(PageIterator): + """An Iterable of Path properties. + + :ivar str path: Filters the results to return only paths under the specified path. + :ivar int results_per_page: The maximum number of results retrieved per API call. + :ivar str continuation_token: The continuation token to retrieve the next page of results. + :ivar list(~azure.storage.filedatalake.PathProperties) current_page: The current page of listed results. + + :param callable command: Function to retrieve the next page of items. + :param str path: Filters the results to return only paths under the specified path. + :param int max_results: The maximum number of paths to retrieve per + call. + :param str continuation_token: An opaque continuation token. 
+ """ + def __init__( + self, command, + recursive, + path=None, + max_results=None, + continuation_token=None, + upn=None): + super(PathPropertiesPaged, self).__init__( + get_next=self._get_next_cb, + extract_data=self._extract_data_cb, + continuation_token=continuation_token or "" + ) + self._command = command + self.recursive = recursive + self.results_per_page = max_results + self.path = path + self.upn = upn + self.current_page = None + self.path_list = None + + def _get_next_cb(self, continuation_token): + try: + return self._command( + self.recursive, + continuation=continuation_token or None, + path=self.path, + max_results=self.results_per_page, + upn=self.upn, + cls=return_headers_and_deserialized_path_list) + except StorageErrorException as error: + process_storage_error(error) + + def _extract_data_cb(self, get_next_return): + self.path_list, self._response = get_next_return + self.current_page = [self._build_item(item) for item in self.path_list] + + return self._response['continuation'] or None, self.current_page + + @staticmethod + def _build_item(item): + if isinstance(item, PathProperties): + return item + if isinstance(item, Path): + path = PathProperties._from_generated(item) # pylint: disable=protected-access + return path + return item diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_models.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_models.py index f5aff1ae6101..5524bdc81a32 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_models.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_models.py @@ -7,7 +7,6 @@ # pylint: disable=super-init-not-called, too-many-lines from enum import Enum -from azure.core.paging import PageIterator from azure.storage.blob import LeaseProperties as BlobLeaseProperties from azure.storage.blob import AccountSasPermissions as BlobAccountSasPermissions from azure.storage.blob import ResourceTypes as 
BlobResourceTypes @@ -20,10 +19,7 @@ from azure.storage.blob import ArrowDialect as BlobArrowDialect from azure.storage.blob._generated.models import StorageErrorException from azure.storage.blob._models import ContainerPropertiesPaged -from ._deserialize import return_headers_and_deserialized_path_list -from ._generated.models import Path from ._shared.models import DictMixin -from ._shared.response_handlers import process_storage_error class FileSystemProperties(object): @@ -134,34 +130,16 @@ class DirectoryProperties(DictMixin): """ def __init__(self, **kwargs): - super(DirectoryProperties, self).__init__( - **kwargs - ) - self.name = None - self.etag = None + self.name = kwargs.get('name') + self.etag = kwargs.get('ETag') self.deleted = None - self.metadata = None - self.lease = None - self.last_modified = None - self.creation_time = None + self.metadata = kwargs.get('metadata') + self.lease = LeaseProperties(**kwargs) + self.last_modified = kwargs.get('Last-Modified') + self.creation_time = kwargs.get('x-ms-creation-time') self.deleted_time = None self.remaining_retention_days = None - @classmethod - def _from_blob_properties(cls, blob_properties): - directory_props = DirectoryProperties() - directory_props.name = blob_properties.name - directory_props.etag = blob_properties.etag - directory_props.deleted = blob_properties.deleted - directory_props.metadata = blob_properties.metadata - directory_props.lease = blob_properties.lease - directory_props.lease.__class__ = LeaseProperties - directory_props.last_modified = blob_properties.last_modified - directory_props.creation_time = blob_properties.creation_time - directory_props.deleted_time = blob_properties.deleted_time - directory_props.remaining_retention_days = blob_properties.remaining_retention_days - return directory_props - class FileProperties(DictMixin): """ @@ -183,37 +161,18 @@ class FileProperties(DictMixin): """ def __init__(self, **kwargs): - super(FileProperties, self).__init__( - **kwargs - ) - 
self.name = None - self.etag = None + self.name = kwargs.get('name') + self.etag = kwargs.get('ETag') self.deleted = None - self.metadata = None - self.lease = None - self.last_modified = None - self.creation_time = None - self.size = None + self.metadata = kwargs.get('metadata') + self.lease = LeaseProperties(**kwargs) + self.last_modified = kwargs.get('Last-Modified') + self.creation_time = kwargs.get('x-ms-creation-time') + self.size = kwargs.get('Content-Length') self.deleted_time = None + self.expiry_time = kwargs.get("x-ms-expiry-time") self.remaining_retention_days = None - self.content_settings = None - - @classmethod - def _from_blob_properties(cls, blob_properties): - file_props = FileProperties() - file_props.name = blob_properties.name - file_props.etag = blob_properties.etag - file_props.deleted = blob_properties.deleted - file_props.metadata = blob_properties.metadata - file_props.lease = blob_properties.lease - file_props.lease.__class__ = LeaseProperties - file_props.last_modified = blob_properties.last_modified - file_props.creation_time = blob_properties.creation_time - file_props.size = blob_properties.size - file_props.deleted_time = blob_properties.deleted_time - file_props.remaining_retention_days = blob_properties.remaining_retention_days - file_props.content_settings = blob_properties.content_settings - return file_props + self.content_settings = ContentSettings(**kwargs) class PathProperties(object): @@ -261,69 +220,6 @@ def _from_generated(cls, generated): return path_prop -class PathPropertiesPaged(PageIterator): - """An Iterable of Path properties. - - :ivar str path: Filters the results to return only paths under the specified path. - :ivar int results_per_page: The maximum number of results retrieved per API call. - :ivar str continuation_token: The continuation token to retrieve the next page of results. - :ivar list(~azure.storage.filedatalake.PathProperties) current_page: The current page of listed results. 
- - :param callable command: Function to retrieve the next page of items. - :param str path: Filters the results to return only paths under the specified path. - :param int max_results: The maximum number of psths to retrieve per - call. - :param str continuation_token: An opaque continuation token. - """ - - def __init__( - self, command, - recursive, - path=None, - max_results=None, - continuation_token=None, - upn=None): - super(PathPropertiesPaged, self).__init__( - get_next=self._get_next_cb, - extract_data=self._extract_data_cb, - continuation_token=continuation_token or "" - ) - self._command = command - self.recursive = recursive - self.results_per_page = max_results - self.path = path - self.upn = upn - self.current_page = None - self.path_list = None - - def _get_next_cb(self, continuation_token): - try: - return self._command( - self.recursive, - continuation=continuation_token or None, - path=self.path, - max_results=self.results_per_page, - upn=self.upn, - cls=return_headers_and_deserialized_path_list) - except StorageErrorException as error: - process_storage_error(error) - - def _extract_data_cb(self, get_next_return): - self.path_list, self._response = get_next_return - self.current_page = [self._build_item(item) for item in self.path_list] - - return self._response['continuation'] or None, self.current_page - - @staticmethod - def _build_item(item): - if isinstance(item, PathProperties): - return item - if isinstance(item, Path): - path = PathProperties._from_generated(item) # pylint: disable=protected-access - return path - return item - - class LeaseProperties(BlobLeaseProperties): """DataLake Lease Properties. 
diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_path_client.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_path_client.py index 833d748e7068..29ce0af59bef 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_path_client.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_path_client.py @@ -80,6 +80,9 @@ def __init__( # ADLS doesn't support secondary endpoint, make sure it's empty self._hosts[LocationMode.SECONDARY] = "" self._client = DataLakeStorageClient(self.url, file_system_name, path_name, pipeline=self._pipeline) + self._datalake_client_for_blob_operation = DataLakeStorageClient(self._blob_client.url, + file_system_name, path_name, + pipeline=self._pipeline) def __exit__(self, *args): self._blob_client.close() @@ -746,7 +749,6 @@ def _get_path_properties(self, **kwargs): :caption: Getting the properties for a file/directory. """ path_properties = self._blob_client.get_blob_properties(**kwargs) - path_properties.__class__ = DirectoryProperties return path_properties def set_metadata(self, metadata, # type: Dict[str, str] diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_serialize.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_serialize.py index a75979f07799..9d700bfb029f 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_serialize.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_serialize.py @@ -13,6 +13,14 @@ def convert_dfs_url_to_blob_url(dfs_account_url): return dfs_account_url.replace('.dfs.', '.blob.', 1) +def convert_datetime_to_rfc1123(date): + weekday = ["Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"][date.weekday()] + month = ["Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", + "Oct", "Nov", "Dec"][date.month - 1] + return "%s, %02d %s %04d %02d:%02d:%02d GMT" % (weekday, date.day, month, + date.year, date.hour, 
date.minute, date.second) + + def add_metadata_headers(metadata=None): # type: (Optional[Dict[str, str]]) -> str headers = list() diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_directory_client_async.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_directory_client_async.py index c6a7dcb3ec29..68956da97364 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_directory_client_async.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_directory_client_async.py @@ -11,6 +11,7 @@ from ._data_lake_file_client_async import DataLakeFileClient from .._data_lake_directory_client import DataLakeDirectoryClient as DataLakeDirectoryClientBase from .._models import DirectoryProperties +from .._deserialize import deserialize_dir_properties from ._path_client_async import PathClient @@ -203,8 +204,7 @@ async def get_directory_properties(self, **kwargs): :dedent: 4 :caption: Getting the properties for a file/directory. 
""" - blob_properties = await self._get_path_properties(**kwargs) - return DirectoryProperties._from_blob_properties(blob_properties) # pylint: disable=protected-access + return await self._get_path_properties(cls=deserialize_dir_properties, **kwargs) # pylint: disable=protected-access async def rename_directory(self, new_name, # type: str **kwargs): diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_file_client_async.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_file_client_async.py index dc1c69384c68..d075575dc8c6 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_file_client_async.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_file_client_async.py @@ -13,7 +13,8 @@ from ._download_async import StorageStreamDownloader from ._path_client_async import PathClient from .._data_lake_file_client import DataLakeFileClient as DataLakeFileClientBase -from .._deserialize import process_storage_error +from .._serialize import convert_datetime_to_rfc1123 +from .._deserialize import process_storage_error, deserialize_file_properties from .._generated.models import StorageErrorException from .._models import FileProperties from ..aio._upload_helper import upload_datalake_file @@ -207,8 +208,30 @@ async def get_file_properties(self, **kwargs): :dedent: 4 :caption: Getting the properties for a file. 
""" - blob_properties = await self._get_path_properties(**kwargs) - return FileProperties._from_blob_properties(blob_properties) # pylint: disable=protected-access + return await self._get_path_properties(cls=deserialize_file_properties, **kwargs) # pylint: disable=protected-access + + async def set_file_expiry(self, expiry_options, # type: str + expires_on=None, # type: Optional[Union[datetime, int]] + **kwargs): + # type: (str, Optional[Union[datetime, int]], **Any) -> None + """Sets the time a file will expire and be deleted. + + :param str expiry_options: + Required. Indicates mode of the expiry time. + Possible values include: 'NeverExpire', 'RelativeToCreation', 'RelativeToNow', 'Absolute' + :param datetime or int expires_on: + The time to set the file to expiry. + When expiry_options is RelativeTo*, expires_on should be an int in milliseconds + :keyword int timeout: + The timeout parameter is expressed in seconds. + :rtype: None + """ + try: + expires_on = convert_datetime_to_rfc1123(expires_on) + except AttributeError: + expires_on = str(expires_on) + await self._datalake_client_for_blob_operation.path.set_expiry(expiry_options, expires_on=expires_on, + **kwargs) # pylint: disable=protected-access async def upload_data(self, data, # type: Union[AnyStr, Iterable[AnyStr], IO[AnyStr]] length=None, # type: Optional[int] diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_download_async.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_download_async.py index 2fda96f2b6fd..ea27438b19da 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_download_async.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_download_async.py @@ -3,8 +3,7 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. 
# -------------------------------------------------------------------------- - -from .._models import FileProperties +from .._deserialize import from_blob_properties class StorageStreamDownloader(object): @@ -23,7 +22,7 @@ class StorageStreamDownloader(object): def __init__(self, downloader): self._downloader = downloader self.name = self._downloader.name - self.properties = FileProperties._from_blob_properties(self._downloader.properties) # pylint: disable=protected-access + self.properties = from_blob_properties(self._downloader.properties) # pylint: disable=protected-access self.size = self._downloader.size def __len__(self): diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_path_client_async.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_path_client_async.py index c28743244391..215207bba63f 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_path_client_async.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_path_client_async.py @@ -45,6 +45,9 @@ def __init__( **kwargs) self._client = DataLakeStorageClient(self.url, file_system_name, path_name, pipeline=self._pipeline) + self._datalake_client_for_blob_operation = DataLakeStorageClient(self._blob_client.url, + file_system_name, path_name, + pipeline=self._pipeline) self._loop = kwargs.get('loop', None) async def __aexit__(self, *args): @@ -568,7 +571,6 @@ async def _get_path_properties(self, **kwargs): :rtype: DirectoryProperties or FileProperties """ path_properties = await self._blob_client.get_blob_properties(**kwargs) - path_properties.__class__ = DirectoryProperties return path_properties async def set_metadata(self, metadata, # type: Dict[str, str] diff --git a/sdk/storage/azure-storage-file-datalake/tests/recordings/test_file.test_set_expiry.yaml b/sdk/storage/azure-storage-file-datalake/tests/recordings/test_file.test_set_expiry.yaml new file mode 100644 index 
000000000000..f21edffa71c6 --- /dev/null +++ b/sdk/storage/azure-storage-file-datalake/tests/recordings/test_file.test_set_expiry.yaml @@ -0,0 +1,208 @@ +interactions: +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '0' + User-Agent: + - azsdk-python-storage-dfs/12.0.2 Python/3.7.3 (Windows-10-10.0.18362-SP0) + x-ms-client-request-id: + - 08a85f0a-9f22-11ea-b31d-001a7dda7113 + x-ms-date: + - Tue, 26 May 2020 07:25:11 GMT + x-ms-properties: + - '' + x-ms-version: + - '2019-12-12' + method: PUT + uri: https://storagename.dfs.core.windows.net/filesystem84ed0a59/directory84ed0a59?resource=directory + response: + body: + string: '' + headers: + Content-Length: + - '0' + Date: + - Tue, 26 May 2020 07:25:11 GMT + ETag: + - '"0x8D80145ED25E619"' + Last-Modified: + - Tue, 26 May 2020 07:25:11 GMT + Server: + - Windows-Azure-HDFS/1.0 Microsoft-HTTPAPI/2.0 + x-ms-request-id: + - 9388e921-901f-0066-392e-330280000000 + x-ms-version: + - '2019-12-12' + status: + code: 201 + message: Created +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '0' + User-Agent: + - azsdk-python-storage-dfs/12.0.2 Python/3.7.3 (Windows-10-10.0.18362-SP0) + x-ms-client-request-id: + - 08fa93dc-9f22-11ea-bf47-001a7dda7113 + x-ms-content-disposition: + - inline + x-ms-content-language: + - spanish + x-ms-date: + - Tue, 26 May 2020 07:25:11 GMT + x-ms-properties: + - hello=d29ybGQ=,number=NDI= + x-ms-version: + - '2019-12-12' + method: PUT + uri: https://storagename.dfs.core.windows.net/filesystem84ed0a59/directory84ed0a59%2Fnewfile?resource=file + response: + body: + string: '' + headers: + Content-Length: + - '0' + Date: + - Tue, 26 May 2020 07:25:11 GMT + ETag: + - '"0x8D80145ED335B0A"' + Last-Modified: + - Tue, 26 May 2020 07:25:11 GMT + Server: + - Windows-Azure-HDFS/1.0 Microsoft-HTTPAPI/2.0 + x-ms-request-id: + 
- 9388e922-901f-0066-3a2e-330280000000 + x-ms-version: + - '2019-12-12' + status: + code: 201 + message: Created +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '0' + User-Agent: + - azsdk-python-storage-dfs/12.0.2 Python/3.7.3 (Windows-10-10.0.18362-SP0) + x-ms-client-request-id: + - 09086e8c-9f22-11ea-bd32-001a7dda7113 + x-ms-date: + - Tue, 26 May 2020 07:25:11 GMT + x-ms-expiry-option: + - Absolute + x-ms-expiry-time: + - Tue, 26 May 2020 08:25:11 GMT + x-ms-version: + - '2019-12-12' + method: PUT + uri: https://storagename.blob.core.windows.net/filesystem84ed0a59/directory84ed0a59/newfile?comp=expiry + response: + body: + string: '' + headers: + Content-Length: + - '0' + Date: + - Tue, 26 May 2020 07:25:12 GMT + ETag: + - '"0x8D80145ED335B0A"' + Last-Modified: + - Tue, 26 May 2020 07:25:11 GMT + Server: + - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 + x-ms-request-id: + - 8528af15-701e-009a-2a2e-33d379000000 + x-ms-version: + - '2019-12-12' + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - azsdk-python-storage-dfs/12.0.2 Python/3.7.3 (Windows-10-10.0.18362-SP0) + x-ms-client-request-id: + - 098639e8-9f22-11ea-8208-001a7dda7113 + x-ms-date: + - Tue, 26 May 2020 07:25:12 GMT + x-ms-version: + - '2019-12-12' + method: HEAD + uri: https://storagename.blob.core.windows.net/filesystem84ed0a59/directory84ed0a59/newfile + response: + body: + string: '' + headers: + Accept-Ranges: + - bytes + Content-Disposition: + - inline + Content-Language: + - spanish + Content-Length: + - '0' + Content-Type: + - application/octet-stream + Date: + - Tue, 26 May 2020 07:25:12 GMT + ETag: + - '"0x8D80145ED335B0A"' + Last-Modified: + - Tue, 26 May 2020 07:25:11 GMT + Server: + - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 + x-ms-access-tier: + - Hot + 
x-ms-access-tier-inferred: + - 'true' + x-ms-blob-type: + - BlockBlob + x-ms-creation-time: + - Tue, 26 May 2020 07:25:11 GMT + x-ms-expiry-time: + - Tue, 26 May 2020 08:25:11 GMT + x-ms-lease-state: + - available + x-ms-lease-status: + - unlocked + x-ms-meta-hello: + - world + x-ms-meta-number: + - '42' + x-ms-request-id: + - 8528af6d-701e-009a-792e-33d379000000 + x-ms-server-encrypted: + - 'true' + x-ms-version: + - '2019-12-12' + status: + code: 200 + message: OK +version: 1 diff --git a/sdk/storage/azure-storage-file-datalake/tests/recordings/test_file_async.test_set_expiry_async.yaml b/sdk/storage/azure-storage-file-datalake/tests/recordings/test_file_async.test_set_expiry_async.yaml new file mode 100644 index 000000000000..eb65df676e12 --- /dev/null +++ b/sdk/storage/azure-storage-file-datalake/tests/recordings/test_file_async.test_set_expiry_async.yaml @@ -0,0 +1,140 @@ +interactions: +- request: + body: null + headers: + User-Agent: + - azsdk-python-storage-dfs/12.0.2 Python/3.7.3 (Windows-10-10.0.18362-SP0) + x-ms-client-request-id: + - 107e1ee6-9f22-11ea-b27b-001a7dda7113 + x-ms-date: + - Tue, 26 May 2020 07:25:24 GMT + x-ms-properties: + - '' + x-ms-version: + - '2019-12-12' + method: PUT + uri: https://storagename.dfs.core.windows.net/filesystem217a0f53/directory217a0f53?resource=directory + response: + body: + string: '' + headers: + Content-Length: '0' + Date: Tue, 26 May 2020 07:25:23 GMT + Etag: '"0x8D80145F4DF209F"' + Last-Modified: Tue, 26 May 2020 07:25:24 GMT + Server: Windows-Azure-HDFS/1.0 Microsoft-HTTPAPI/2.0 + x-ms-request-id: c7f29fe2-401f-0028-752e-332c08000000 + x-ms-version: '2019-12-12' + status: + code: 201 + message: Created + url: https://emilyhnseuap.dfs.core.windows.net/filesystem217a0f53/directory217a0f53?resource=directory +- request: + body: null + headers: + User-Agent: + - azsdk-python-storage-dfs/12.0.2 Python/3.7.3 (Windows-10-10.0.18362-SP0) + x-ms-client-request-id: + - 10b47fba-9f22-11ea-993d-001a7dda7113 + 
x-ms-content-disposition: + - inline + x-ms-content-language: + - spanish + x-ms-date: + - Tue, 26 May 2020 07:25:24 GMT + x-ms-properties: + - hello=d29ybGQ=,number=NDI= + x-ms-version: + - '2019-12-12' + method: PUT + uri: https://storagename.dfs.core.windows.net/filesystem217a0f53/directory217a0f53%2Fnewfile?resource=file + response: + body: + string: '' + headers: + Content-Length: '0' + Date: Tue, 26 May 2020 07:25:24 GMT + Etag: '"0x8D80145F4ECE3A1"' + Last-Modified: Tue, 26 May 2020 07:25:24 GMT + Server: Windows-Azure-HDFS/1.0 Microsoft-HTTPAPI/2.0 + x-ms-request-id: c7f29fe3-401f-0028-762e-332c08000000 + x-ms-version: '2019-12-12' + status: + code: 201 + message: Created + url: https://emilyhnseuap.dfs.core.windows.net/filesystem217a0f53/directory217a0f53%2Fnewfile?resource=file +- request: + body: null + headers: + User-Agent: + - azsdk-python-storage-dfs/12.0.2 Python/3.7.3 (Windows-10-10.0.18362-SP0) + x-ms-client-request-id: + - 10c24164-9f22-11ea-8bbd-001a7dda7113 + x-ms-date: + - Tue, 26 May 2020 07:25:24 GMT + x-ms-expiry-option: + - Absolute + x-ms-expiry-time: + - Tue, 26 May 2020 08:25:24 GMT + x-ms-version: + - '2019-12-12' + method: PUT + uri: https://storagename.blob.core.windows.net/filesystem217a0f53/directory217a0f53/newfile?comp=expiry + response: + body: + string: '' + headers: + Content-Length: '0' + Date: Tue, 26 May 2020 07:25:25 GMT + Etag: '"0x8D80145F4ECE3A1"' + Last-Modified: Tue, 26 May 2020 07:25:24 GMT + Server: Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 + x-ms-request-id: 5cbafaac-901e-0059-6d2e-33ca23000000 + x-ms-version: '2019-12-12' + status: + code: 200 + message: OK + url: https://emilyhnseuap.blob.core.windows.net/filesystem217a0f53/directory217a0f53/newfile?comp=expiry +- request: + body: null + headers: + User-Agent: + - azsdk-python-storage-dfs/12.0.2 Python/3.7.3 (Windows-10-10.0.18362-SP0) + x-ms-client-request-id: + - 113b38ca-9f22-11ea-9baf-001a7dda7113 + x-ms-date: + - Tue, 26 May 2020 07:25:25 GMT + 
x-ms-version: + - '2019-12-12' + method: HEAD + uri: https://storagename.blob.core.windows.net/filesystem217a0f53/directory217a0f53/newfile + response: + body: + string: '' + headers: + Accept-Ranges: bytes + Content-Disposition: inline + Content-Language: spanish + Content-Length: '0' + Content-Type: application/octet-stream + Date: Tue, 26 May 2020 07:25:25 GMT + Etag: '"0x8D80145F4ECE3A1"' + Last-Modified: Tue, 26 May 2020 07:25:24 GMT + Server: Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 + x-ms-access-tier: Hot + x-ms-access-tier-inferred: 'true' + x-ms-blob-type: BlockBlob + x-ms-creation-time: Tue, 26 May 2020 07:25:24 GMT + x-ms-expiry-time: Tue, 26 May 2020 08:25:24 GMT + x-ms-lease-state: available + x-ms-lease-status: unlocked + x-ms-meta-hello: world + x-ms-meta-number: '42' + x-ms-request-id: 5cbafb24-901e-0059-502e-33ca23000000 + x-ms-server-encrypted: 'true' + x-ms-version: '2019-12-12' + status: + code: 200 + message: OK + url: https://emilyhnseuap.blob.core.windows.net/filesystem217a0f53/directory217a0f53/newfile +version: 1 diff --git a/sdk/storage/azure-storage-file-datalake/tests/test_file.py b/sdk/storage/azure-storage-file-datalake/tests/test_file.py index 9f00d36a50db..0e09fdbb890f 100644 --- a/sdk/storage/azure-storage-file-datalake/tests/test_file.py +++ b/sdk/storage/azure-storage-file-datalake/tests/test_file.py @@ -603,6 +603,24 @@ def test_get_properties(self): self.assertEqual(properties.metadata['hello'], metadata['hello']) self.assertEqual(properties.content_settings.content_language, content_settings.content_language) + @record + def test_set_expiry(self): + # Arrange + directory_client = self._create_directory_and_return_client() + + metadata = {'hello': 'world', 'number': '42'} + content_settings = ContentSettings( + content_language='spanish', + content_disposition='inline') + expires_on = datetime.utcnow() + timedelta(hours=1) + file_client = directory_client.create_file("newfile", metadata=metadata, 
content_settings=content_settings) + file_client.set_file_expiry("Absolute", expires_on=expires_on) + properties = file_client.get_file_properties() + + # Assert + self.assertTrue(properties) + self.assertIsNotNone(properties.expiry_time) + @record def test_rename_file_with_non_used_name(self): file_client = self._create_file_and_return_client() diff --git a/sdk/storage/azure-storage-file-datalake/tests/test_file_async.py b/sdk/storage/azure-storage-file-datalake/tests/test_file_async.py index ecdee164a215..0c3afa9a0c57 100644 --- a/sdk/storage/azure-storage-file-datalake/tests/test_file_async.py +++ b/sdk/storage/azure-storage-file-datalake/tests/test_file_async.py @@ -734,6 +734,28 @@ def test_get_properties_async(self): loop = asyncio.get_event_loop() loop.run_until_complete(self._test_get_properties()) + async def _test_set_expiry(self): + # Arrange + directory_client = await self._create_directory_and_return_client() + + metadata = {'hello': 'world', 'number': '42'} + content_settings = ContentSettings( + content_language='spanish', + content_disposition='inline') + expires_on = datetime.utcnow() + timedelta(hours=1) + file_client = await directory_client.create_file("newfile", metadata=metadata, content_settings=content_settings) + await file_client.set_file_expiry("Absolute", expires_on=expires_on) + properties = await file_client.get_file_properties() + + # Assert + self.assertTrue(properties) + self.assertIsNotNone(properties.expiry_time) + + @record + def test_set_expiry_async(self): + loop = asyncio.get_event_loop() + loop.run_until_complete(self._test_set_expiry()) + async def _test_rename_file_with_non_used_name(self): file_client = await self._create_file_and_return_client() data_bytes = b"abc"