diff --git a/sdk/core/azure-core/CHANGELOG.md b/sdk/core/azure-core/CHANGELOG.md
index 90e3b12f5520..7a2d0c81144d 100644
--- a/sdk/core/azure-core/CHANGELOG.md
+++ b/sdk/core/azure-core/CHANGELOG.md
@@ -1,7 +1,11 @@
 # Release History
 
-## 1.11.1 (Unreleased)
+## 1.12.0 (Unreleased)
+### Features
+
+- Added `azure.core.messaging.CloudEvent` model that follows the CloudEvents 1.0 spec.
+- Added `azure.core.serialization.NULL` sentinel value, which serializes to `null` on the wire.
 
 ## 1.11.0 (2021-02-08)
diff --git a/sdk/core/azure-core/azure/core/_utils.py b/sdk/core/azure-core/azure/core/_utils.py
new file mode 100644
index 000000000000..9178d4e5c7f1
--- /dev/null
+++ b/sdk/core/azure-core/azure/core/_utils.py
@@ -0,0 +1,70 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+# --------------------------------------------------------------------------
+import datetime
+
+
+class _FixedOffset(datetime.tzinfo):
+    """Fixed offset in minutes east from UTC.
+
+    Copy/pasted from Python doc
+
+    :param int offset: offset in minutes
+    """
+
+    def __init__(self, offset):
+        self.__offset = datetime.timedelta(minutes=offset)
+
+    def utcoffset(self, dt):
+        return self.__offset
+
+    def tzname(self, dt):
+        return str(self.__offset.total_seconds() / 3600)
+
+    def __repr__(self):
+        return "<FixedOffset {}>".format(self.tzname(None))
+
+    def dst(self, dt):
+        return datetime.timedelta(0)
+
+
+try:
+    from datetime import timezone
+
+    TZ_UTC = timezone.utc  # type: ignore
+except ImportError:
+    TZ_UTC = _FixedOffset(0)  # type: ignore
+
+
+def _convert_to_isoformat(date_time):
+    """Deserialize a date in RFC 3339 format to datetime object.
+    Check https://tools.ietf.org/html/rfc3339#section-5.8 for examples.
+    """
+    if not date_time:
+        return None
+    if date_time[-1] == "Z":
+        delta = 0
+        timestamp = date_time[:-1]
+    else:
+        timestamp = date_time[:-6]
+        sign, offset = date_time[-6], date_time[-5:]
+        # offset has the form "HH:MM"; convert it to signed minutes.
+        delta = int(sign + offset[:2]) * 60 + int(sign + offset[-2:])
+
+    if delta == 0:
+        tzinfo = TZ_UTC
+    else:
+        try:
+            tzinfo = datetime.timezone(datetime.timedelta(minutes=delta))
+        except AttributeError:
+            tzinfo = _FixedOffset(delta)
+
+    try:
+        deserialized = datetime.datetime.strptime(timestamp, "%Y-%m-%dT%H:%M:%S.%f")
+    except ValueError:
+        deserialized = datetime.datetime.strptime(timestamp, "%Y-%m-%dT%H:%M:%S")
+
+    deserialized = deserialized.replace(tzinfo=tzinfo)
+    return deserialized
diff --git a/sdk/core/azure-core/azure/core/_version.py b/sdk/core/azure-core/azure/core/_version.py
index 14d127f747d9..7643b787eff9 100644
--- a/sdk/core/azure-core/azure/core/_version.py
+++ b/sdk/core/azure-core/azure/core/_version.py
@@ -9,4 +9,4 @@
 # regenerated.
 # --------------------------------------------------------------------------
 
-VERSION = "1.11.1"
+VERSION = "1.12.0"
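Note (illustrative, not part of the patch): a quick sketch of what the new `_convert_to_isoformat` helper is expected to return for the RFC 3339 shapes it handles, assuming the private import path `azure.core._utils`. The timestamps are arbitrary examples.

    from datetime import timedelta

    from azure.core._utils import _convert_to_isoformat, TZ_UTC

    # A trailing "Z" is treated as UTC.
    assert _convert_to_isoformat("2021-02-18T20:18:10.53986Z").tzinfo is TZ_UTC

    # A "+HH:MM" / "-HH:MM" suffix becomes a fixed-offset timezone.
    dt = _convert_to_isoformat("2021-02-23T17:11:13.308772-08:00")
    assert dt.utcoffset() == timedelta(hours=-8)

    # Empty or missing input deserializes to None.
    assert _convert_to_isoformat(None) is None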
diff --git a/sdk/core/azure-core/azure/core/messaging.py b/sdk/core/azure-core/azure/core/messaging.py
new file mode 100644
index 000000000000..9131a7b46d69
--- /dev/null
+++ b/sdk/core/azure-core/azure/core/messaging.py
@@ -0,0 +1,168 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+# --------------------------------------------------------------------------
+import uuid
+from base64 import b64decode
+from datetime import datetime
+from azure.core._utils import _convert_to_isoformat, TZ_UTC
+from azure.core.serialization import NULL
+
+try:
+    from typing import TYPE_CHECKING, cast, Union
+except ImportError:
+    TYPE_CHECKING = False
+
+if TYPE_CHECKING:
+    from typing import Any, Optional, Dict
+
+
+__all__ = ["CloudEvent"]
+
+
+class CloudEvent(object):  # pylint:disable=too-many-instance-attributes
+    """Properties of the CloudEvent 1.0 Schema.
+    All required parameters must be populated in order to send to Azure.
+
+    :param source: Required. Identifies the context in which an event happened. The combination of id and source must
+     be unique for each distinct event. If publishing to a domain topic, source must be the domain name.
+    :type source: str
+    :param type: Required. Type of event related to the originating occurrence.
+    :type type: str
+    :keyword data: Optional. Event data specific to the event type.
+    :type data: object
+    :keyword time: Optional. The time (in UTC) the event was generated.
+    :type time: ~datetime.datetime
+    :keyword dataschema: Optional. Identifies the schema that data adheres to.
+    :type dataschema: str
+    :keyword datacontenttype: Optional. Content type of data value.
+    :type datacontenttype: str
+    :keyword subject: Optional. This describes the subject of the event in the context of the event producer
+     (identified by source).
+    :type subject: str
+    :keyword specversion: Optional. The version of the CloudEvent spec. Defaults to "1.0".
+    :type specversion: str
+    :keyword id: Optional. An identifier for the event. The combination of id and source must be
+     unique for each distinct event. If not provided, a random UUID will be generated and used.
+    :type id: Optional[str]
+    :keyword extensions: Optional. A CloudEvent MAY include any number of additional context attributes
+     with distinct names represented as key-value pairs. Each extension attribute name must be lowercase,
+     alphanumeric, and no more than 20 characters long.
+    :type extensions: Optional[Dict]
+    :ivar source: Identifies the context in which an event happened. The combination of id and source must
+     be unique for each distinct event. If publishing to a domain topic, source must be the domain name.
+    :vartype source: str
+    :ivar data: Event data specific to the event type.
+    :vartype data: object
+    :ivar type: Type of event related to the originating occurrence.
+    :vartype type: str
+    :ivar time: The time (in UTC) the event was generated.
+    :vartype time: ~datetime.datetime
+    :ivar dataschema: Identifies the schema that data adheres to.
+    :vartype dataschema: str
+    :ivar datacontenttype: Content type of data value.
+    :vartype datacontenttype: str
+    :ivar subject: This describes the subject of the event in the context of the event producer
+     (identified by source).
+    :vartype subject: str
+    :ivar specversion: The version of the CloudEvent spec. Defaults to "1.0".
+    :vartype specversion: str
+    :ivar id: An identifier for the event. The combination of id and source must be
+     unique for each distinct event. If not provided, a random UUID will be generated and used.
+    :vartype id: str
+    :ivar extensions: A CloudEvent MAY include any number of additional context attributes
+     with distinct names represented as key-value pairs. Each extension attribute name must be lowercase,
+     alphanumeric, and no more than 20 characters long.
+    :vartype extensions: Dict
+    """
+
+    def __init__(self, source, type, **kwargs):  # pylint: disable=redefined-builtin
+        # type: (str, str, **Any) -> None
+        self.source = source  # type: str
+        self.type = type  # type: str
+        self.specversion = kwargs.pop("specversion", "1.0")  # type: Optional[str]
+        self.id = kwargs.pop("id", str(uuid.uuid4()))  # type: Optional[str]
+        self.time = kwargs.pop("time", datetime.now(TZ_UTC))  # type: Optional[datetime]
+
+        self.datacontenttype = kwargs.pop("datacontenttype", None)  # type: Optional[str]
+        self.dataschema = kwargs.pop("dataschema", None)  # type: Optional[str]
+        self.subject = kwargs.pop("subject", None)  # type: Optional[str]
+        self.data = kwargs.pop("data", None)  # type: Optional[object]
+
+        try:
+            self.extensions = kwargs.pop("extensions")  # type: Optional[Dict]
+            for key in self.extensions.keys():  # type:ignore # extensions won't be None here
+                if not key.islower() or not key.isalnum():
+                    raise ValueError(
+                        "Extension attributes should be lowercase and alphanumeric."
+                    )
+        except KeyError:
+            self.extensions = None
+
+        if kwargs:
+            remaining = ", ".join(kwargs.keys())
+            raise ValueError(
+                "Unexpected keyword arguments {}. Any extension attributes must be passed explicitly using extensions."
+                .format(remaining)
+            )
+
+    def __repr__(self):
+        return "CloudEvent(source={}, type={}, specversion={}, id={}, time={})".format(
+            self.source, self.type, self.specversion, self.id, self.time
+        )[:1024]
+
+    @classmethod
+    def from_dict(cls, event):
+        # type: (Dict) -> CloudEvent
+        """
+        Returns the deserialized CloudEvent object when a dict is provided.
+        :param event: The dict representation of the event which needs to be deserialized.
+        :type event: dict
+        :rtype: CloudEvent
+        """
+        kwargs = {}  # type: Dict[Any, Any]
+        reserved_attr = [
+            "data",
+            "data_base64",
+            "id",
+            "source",
+            "type",
+            "specversion",
+            "time",
+            "dataschema",
+            "datacontenttype",
+            "subject",
+        ]
+
+        if "data" in event and "data_base64" in event:
+            raise ValueError(
+                "Invalid input. Only one of data and data_base64 must be present."
+            )
+
+        if "data" in event:
+            data = event.get("data")
+            kwargs["data"] = data if data is not None else NULL
+        elif "data_base64" in event:
+            kwargs["data"] = b64decode(
+                cast(Union[str, bytes], event.get("data_base64"))
+            )
+
+        for item in ["datacontenttype", "dataschema", "subject"]:
+            if item in event:
+                val = event.get(item)
+                kwargs[item] = val if val is not None else NULL
+
+        extensions = {k: v for k, v in event.items() if k not in reserved_attr}
+        if extensions:
+            kwargs["extensions"] = extensions
+
+        return cls(
+            id=event.get("id"),
+            source=event["source"],
+            type=event["type"],
+            specversion=event.get("specversion"),
+            time=_convert_to_isoformat(event.get("time")),
+            **kwargs
+        )
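Note (illustrative, not part of the patch): a minimal sketch of how the new `CloudEvent` model is meant to be used, based on the docstring above. All values are placeholders.

    from azure.core.messaging import CloudEvent

    # Construct an event directly; id, time and specversion get sensible defaults.
    event = CloudEvent(
        source="/mycontext",
        type="Contoso.Items.ItemReceived",
        data={"itemSku": "Contoso Item SKU #1"},
        subject="Door1",
        extensions={"region": "westus"},  # extension names must be lowercase alphanumeric
    )
    assert event.specversion == "1.0"
    assert event.id is not None

    # Or deserialize a CloudEvents 1.0 JSON dictionary.
    event2 = CloudEvent.from_dict({
        "specversion": "1.0",
        "id": "9ddf9b10-fe3d-4a16-94bc-c0298924ded1",
        "source": "/mycontext",
        "type": "Contoso.Items.ItemReceived",
        "time": "2021-02-18T20:18:10Z",
        "data": {"itemSku": "Contoso Item SKU #1"},
        "region": "westus",  # unknown attributes are collected into event2.extensions
    })
    assert event2.extensions == {"region": "westus"}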
diff --git a/sdk/core/azure-core/azure/core/pipeline/policies/_utils.py b/sdk/core/azure-core/azure/core/pipeline/policies/_utils.py
index 173f19869804..76ee690c1d8f 100644
--- a/sdk/core/azure-core/azure/core/pipeline/policies/_utils.py
+++ b/sdk/core/azure-core/azure/core/pipeline/policies/_utils.py
@@ -26,29 +26,7 @@
 import datetime
 import email.utils
 from requests.structures import CaseInsensitiveDict
-
-class _FixedOffset(datetime.tzinfo):
-    """Fixed offset in minutes east from UTC.
-
-    Copy/pasted from Python doc
-
-    :param int offset: offset in minutes
-    """
-
-    def __init__(self, offset):
-        self.__offset = datetime.timedelta(minutes=offset)
-
-    def utcoffset(self, dt):
-        return self.__offset
-
-    def tzname(self, dt):
-        return str(self.__offset.total_seconds()/3600)
-
-    def __repr__(self):
-        return "<FixedOffset {}>".format(self.tzname(None))
-
-    def dst(self, dt):
-        return datetime.timedelta(0)
+from ..._utils import _FixedOffset
 
 
 def _parse_http_date(text):
     """Parse a HTTP date format into datetime."""
diff --git a/sdk/core/azure-core/azure/core/serialization.py b/sdk/core/azure-core/azure/core/serialization.py
new file mode 100644
index 000000000000..c3422efa0c27
--- /dev/null
+++ b/sdk/core/azure-core/azure/core/serialization.py
@@ -0,0 +1,23 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+# --------------------------------------------------------------------------
+
+__all__ = ["NULL"]
+
+class _Null(object):
+    """To create a Falsy object
+    """
+    def __bool__(self):
+        return False
+
+    __nonzero__ = __bool__  # Python2 compatibility
+
+
+NULL = _Null()
+"""
+A falsy sentinel object which is supposed to be used to specify attributes
+with no data. This gets serialized to `null` on the wire.
+"""
diff --git a/sdk/core/azure-core/doc/azure.core.rst b/sdk/core/azure-core/doc/azure.core.rst
index 26b0607f3ca4..36716ad49c62 100644
--- a/sdk/core/azure-core/doc/azure.core.rst
+++ b/sdk/core/azure-core/doc/azure.core.rst
@@ -41,6 +41,14 @@ azure.core.exceptions
     :members:
     :undoc-members:
 
+azure.core.messaging
+--------------------
+
+.. automodule:: azure.core.messaging
+    :members:
+    :undoc-members:
+    :inherited-members:
+
 azure.core.paging
 -----------------
 
@@ -57,3 +65,10 @@ azure.core.settings
     :undoc-members:
     :inherited-members:
 
+azure.core.serialization
+------------------------
+
+.. automodule:: azure.core.serialization
+    :members:
+    :undoc-members:
+    :inherited-members:
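Note (illustrative, not part of the patch): `_FixedOffset` now lives in `azure.core._utils` so the retry policy helpers and the new messaging module share one implementation; it is only the Python 2 fallback, since on Python 3 `TZ_UTC` resolves to `datetime.timezone.utc`. A small sketch of its behavior:

    from datetime import timedelta

    from azure.core._utils import _FixedOffset

    ist = _FixedOffset(330)  # UTC+05:30, expressed in minutes east of UTC
    assert ist.utcoffset(None) == timedelta(minutes=330)
    assert ist.dst(None) == timedelta(0)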
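Note (illustrative, not part of the patch): a short sketch of the intended difference between `NULL` and `None`, per the module docstring above. The `payload_value` name is hypothetical.

    from azure.core.serialization import NULL

    # NULL is falsy but is a distinct sentinel, so "explicitly null on the wire"
    # can be told apart from "not set at all" (None).
    assert not NULL
    assert NULL is not None

    payload_value = NULL  # hypothetical attribute meant to serialize as JSON null
    if payload_value is NULL:
        print("send an explicit null on the wire")
    elif payload_value is None:
        print("omit the field entirely")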
diff --git a/sdk/core/azure-core/tests/test_messaging_cloud_event.py b/sdk/core/azure-core/tests/test_messaging_cloud_event.py
new file mode 100644
index 000000000000..c0a88b845488
--- /dev/null
+++ b/sdk/core/azure-core/tests/test_messaging_cloud_event.py
@@ -0,0 +1,282 @@
+# ------------------------------------
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License.
+# ------------------------------------
+import logging
+import sys
+import os
+import pytest
+import json
+import datetime
+
+from azure.core.messaging import CloudEvent
+from azure.core.serialization import NULL
+
+# Cloud Event tests
+def test_cloud_event_constructor():
+    event = CloudEvent(
+        source='Azure.Core.Sample',
+        type='SampleType',
+        data='cloudevent'
+    )
+
+    assert event.specversion == '1.0'
+    assert event.time.__class__ == datetime.datetime
+    assert event.id is not None
+    assert event.source == 'Azure.Core.Sample'
+    assert event.data == 'cloudevent'
+
+def test_cloud_event_constructor_unexpected_keyword():
+    with pytest.raises(ValueError) as e:
+        event = CloudEvent(
+            source='Azure.Core.Sample',
+            type='SampleType',
+            data='cloudevent',
+            unexpected_keyword="not allowed",
+            another_bad_kwarg="not allowed either"
+        )
+    # Assertions on the error message run after the with block; inside it they would not execute.
+    assert "unexpected_keyword" in str(e.value)
+    assert "another_bad_kwarg" in str(e.value)
+
+def test_cloud_event_constructor_blank_data():
+    event = CloudEvent(
+        source='Azure.Core.Sample',
+        type='SampleType',
+        data=''
+    )
+
+    assert event.specversion == '1.0'
+    assert event.time.__class__ == datetime.datetime
+    assert event.id is not None
+    assert event.source == 'Azure.Core.Sample'
+    assert event.data == ''
+
+def test_cloud_event_constructor_NULL_data():
+    event = CloudEvent(
+        source='Azure.Core.Sample',
+        type='SampleType',
+        data=NULL
+    )
+
+    assert event.data == NULL
+    assert event.data is NULL
+
+def test_cloud_event_constructor_none_data():
+    event = CloudEvent(
+        source='Azure.Core.Sample',
+        type='SampleType',
+        data=None
+    )
+
+    assert event.data is None
+
+def test_cloud_event_constructor_missing_data():
+    event = CloudEvent(
+        source='Azure.Core.Sample',
+        type='SampleType',
+    )
+
+    assert event.data is None
+    assert event.datacontenttype is None
+    assert event.dataschema is None
+    assert event.subject is None
assert "id" in cloud_storage_dict + assert "data" in cloud_storage_dict + + +def test_cloud_custom_dict_with_extensions(): + cloud_custom_dict_with_extensions = { + "id":"de0fd76c-4ef4-4dfb-ab3a-8f24a307e033", + "source":"https://egtest.dev/cloudcustomevent", + "data":{"team": "event grid squad"}, + "type":"Azure.Sdk.Sample", + "time":"2021-02-18T20:18:10.53986+00:00", + "specversion":"1.0", + "ext1": "example", + "ext2": "example2" + } + event = CloudEvent.from_dict(cloud_custom_dict_with_extensions) + assert event.data == {"team": "event grid squad"} + assert event.__class__ == CloudEvent + assert event.time.month == 2 + assert event.time.day == 18 + assert event.time.hour == 20 + assert event.extensions == {"ext1": "example", "ext2": "example2"} + +def test_cloud_custom_dict_blank_data(): + cloud_custom_dict_with_extensions = { + "id":"de0fd76c-4ef4-4dfb-ab3a-8f24a307e033", + "source":"https://egtest.dev/cloudcustomevent", + "data":'', + "type":"Azure.Sdk.Sample", + "time":"2021-02-18T20:18:10+00:00", + "specversion":"1.0", + } + event = CloudEvent.from_dict(cloud_custom_dict_with_extensions) + assert event.data == '' + assert event.__class__ == CloudEvent + +def test_cloud_custom_dict_no_data(): + cloud_custom_dict_with_missing_data = { + "id":"de0fd76c-4ef4-4dfb-ab3a-8f24a307e033", + "source":"https://egtest.dev/cloudcustomevent", + "type":"Azure.Sdk.Sample", + "time":"2021-02-18T20:18:10+00:00", + "specversion":"1.0", + } + event = CloudEvent.from_dict(cloud_custom_dict_with_missing_data) + assert event.__class__ == CloudEvent + assert event.data is None + +def test_cloud_custom_dict_null_data(): + cloud_custom_dict_with_none_data = { + "id":"de0fd76c-4ef4-4dfb-ab3a-8f24a307e033", + "source":"https://egtest.dev/cloudcustomevent", + "type":"Azure.Sdk.Sample", + "data":None, + "dataschema":None, + "time":"2021-02-18T20:18:10+00:00", + "specversion":"1.0", + } + event = CloudEvent.from_dict(cloud_custom_dict_with_none_data) + assert event.__class__ == CloudEvent + assert event.data == NULL + assert event.dataschema is NULL + +def test_cloud_custom_dict_valid_optional_attrs(): + cloud_custom_dict_with_none_data = { + "id":"de0fd76c-4ef4-4dfb-ab3a-8f24a307e033", + "source":"https://egtest.dev/cloudcustomevent", + "type":"Azure.Sdk.Sample", + "data":None, + "dataschema":"exists", + "time":"2021-02-18T20:18:10+00:00", + "specversion":"1.0", + } + event = CloudEvent.from_dict(cloud_custom_dict_with_none_data) + assert event.__class__ == CloudEvent + assert event.data is NULL + assert event.dataschema == "exists" + +def test_cloud_custom_dict_both_data_and_base64(): + cloud_custom_dict_with_data_and_base64 = { + "id":"de0fd76c-4ef4-4dfb-ab3a-8f24a307e033", + "source":"https://egtest.dev/cloudcustomevent", + "data":"abc", + "data_base64":"Y2Wa==", + "type":"Azure.Sdk.Sample", + "time":"2021-02-18T20:18:10+00:00", + "specversion":"1.0", + } + with pytest.raises(ValueError): + event = CloudEvent.from_dict(cloud_custom_dict_with_data_and_base64) + +def test_cloud_custom_dict_base64(): + cloud_custom_dict_base64 = { + "id":"de0fd76c-4ef4-4dfb-ab3a-8f24a307e033", + "source":"https://egtest.dev/cloudcustomevent", + "data_base64":'Y2xvdWRldmVudA==', + "type":"Azure.Sdk.Sample", + "time":"2021-02-23T17:11:13.308772-08:00", + "specversion":"1.0" + } + event = CloudEvent.from_dict(cloud_custom_dict_base64) + assert event.data == b'cloudevent' + assert event.specversion == "1.0" + assert event.time.hour == 17 + assert event.time.minute == 11 + assert event.time.day == 23 + assert event.time.tzinfo is not 
+
+def test_data_and_base64_both_exist_raises():
+    with pytest.raises(ValueError):
+        CloudEvent.from_dict(
+            {"source":'sample',
+             "type":'type',
+             "data":'data',
+             "data_base64":'Y2kQ=='
+            }
+        )
+
+def test_cloud_event_repr():
+    event = CloudEvent(
+        source='Azure.Core.Sample',
+        type='SampleType',
+        data='cloudevent'
+    )
+
+    assert repr(event).startswith("CloudEvent(source=Azure.Core.Sample, type=SampleType, specversion=1.0,")
+
+def test_extensions_upper_case_value_error():
+    with pytest.raises(ValueError):
+        event = CloudEvent(
+            source='sample',
+            type='type',
+            data='data',
+            extensions={"lowercase123": "accepted", "NOTlower123": "not allowed"}
+        )
+
+def test_extensions_not_alphanumeric_value_error():
+    with pytest.raises(ValueError):
+        event = CloudEvent(
+            source='sample',
+            type='type',
+            data='data',
+            extensions={"lowercase123": "accepted", "not@lph@nu^^3ic": "not allowed"}
+        )
+
+def test_cloud_from_dict_with_invalid_extensions():
+    cloud_custom_dict_with_extensions = {
+        "id":"de0fd76c-4ef4-4dfb-ab3a-8f24a307e033",
+        "source":"https://egtest.dev/cloudcustomevent",
+        "data":{"team": "event grid squad"},
+        "type":"Azure.Sdk.Sample",
+        "time":"2020-08-07T02:06:08.11969Z",
+        "specversion":"1.0",
+        "ext1": "example",
+        "BADext2": "example2"
+    }
+    with pytest.raises(ValueError):
+        event = CloudEvent.from_dict(cloud_custom_dict_with_extensions)
diff --git a/sdk/core/azure-core/tests/test_serialization.py b/sdk/core/azure-core/tests/test_serialization.py
new file mode 100644
index 000000000000..7ac58850cd91
--- /dev/null
+++ b/sdk/core/azure-core/tests/test_serialization.py
@@ -0,0 +1,11 @@
+# ------------------------------------
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License.
+# ------------------------------------
+
+from azure.core.serialization import NULL
+
+def test_NULL_is_falsy():
+    assert NULL is not False
+    assert bool(NULL) is False
+    assert NULL is NULL
\ No newline at end of file
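Note (illustrative, not part of the patch): the deserialization behaviors the tests above pin down, gathered in one sketch. `data_base64` is decoded to bytes, an explicit JSON null becomes the `NULL` sentinel, and a missing `data` key stays `None`.

    from azure.core.messaging import CloudEvent
    from azure.core.serialization import NULL

    base = {"source": "/mycontext", "type": "Contoso.Items.ItemReceived", "specversion": "1.0"}

    # data_base64 is base64-decoded into bytes.
    assert CloudEvent.from_dict(dict(base, data_base64="Y2xvdWRldmVudA==")).data == b"cloudevent"

    # An explicit null is preserved as the NULL sentinel ...
    assert CloudEvent.from_dict(dict(base, data=None)).data is NULL

    # ... while an absent data key simply stays None.
    assert CloudEvent.from_dict(dict(base)).data is None

    # Supplying both data and data_base64 is rejected.
    try:
        CloudEvent.from_dict(dict(base, data="abc", data_base64="YWJj"))
    except ValueError:
        pass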