From 5e47fcd9fd4da8f4257da9acbfb4dcb32795c982 Mon Sep 17 00:00:00 2001 From: Scott Ernst Date: Thu, 22 Jun 2023 10:06:15 -0500 Subject: [PATCH] Introduce to_dict Introduce `to_dict` to the objects included in the existing JSON serialization process for `ReadableSpan`, `MetricsData`, `LogRecord`, and `Resource` objects. This includes adding `to_dict` to objects that are included within the serialized data structures of these objects. In places where `repr()` serialization was used, it has been replaced by a JSON-compatible serialization instead. Inconsistencies between null and empty string values were preserved, but in cases where attributes are optional, an empty dictionary is provided as well to be more consistent with cases where attributes are not optional and an empty dictionary represents no attributes were specified on the containing object. These changes also included: 1. Dictionary typing was included for all the `to_dict` methods for clarity in subsequent usage. 2. `DataT` and `DataPointT` did not include the exponential histogram types in point.py, and so those were added with new `to_json` and `to_dict` methods as well for consistency. It appears that the exponential types were added later and including them in the types might have been overlooked. Please let me know if that is a misunderstanding on my part. 3. OrderedDict was removed in a number of places associated with the existing `to_json` functionality given its redundancy for Python 3.7+ compatibility. I was assuming this was legacy code for previous compatibility, but please let me know if that's not the case as well. 4. `to_dict` was added to objects like `SpanContext`, `Link`, and `Event` that were previously being serialized by static methods within the `ReadableSpan` class and accessing private/protected members. This simplified the serialization in the `ReadableSpan` class and those methods were removed. 
However, once again, let me know if there was a larger purpose to those I could not find. Finally, I used `to_dict` as the method names here to be consistent with other related usages. For example, `dataclasses.asdict()`. But, mostly because that was by far the most popular usage within the larger community: 328k files found on GitHub that define `to_dict` functions, which include some of the most popular Python libraries to date: https://github.com/search?q=%22def+to_dict%28%22+language%3APython&type=code&p=1&l=Python versus 3.3k files found on GitHub that define `to_dictionary` functions: https://github.com/search?q=%22def+to_dictionary%28%22+language%3APython&type=code&l=Python However, if there is a preference for this library to use `to_dictionary` instead let me know and I will adjust. --- .../src/opentelemetry/trace/__init__.py | 14 + .../src/opentelemetry/trace/span.py | 15 + .../src/opentelemetry/trace/status.py | 14 + .../sdk/_logs/_internal/__init__.py | 59 ++-- .../sdk/metrics/_internal/point.py | 313 +++++++++++++----- .../opentelemetry/sdk/resources/__init__.py | 21 +- .../src/opentelemetry/sdk/trace/__init__.py | 135 ++++---- .../opentelemetry/sdk/util/instrumentation.py | 25 +- .../tests/logs/test_log_record.py | 8 +- opentelemetry-sdk/tests/trace/test_trace.py | 26 +- 10 files changed, 424 insertions(+), 206 deletions(-) diff --git a/opentelemetry-api/src/opentelemetry/trace/__init__.py b/opentelemetry-api/src/opentelemetry/trace/__init__.py index 304df227542..48880d5eae7 100644 --- a/opentelemetry-api/src/opentelemetry/trace/__init__.py +++ b/opentelemetry-api/src/opentelemetry/trace/__init__.py @@ -103,6 +103,7 @@ NonRecordingSpan, Span, SpanContext, + SpanContextDict, TraceFlags, TraceState, format_span_id, @@ -130,6 +131,13 @@ def attributes(self) -> types.Attributes: pass +class LinkDict(typing.TypedDict): + """Dictionary representation of a span Link.""" + + context: SpanContextDict + attributes: types.Attributes + + class Link(_LinkBase): 
"""A link to a `Span`. The attributes of a Link are immutable. @@ -152,6 +160,12 @@ def __init__( def attributes(self) -> types.Attributes: return self._attributes + def to_dict(self) -> LinkDict: + return { + "context": self.context.to_dict(), + "attributes": dict(self._attributes), + } + _Links = Optional[Sequence[Link]] diff --git a/opentelemetry-api/src/opentelemetry/trace/span.py b/opentelemetry-api/src/opentelemetry/trace/span.py index 805b2b06b18..cfc88274926 100644 --- a/opentelemetry-api/src/opentelemetry/trace/span.py +++ b/opentelemetry-api/src/opentelemetry/trace/span.py @@ -403,6 +403,14 @@ def values(self) -> typing.ValuesView[str]: _SPAN_ID_MAX_VALUE = 2**64 - 1 +class SpanContextDict(typing.TypedDict): + """Dictionary representation of a SpanContext.""" + + trace_id: str + span_id: str + trace_state: typing.Dict[str, str] + + class SpanContext( typing.Tuple[int, int, bool, "TraceFlags", "TraceState", bool] ): @@ -477,6 +485,13 @@ def trace_state(self) -> "TraceState": def is_valid(self) -> bool: return self[5] # pylint: disable=unsubscriptable-object + def to_dict(self) -> SpanContextDict: + return { + "trace_id": f"0x{format_trace_id(self.trace_id)}", + "span_id": f"0x{format_span_id(self.span_id)}", + "trace_state": dict(self.trace_state), + } + def __setattr__(self, *args: str) -> None: _logger.debug( "Immutable type, ignoring call to set attribute", stack_info=True diff --git a/opentelemetry-api/src/opentelemetry/trace/status.py b/opentelemetry-api/src/opentelemetry/trace/status.py index ada7fa1ebda..067a1366c3c 100644 --- a/opentelemetry-api/src/opentelemetry/trace/status.py +++ b/opentelemetry-api/src/opentelemetry/trace/status.py @@ -32,6 +32,13 @@ class StatusCode(enum.Enum): """The operation contains an error.""" +class StatusDict(typing.TypedDict): + """Dictionary representation of a trace Status.""" + + status_code: str + description: typing.Optional[str] + + class Status: """Represents the status of a finished Span. 
@@ -80,3 +87,10 @@ def is_ok(self) -> bool: def is_unset(self) -> bool: """Returns true if unset, false otherwise.""" return self._status_code is StatusCode.UNSET + + def to_dict(self) -> StatusDict: + """Convert to a dictionary representation of the status.""" + return { + "status_code": str(self.status_code.name), + "description": self.description, + } diff --git a/opentelemetry-sdk/src/opentelemetry/sdk/_logs/_internal/__init__.py b/opentelemetry-sdk/src/opentelemetry/sdk/_logs/_internal/__init__.py index d42bcaf680e..6859024c897 100644 --- a/opentelemetry-sdk/src/opentelemetry/sdk/_logs/_internal/__init__.py +++ b/opentelemetry-sdk/src/opentelemetry/sdk/_logs/_internal/__init__.py @@ -19,6 +19,7 @@ import logging import threading import traceback +import typing from os import environ from time import time_ns from typing import Any, Callable, Optional, Tuple, Union @@ -37,7 +38,7 @@ OTEL_ATTRIBUTE_COUNT_LIMIT, OTEL_ATTRIBUTE_VALUE_LENGTH_LIMIT, ) -from opentelemetry.sdk.resources import Resource +from opentelemetry.sdk.resources import Resource, ResourceDict from opentelemetry.sdk.util import ns_to_iso_str from opentelemetry.sdk.util.instrumentation import InstrumentationScope from opentelemetry.semconv.trace import SpanAttributes @@ -147,6 +148,20 @@ def _from_env_if_absent( ) +class LogRecordDict(typing.TypedDict): + """Dictionary representation of a LogRecord.""" + + body: typing.Optional[typing.Any] + severity_number: int + severity_text: typing.Optional[str] + attributes: Attributes + timestamp: typing.Optional[str] + trace_id: str + span_id: str + trace_flags: typing.Optional[int] + resource: typing.Optional[ResourceDict] + + class LogRecord(APILogRecord): """A LogRecord instance represents an event being logged. 
@@ -194,29 +209,27 @@ def __eq__(self, other: object) -> bool: return NotImplemented return self.__dict__ == other.__dict__ + def to_dict(self) -> LogRecordDict: + return { + "body": self.body, + "severity_number": self.severity_number.value + if self.severity_number is not None + else SeverityNumber.UNSPECIFIED.value, + "severity_text": self.severity_text, + "attributes": dict(self.attributes or {}), + "timestamp": ns_to_iso_str(self.timestamp), + "trace_id": f"0x{format_trace_id(self.trace_id)}" + if self.trace_id is not None + else "", + "span_id": f"0x{format_span_id(self.span_id)}" + if self.span_id is not None + else "", + "trace_flags": self.trace_flags, + "resource": self.resource.to_dict() if self.resource else None, + } + def to_json(self, indent=4) -> str: - return json.dumps( - { - "body": self.body, - "severity_number": repr(self.severity_number), - "severity_text": self.severity_text, - "attributes": dict(self.attributes) - if bool(self.attributes) - else None, - "timestamp": ns_to_iso_str(self.timestamp), - "trace_id": f"0x{format_trace_id(self.trace_id)}" - if self.trace_id is not None - else "", - "span_id": f"0x{format_span_id(self.span_id)}" - if self.span_id is not None - else "", - "trace_flags": self.trace_flags, - "resource": json.loads(self.resource.to_json()) - if self.resource - else None, - }, - indent=indent, - ) + return json.dumps(self.to_dict(), indent=indent) @property def dropped_attributes(self) -> int: diff --git a/opentelemetry-sdk/src/opentelemetry/sdk/metrics/_internal/point.py b/opentelemetry-sdk/src/opentelemetry/sdk/metrics/_internal/point.py index cba37e7fdf1..abb8de32ad1 100644 --- a/opentelemetry-sdk/src/opentelemetry/sdk/metrics/_internal/point.py +++ b/opentelemetry-sdk/src/opentelemetry/sdk/metrics/_internal/point.py @@ -11,20 +11,28 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
- # pylint: disable=unused-import - -from dataclasses import asdict, dataclass -from json import dumps, loads +import typing +from dataclasses import dataclass +from json import dumps from typing import Optional, Sequence, Union # This kind of import is needed to avoid Sphinx errors. import opentelemetry.sdk.metrics._internal -from opentelemetry.sdk.resources import Resource +from opentelemetry.sdk.resources import Resource, ResourceDict from opentelemetry.sdk.util.instrumentation import InstrumentationScope from opentelemetry.util.types import Attributes +class NumberDataPointDict(typing.TypedDict): + """Dictionary representation of a NumberDataPoint.""" + + attributes: Attributes + start_time_unix_nano: int + time_unix_nano: int + value: Union[int, float] + + @dataclass(frozen=True) class NumberDataPoint: """Single data point in a timeseries that describes the time-varying scalar @@ -36,8 +44,30 @@ class NumberDataPoint: time_unix_nano: int value: Union[int, float] + def to_dict(self) -> NumberDataPointDict: + return { + "attributes": dict(self.attributes), + "start_time_unix_nano": self.start_time_unix_nano, + "time_unix_nano": self.time_unix_nano, + "value": self.value, + } + def to_json(self, indent=4) -> str: - return dumps(asdict(self), indent=indent) + return dumps(self.to_dict(), indent=indent) + + +class HistogramDataPointDict(typing.TypedDict): + """Dictionary representation of a HistogramDataPoint.""" + + attributes: Attributes + start_time_unix_nano: int + time_unix_nano: int + count: int + sum: Union[int, float] + bucket_counts: typing.List[int] + explicit_bounds: typing.List[float] + min: float + max: float @dataclass(frozen=True) @@ -56,8 +86,28 @@ class HistogramDataPoint: min: float max: float + def to_dict(self) -> HistogramDataPointDict: + return { + "attributes": dict(self.attributes), + "start_time_unix_nano": self.start_time_unix_nano, + "time_unix_nano": self.time_unix_nano, + "count": self.count, + "sum": self.sum, + "bucket_counts": 
list(self.bucket_counts), + "explicit_bounds": list(self.explicit_bounds), + "min": self.min, + "max": self.max, + } + def to_json(self, indent=4) -> str: - return dumps(asdict(self), indent=indent) + return dumps(self.to_dict(), indent=indent) + + +class BucketsDict(typing.TypedDict): + """Dictionary representation of a Buckets object.""" + + offset: int + bucket_counts: typing.List[int] @dataclass(frozen=True) @@ -65,6 +115,29 @@ class Buckets: offset: int bucket_counts: Sequence[int] + def to_dict(self) -> BucketsDict: + return { + "offset": self.offset, + "bucket_counts": list(self.bucket_counts), + } + + +class ExponentialHistogramDataPointDict(typing.TypedDict): + """Dictionary representation of an ExponentialHistogramDataPoint.""" + + attributes: Attributes + start_time_unix_nano: int + time_unix_nano: int + count: int + sum: Union[int, float] + scale: int + zero_count: int + positive: BucketsDict + negative: BucketsDict + flags: int + min: float + max: float + @dataclass(frozen=True) class ExponentialHistogramDataPoint: @@ -86,8 +159,31 @@ class ExponentialHistogramDataPoint: min: float max: float + def to_dict(self) -> ExponentialHistogramDataPointDict: + return { + "attributes": dict(self.attributes), + "start_time_unix_nano": self.start_time_unix_nano, + "time_unix_nano": self.time_unix_nano, + "count": self.count, + "sum": self.sum, + "scale": self.scale, + "zero_count": self.zero_count, + "positive": self.positive.to_dict(), + "negative": self.negative.to_dict(), + "flags": self.flags, + "min": self.min, + "max": self.max, + } + def to_json(self, indent=4) -> str: - return dumps(asdict(self), indent=indent) + return dumps(self.to_dict(), indent=indent) + + +class ExponentialHistogramDict(typing.TypedDict): + """Dictionary representation of an ExponentialHistogram.""" + + data_points: typing.List[ExponentialHistogramDataPointDict] + aggregation_temporality: int @dataclass(frozen=True) @@ -101,6 +197,22 @@ class ExponentialHistogram: 
"opentelemetry.sdk.metrics.export.AggregationTemporality" ) + def to_dict(self) -> ExponentialHistogramDict: + return { + "data_points": [ + data_point.to_dict() for data_point in self.data_points + ], + "aggregation_temporality": self.aggregation_temporality.value, + } + + +class SumDict(typing.TypedDict): + """Dictionary representation of a Sum.""" + + data_points: typing.List[NumberDataPointDict] + aggregation_temporality: int + is_monotonic: bool + @dataclass(frozen=True) class Sum: @@ -113,18 +225,23 @@ class Sum: ) is_monotonic: bool + def to_dict(self) -> SumDict: + return { + "data_points": [ + data_point.to_dict() for data_point in self.data_points + ], + "aggregation_temporality": self.aggregation_temporality.value, + "is_monotonic": self.is_monotonic, + } + def to_json(self, indent=4) -> str: - return dumps( - { - "data_points": [ - loads(data_point.to_json(indent=indent)) - for data_point in self.data_points - ], - "aggregation_temporality": self.aggregation_temporality, - "is_monotonic": self.is_monotonic, - }, - indent=indent, - ) + return dumps(self.to_dict(), indent=indent) + + +class GaugeDict(typing.TypedDict): + """Dictionary representation of a Gauge.""" + + data_points: typing.List[NumberDataPointDict] @dataclass(frozen=True) @@ -135,16 +252,22 @@ class Gauge: data_points: Sequence[NumberDataPoint] + def to_dict(self) -> GaugeDict: + return { + "data_points": [ + data_point.to_dict() for data_point in self.data_points + ], + } + def to_json(self, indent=4) -> str: - return dumps( - { - "data_points": [ - loads(data_point.to_json(indent=indent)) - for data_point in self.data_points - ], - }, - indent=indent, - ) + return dumps(self.to_dict(), indent=indent) + + +class HistogramDict(typing.TypedDict): + """Dictionary representation of a Histogram.""" + + data_points: typing.List[HistogramDataPointDict] + aggregation_temporality: int @dataclass(frozen=True) @@ -157,21 +280,33 @@ class Histogram: 
"opentelemetry.sdk.metrics.export.AggregationTemporality" ) + def to_dict(self) -> HistogramDict: + return { + "data_points": [ + data_point.to_dict() for data_point in self.data_points + ], + "aggregation_temporality": self.aggregation_temporality.value, + } + def to_json(self, indent=4) -> str: - return dumps( - { - "data_points": [ - loads(data_point.to_json(indent=indent)) - for data_point in self.data_points - ], - "aggregation_temporality": self.aggregation_temporality, - }, - indent=indent, - ) + return dumps(self.to_dict(), indent=indent) -DataT = Union[Sum, Gauge, Histogram] -DataPointT = Union[NumberDataPoint, HistogramDataPoint] +DataT = Union[Sum, Gauge, Histogram, ExponentialHistogram] +DataPointT = Union[ + NumberDataPoint, HistogramDataPoint, ExponentialHistogramDataPoint +] + + +class MetricDict(typing.TypedDict): + """Dictionary representation of a Metric.""" + + name: str + description: str + unit: str + data: typing.Union[ + SumDict, GaugeDict, HistogramDict, ExponentialHistogramDict + ] @dataclass(frozen=True) @@ -184,16 +319,24 @@ class Metric: unit: Optional[str] data: DataT + def to_dict(self) -> MetricDict: + return { + "name": self.name, + "description": self.description or "", + "unit": self.unit or "", + "data": self.data.to_dict(), + } + def to_json(self, indent=4) -> str: - return dumps( - { - "name": self.name, - "description": self.description or "", - "unit": self.unit or "", - "data": loads(self.data.to_json(indent=indent)), - }, - indent=indent, - ) + return dumps(self.to_dict(), indent=indent) + + +class ScopeMetricsDict(typing.TypedDict): + """Dictionary representation of a ScopeMetrics object.""" + + scope: typing.Any + metrics: typing.List[typing.Any] + schema_url: str @dataclass(frozen=True) @@ -204,18 +347,23 @@ class ScopeMetrics: metrics: Sequence[Metric] schema_url: str + def to_dict(self) -> ScopeMetricsDict: + return { + "scope": self.scope.to_dict(), + "metrics": [metric.to_dict() for metric in self.metrics], + 
"schema_url": self.schema_url, + } + def to_json(self, indent=4) -> str: - return dumps( - { - "scope": loads(self.scope.to_json(indent=indent)), - "metrics": [ - loads(metric.to_json(indent=indent)) - for metric in self.metrics - ], - "schema_url": self.schema_url, - }, - indent=indent, - ) + return dumps(self.to_dict(), indent=indent) + + +class ResourceMetricsDict(typing.TypedDict): + """Dictionary representation of a ResourceMetrics object.""" + + resource: ResourceDict + scope_metrics: typing.List[ScopeMetricsDict] + schema_url: str @dataclass(frozen=True) @@ -226,18 +374,23 @@ class ResourceMetrics: scope_metrics: Sequence[ScopeMetrics] schema_url: str + def to_dict(self) -> ResourceMetricsDict: + return { + "resource": self.resource.to_dict(), + "scope_metrics": [ + scope_metrics.to_dict() for scope_metrics in self.scope_metrics + ], + "schema_url": self.schema_url, + } + def to_json(self, indent=4) -> str: - return dumps( - { - "resource": loads(self.resource.to_json(indent=indent)), - "scope_metrics": [ - loads(scope_metrics.to_json(indent=indent)) - for scope_metrics in self.scope_metrics - ], - "schema_url": self.schema_url, - }, - indent=indent, - ) + return dumps(self.to_dict(), indent=indent) + + +class MetricsDataDict(typing.TypedDict): + """Dictionary representation of a MetricsData object.""" + + resource_metrics: typing.List[ResourceMetricsDict] @dataclass(frozen=True) @@ -246,13 +399,13 @@ class MetricsData: resource_metrics: Sequence[ResourceMetrics] + def to_dict(self) -> MetricsDataDict: + return { + "resource_metrics": [ + resource_metrics.to_dict() + for resource_metrics in self.resource_metrics + ] + } + def to_json(self, indent=4) -> str: - return dumps( - { - "resource_metrics": [ - loads(resource_metrics.to_json(indent=indent)) - for resource_metrics in self.resource_metrics - ] - }, - indent=indent, - ) + return dumps(self.to_dict(), indent=indent) diff --git a/opentelemetry-sdk/src/opentelemetry/sdk/resources/__init__.py 
b/opentelemetry-sdk/src/opentelemetry/sdk/resources/__init__.py index dd5bea43d4c..3fd639da1aa 100644 --- a/opentelemetry-sdk/src/opentelemetry/sdk/resources/__init__.py +++ b/opentelemetry-sdk/src/opentelemetry/sdk/resources/__init__.py @@ -139,6 +139,13 @@ _OPENTELEMETRY_SDK_VERSION = version("opentelemetry-sdk") +class ResourceDict(typing.TypedDict): + """Dictionary representation of a Resource.""" + + attributes: Attributes + schema_url: typing.Optional[str] + + class Resource: """A Resource is an immutable representation of the entity producing telemetry as Attributes.""" @@ -269,14 +276,14 @@ def __hash__(self): f"{dumps(self._attributes.copy(), sort_keys=True)}|{self._schema_url}" ) + def to_dict(self) -> ResourceDict: + return { + "attributes": dict(self._attributes), + "schema_url": self._schema_url, + } + def to_json(self, indent=4) -> str: - return dumps( - { - "attributes": dict(self._attributes), - "schema_url": self._schema_url, - }, - indent=indent, - ) + return dumps(self.to_dict(), indent=indent) _EMPTY_RESOURCE = Resource({}) diff --git a/opentelemetry-sdk/src/opentelemetry/sdk/trace/__init__.py b/opentelemetry-sdk/src/opentelemetry/sdk/trace/__init__.py index 2d6147a4365..34d12bb0df8 100644 --- a/opentelemetry-sdk/src/opentelemetry/sdk/trace/__init__.py +++ b/opentelemetry-sdk/src/opentelemetry/sdk/trace/__init__.py @@ -21,7 +21,6 @@ import threading import traceback import typing -from collections import OrderedDict from contextlib import contextmanager from os import environ from time import time_ns @@ -55,7 +54,7 @@ OTEL_SPAN_EVENT_COUNT_LIMIT, OTEL_SPAN_LINK_COUNT_LIMIT, ) -from opentelemetry.sdk.resources import Resource +from opentelemetry.sdk.resources import Resource, ResourceDict from opentelemetry.sdk.trace import sampling from opentelemetry.sdk.trace.id_generator import IdGenerator, RandomIdGenerator from opentelemetry.sdk.util import BoundedList @@ -63,8 +62,8 @@ InstrumentationInfo, InstrumentationScope, ) -from opentelemetry.trace 
import SpanContext -from opentelemetry.trace.status import Status, StatusCode +from opentelemetry.trace import SpanContext, SpanContextDict +from opentelemetry.trace.status import Status, StatusCode, StatusDict from opentelemetry.util import types logger = logging.getLogger(__name__) @@ -302,6 +301,14 @@ def attributes(self) -> types.Attributes: pass +class EventDict(typing.TypedDict): + """Dictionary representation of a span Event.""" + + name: str + timestamp: str + attributes: types.Attributes + + class Event(EventBase): """A text annotation with a set of attributes. The attributes of an event are immutable. @@ -327,6 +334,13 @@ def __init__( def attributes(self) -> types.Attributes: return self._attributes + def to_dict(self) -> EventDict: + return { + "name": self.name, + "timestamp": util.ns_to_iso_str(self.timestamp), + "attributes": dict(self._attributes), + } + def _check_span_ended(func): def wrapper(self, *args, **kwargs): @@ -343,6 +357,22 @@ def wrapper(self, *args, **kwargs): return wrapper +class ReadableSpanDict(typing.TypedDict): + """Dictionary representation of a ReadableSpan.""" + + name: typing.Optional[str] + context: SpanContextDict + kind: str + parent_id: typing.Optional[str] + start_time: typing.Optional[str] + end_time: typing.Optional[str] + status: typing.Optional[StatusDict] + attributes: types.Attributes + events: typing.List[EventDict] + links: typing.List[trace_api.LinkDict] + resource: ResourceDict + + class ReadableSpan: """Provides read-only access to span attributes""" @@ -452,8 +482,8 @@ def instrumentation_info(self) -> InstrumentationInfo: def instrumentation_scope(self) -> InstrumentationScope: return self._instrumentation_scope - def to_json(self, indent=4): - parent_id = None + def to_dict(self) -> ReadableSpanDict: + parent_id: typing.Optional[str] = None if self.parent is not None: if isinstance(self.parent, Span): ctx = self.parent.context @@ -463,85 +493,34 @@ def to_json(self, indent=4): 
f"0x{trace_api.format_span_id(self.parent.span_id)}" ) - start_time = None + start_time: typing.Optional[str] = None if self._start_time: start_time = util.ns_to_iso_str(self._start_time) - end_time = None + end_time: typing.Optional[str] = None if self._end_time: end_time = util.ns_to_iso_str(self._end_time) + status: typing.Optional[StatusDict] = None if self._status is not None: - status = OrderedDict() - status["status_code"] = str(self._status.status_code.name) - if self._status.description: - status["description"] = self._status.description - - f_span = OrderedDict() - - f_span["name"] = self._name - f_span["context"] = self._format_context(self._context) - f_span["kind"] = str(self.kind) - f_span["parent_id"] = parent_id - f_span["start_time"] = start_time - f_span["end_time"] = end_time - if self._status is not None: - f_span["status"] = status - f_span["attributes"] = self._format_attributes(self._attributes) - f_span["events"] = self._format_events(self._events) - f_span["links"] = self._format_links(self._links) - f_span["resource"] = json.loads(self.resource.to_json()) - - return json.dumps(f_span, indent=indent) - - @staticmethod - def _format_context(context): - x_ctx = OrderedDict() - x_ctx["trace_id"] = f"0x{trace_api.format_trace_id(context.trace_id)}" - x_ctx["span_id"] = f"0x{trace_api.format_span_id(context.span_id)}" - x_ctx["trace_state"] = repr(context.trace_state) - return x_ctx - - @staticmethod - def _format_attributes(attributes): - if isinstance(attributes, BoundedAttributes): - return attributes._dict # pylint: disable=protected-access - if isinstance(attributes, MappingProxyType): - return attributes.copy() - return attributes - - @staticmethod - def _format_events(events): - f_events = [] - for event in events: - f_event = OrderedDict() - f_event["name"] = event.name - f_event["timestamp"] = util.ns_to_iso_str(event.timestamp) - f_event[ - "attributes" - ] = Span._format_attributes( # pylint: disable=protected-access - 
event.attributes - ) - f_events.append(f_event) - return f_events - - @staticmethod - def _format_links(links): - f_links = [] - for link in links: - f_link = OrderedDict() - f_link[ - "context" - ] = Span._format_context( # pylint: disable=protected-access - link.context - ) - f_link[ - "attributes" - ] = Span._format_attributes( # pylint: disable=protected-access - link.attributes - ) - f_links.append(f_link) - return f_links + status = self.status.to_dict() + + return { + "name": self._name, + "context": self._context.to_dict(), + "kind": str(self.kind), + "parent_id": parent_id, + "start_time": start_time, + "end_time": end_time, + "status": status, + "attributes": dict(self._attributes), + "events": [event.to_dict() for event in self._events], + "links": [link.to_dict() for link in self._links], + "resource": self.resource.to_dict(), + } + + def to_json(self, indent=4): + return json.dumps(self.to_dict(), indent=indent) class SpanLimits: diff --git a/opentelemetry-sdk/src/opentelemetry/sdk/util/instrumentation.py b/opentelemetry-sdk/src/opentelemetry/sdk/util/instrumentation.py index 085d3fd874f..96a45588217 100644 --- a/opentelemetry-sdk/src/opentelemetry/sdk/util/instrumentation.py +++ b/opentelemetry-sdk/src/opentelemetry/sdk/util/instrumentation.py @@ -11,6 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +import typing from json import dumps from typing import Optional @@ -74,6 +75,14 @@ def name(self) -> str: return self._name +class InstrumentationScopeDict(typing.TypedDict): + """Dictionary representation of an InstrumentationScope.""" + + name: str + version: typing.Optional[str] + schema_url: typing.Optional[str] + + class InstrumentationScope: """A logical unit of the application code with which the emitted telemetry can be associated. 
@@ -132,12 +141,12 @@ def version(self) -> Optional[str]: def name(self) -> str: return self._name + def to_dict(self) -> InstrumentationScopeDict: + return { + "name": self._name, + "version": self._version, + "schema_url": self._schema_url, + } + def to_json(self, indent=4) -> str: - return dumps( - { - "name": self._name, - "version": self._version, - "schema_url": self._schema_url, - }, - indent=indent, - ) + return dumps(self.to_dict(), indent=indent) diff --git a/opentelemetry-sdk/tests/logs/test_log_record.py b/opentelemetry-sdk/tests/logs/test_log_record.py index 897a3b5893a..33b63898ee1 100644 --- a/opentelemetry-sdk/tests/logs/test_log_record.py +++ b/opentelemetry-sdk/tests/logs/test_log_record.py @@ -25,9 +25,9 @@ def test_log_record_to_json(self): expected = json.dumps( { "body": "a log line", - "severity_number": "None", + "severity_number": None, "severity_text": None, - "attributes": None, + "attributes": {}, "timestamp": "1970-01-01T00:00:00.000000Z", "trace_id": "", "span_id": "", @@ -47,9 +47,9 @@ def test_log_record_to_json_with_resource(self): expected = json.dumps( { "body": "a log line", - "severity_number": "None", + "severity_number": None, "severity_text": None, - "attributes": None, + "attributes": {}, "timestamp": "1970-01-01T00:00:00.000000Z", "trace_id": "", "span_id": "", diff --git a/opentelemetry-sdk/tests/trace/test_trace.py b/opentelemetry-sdk/tests/trace/test_trace.py index c0b192192f6..dfc58b97b43 100644 --- a/opentelemetry-sdk/tests/trace/test_trace.py +++ b/opentelemetry-sdk/tests/trace/test_trace.py @@ -1345,10 +1345,15 @@ def test_to_json(self): is_remote=False, trace_flags=trace_api.TraceFlags(trace_api.TraceFlags.SAMPLED), ) - parent = trace._Span("parent-name", context, resource=Resource({})) + parent = trace._Span( + "parent-name", + context, + resource=Resource({"hello": "world"}), + ) span = trace._Span( "span-name", context, resource=Resource({}), parent=parent ) + span.add_event("foo", {"spam": "ham"}, 
1234567890987654321) self.assertEqual( span.to_json(), @@ -1357,17 +1362,26 @@ def test_to_json(self): "context": { "trace_id": "0x000000000000000000000000deadbeef", "span_id": "0x00000000deadbef0", - "trace_state": "[]" + "trace_state": {} }, "kind": "SpanKind.INTERNAL", "parent_id": "0x00000000deadbef0", "start_time": null, "end_time": null, "status": { - "status_code": "UNSET" + "status_code": "UNSET", + "description": null }, "attributes": {}, - "events": [], + "events": [ + { + "name": "foo", + "timestamp": "2009-02-13T23:31:30.987654Z", + "attributes": { + "spam": "ham" + } + } + ], "links": [], "resource": { "attributes": {}, @@ -1377,7 +1391,7 @@ def test_to_json(self): ) self.assertEqual( span.to_json(indent=None), - '{"name": "span-name", "context": {"trace_id": "0x000000000000000000000000deadbeef", "span_id": "0x00000000deadbef0", "trace_state": "[]"}, "kind": "SpanKind.INTERNAL", "parent_id": "0x00000000deadbef0", "start_time": null, "end_time": null, "status": {"status_code": "UNSET"}, "attributes": {}, "events": [], "links": [], "resource": {"attributes": {}, "schema_url": ""}}', + '{"name": "span-name", "context": {"trace_id": "0x000000000000000000000000deadbeef", "span_id": "0x00000000deadbef0", "trace_state": {}}, "kind": "SpanKind.INTERNAL", "parent_id": "0x00000000deadbef0", "start_time": null, "end_time": null, "status": {"status_code": "UNSET", "description": null}, "attributes": {}, "events": [{"name": "foo", "timestamp": "2009-02-13T23:31:30.987654Z", "attributes": {"spam": "ham"}}], "links": [], "resource": {"attributes": {}, "schema_url": ""}}', ) def test_attributes_to_json(self): @@ -1393,7 +1407,7 @@ def test_attributes_to_json(self): date_str = ns_to_iso_str(123) self.assertEqual( span.to_json(indent=None), - '{"name": "span-name", "context": {"trace_id": "0x000000000000000000000000deadbeef", "span_id": "0x00000000deadbef0", "trace_state": "[]"}, "kind": "SpanKind.INTERNAL", "parent_id": null, "start_time": null, "end_time": null, 
"status": {"status_code": "UNSET"}, "attributes": {"key": "value"}, "events": [{"name": "event", "timestamp": "' + '{"name": "span-name", "context": {"trace_id": "0x000000000000000000000000deadbeef", "span_id": "0x00000000deadbef0", "trace_state": {}}, "kind": "SpanKind.INTERNAL", "parent_id": null, "start_time": null, "end_time": null, "status": {"status_code": "UNSET", "description": null}, "attributes": {"key": "value"}, "events": [{"name": "event", "timestamp": "' + date_str + '", "attributes": {"key2": "value2"}}], "links": [], "resource": {"attributes": {}, "schema_url": ""}}', )