
Commit

Upgrade google-cloud-trace dep to ~=1.1
aabmass committed Dec 16, 2021
1 parent e5f7d7a commit dba00c2
Showing 6 changed files with 104 additions and 75 deletions.
3 changes: 1 addition & 2 deletions e2e-test-server/constraints.txt
@@ -5,9 +5,8 @@ click==7.1.2
Flask==1.1.2
google-api-core==1.26.3
google-auth==1.30.0
google-cloud-core==1.6.0
google-cloud-pubsub==2.4.1
google-cloud-trace==0.24.0
google-cloud-trace==1.5.0
googleapis-common-protos==1.53.0
grpc-google-iam-v1==0.12.3
grpcio==1.37.0
1 change: 1 addition & 0 deletions opentelemetry-exporter-gcp-trace/CHANGELOG.md
@@ -4,6 +4,7 @@

- Add optional resource attributes to trace spans with regex
([#145](https://github.com/GoogleCloudPlatform/opentelemetry-operations-python/pull/145))
- Upgrade the `google-cloud-trace` dependency to `~= 1.1` (any 1.x release at or above 1.1).

## Version 1.0.0

2 changes: 1 addition & 1 deletion opentelemetry-exporter-gcp-trace/setup.cfg
@@ -25,7 +25,7 @@ package_dir=
=src
packages=find_namespace:
install_requires =
google-cloud-trace >=0.24.0, <1.0.0
google-cloud-trace ~= 1.1
opentelemetry-api ~= 1.0
opentelemetry-sdk ~= 1.0

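The `~=` operator used in `install_requires` is the compatible-release specifier from PEP 440: `google-cloud-trace ~= 1.1` accepts any 1.x release at or above 1.1 and rejects 2.0 and later. A minimal sketch of checking that, assuming the `packaging` library is available (it is not part of this commit):

```python
from packaging.specifiers import SpecifierSet

# "~= 1.1" is the compatible-release specifier, equivalent to ">= 1.1, == 1.*".
spec = SpecifierSet("~=1.1")

print("1.5.0" in spec)   # True  -- the version pinned in constraints.txt above
print("0.24.0" in spec)  # False -- the old pin no longer satisfies the range
print("2.0.0" in spec)   # False -- the next major version is excluded
```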
@@ -51,13 +51,22 @@
import logging
import re
from collections.abc import Sequence as SequenceABC
from typing import Any, Dict, List, Optional, Pattern, Sequence, Tuple
from typing import (
Any,
Dict,
List,
Optional,
Pattern,
Sequence,
Tuple,
overload,
)

import google.auth
import opentelemetry.trace as trace_api
import pkg_resources
from google.cloud.trace_v2 import TraceServiceClient
from google.cloud.trace_v2.proto import trace_pb2
from google.cloud.trace_v2 import BatchWriteSpansRequest, TraceServiceClient
from google.cloud.trace_v2 import types as trace_types
from google.protobuf.timestamp_pb2 import Timestamp
from google.rpc import code_pb2, status_pb2
from opentelemetry.exporter.cloud_trace.version import __version__
@@ -99,7 +108,7 @@ def __init__(
client=None,
resource_regex=None,
):
self.client = client or TraceServiceClient()
self.client: TraceServiceClient = client or TraceServiceClient()
if not project_id:
_, self.project_id = google.auth.default()
else:
@@ -118,8 +127,10 @@ def export(self, spans: Sequence[ReadableSpan]) -> SpanExportResult:
"""
try:
self.client.batch_write_spans(
"projects/{}".format(self.project_id),
self._translate_to_cloud_trace(spans),
request=BatchWriteSpansRequest(
name="projects/{}".format(self.project_id),
spans=self._translate_to_cloud_trace(spans),
)
)
# pylint: disable=broad-except
except Exception as ex:
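The 1.x `google-cloud-trace` client is produced by the GAPIC microgenerator, so RPC methods accept a single request message (or equivalent keyword arguments) rather than positional parameters. A minimal sketch of the calling-convention change, using a hypothetical project id:

```python
from google.cloud.trace_v2 import BatchWriteSpansRequest, TraceServiceClient

client = TraceServiceClient()

# google-cloud-trace 0.x style (removed above):
#     client.batch_write_spans("projects/my-project", spans)

# google-cloud-trace 1.x style (added above):
client.batch_write_spans(
    request=BatchWriteSpansRequest(
        name="projects/my-project",  # hypothetical project id
        spans=[],                    # list of trace_v2 Span messages
    )
)
```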
@@ -130,14 +141,14 @@ def export(self, spans: Sequence[ReadableSpan]) -> SpanExportResult:

def _translate_to_cloud_trace(
self, spans: Sequence[ReadableSpan]
) -> List[Dict[str, Any]]:
) -> List[trace_types.Span]:
"""Translate the spans to Cloud Trace format.
Args:
spans: Sequence of spans to convert
"""

cloud_trace_spans = []
cloud_trace_spans: List[trace_types.Span] = []

for span in spans:
ctx = span.get_span_context()
@@ -168,25 +179,23 @@ def _translate_to_cloud_trace(
}

cloud_trace_spans.append(
{
"name": span_name,
"span_id": span_id,
"display_name": _get_truncatable_str_object(
span.name, 128
),
"start_time": start_time,
"end_time": end_time,
"parent_span_id": parent_id,
"attributes": _extract_attributes(
trace_types.Span(
name=span_name,
span_id=span_id,
display_name=_get_truncatable_str_object(span.name, 128),
start_time=start_time,
end_time=end_time,
parent_span_id=parent_id,
attributes=_extract_attributes(
resources_and_attrs,
MAX_SPAN_ATTRS,
add_agent_attr=True,
),
"links": _extract_links(span.links), # type: ignore[has-type]
"status": _extract_status(span.status), # type: ignore[arg-type]
"time_events": _extract_events(span.events),
"span_kind": _extract_span_kind(span.kind),
}
links=_extract_links(span.links), # type: ignore[has-type]
status=_extract_status(span.status), # type: ignore[arg-type]
time_events=_extract_events(span.events),
span_kind=_extract_span_kind(span.kind),
)
)
# TODO: Leverage more of the Cloud Trace API, e.g.
# same_process_as_parent_span and child_span_count
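The plain dictionaries previously handed to `batch_write_spans` are replaced by typed proto-plus messages from `google.cloud.trace_v2.types`, which are constructed with keyword arguments matching the proto field names. A minimal sketch with hypothetical ids:

```python
from google.cloud.trace_v2 import types as trace_types

span = trace_types.Span(
    # Hypothetical resource name: projects/{project}/traces/{trace_id}/spans/{span_id}
    name="projects/my-project/traces/6e0c63257de34c92bf9efcd03927272e/spans/95bb5edabd45950f",
    span_id="95bb5edabd45950f",
    display_name=trace_types.TruncatableString(value="my-span", truncated_byte_count=0),
)
print(span.display_name.value)  # "my-span"
```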
@@ -213,7 +222,7 @@ def _get_truncatable_str_object(str_to_convert: str, max_length: int):
truncated bytes count."""
truncated, truncated_byte_count = _truncate_str(str_to_convert, max_length)

return trace_pb2.TruncatableString(
return trace_types.TruncatableString(
value=truncated, truncated_byte_count=truncated_byte_count
)

@@ -248,11 +257,13 @@ def _extract_status(status: trace_api.Status) -> Optional[status_pb2.Status]:
return status_proto


def _extract_links(links: Sequence[trace_api.Link]) -> trace_pb2.Span.Links:
def _extract_links(
links: Sequence[trace_api.Link],
) -> Optional[trace_types.Span.Links]:
"""Convert span.links"""
if not links:
return None
extracted_links = []
extracted_links: List[trace_types.Span.Link] = []
dropped_links = 0
if len(links) > MAX_NUM_LINKS:
logger.warning(
@@ -271,25 +282,27 @@ def _extract_links(links: Sequence[trace_api.Link]) -> trace_pb2.Span.Links:
trace_id = format_trace_id(link.context.trace_id)
span_id = format_span_id(link.context.span_id)
extracted_links.append(
{
"trace_id": trace_id,
"span_id": span_id,
"type": "TYPE_UNSPECIFIED",
"attributes": _extract_attributes(
trace_types.Span.Link(
trace_id=trace_id,
span_id=span_id,
type="TYPE_UNSPECIFIED",
attributes=_extract_attributes(
link_attributes, MAX_LINK_ATTRS
),
}
)
)
return trace_pb2.Span.Links(
return trace_types.Span.Links(
link=extracted_links, dropped_links_count=dropped_links
)


def _extract_events(events: Sequence[Event]) -> trace_pb2.Span.TimeEvents:
def _extract_events(
events: Sequence[Event],
) -> Optional[trace_types.Span.TimeEvents]:
"""Convert span.events to dict."""
if not events:
return None
logs = []
time_events: List[trace_types.Span.TimeEvent] = []
dropped_annontations = 0
if len(events) > MAX_NUM_EVENTS:
logger.warning(
@@ -305,42 +318,40 @@ def _extract_events(events: Sequence[Event]) -> trace_pb2.Span.TimeEvents:
event.name,
MAX_EVENT_ATTRS,
)
logs.append(
{
"time": _get_time_from_ns(event.timestamp),
"annotation": {
"description": _get_truncatable_str_object(
event.name, 256
),
"attributes": _extract_attributes(
time_events.append(
trace_types.Span.TimeEvent(
time=_get_time_from_ns(event.timestamp),
annotation=trace_types.Span.TimeEvent.Annotation(
description=_get_truncatable_str_object(event.name, 256),
attributes=_extract_attributes(
event.attributes, MAX_EVENT_ATTRS
),
},
}
),
)
)
return trace_pb2.Span.TimeEvents(
time_event=logs,
return trace_types.Span.TimeEvents(
time_event=time_events,
dropped_annotations_count=dropped_annontations,
dropped_message_events_count=0,
)


# pylint: disable=no-member
SPAN_KIND_MAPPING = {
trace_api.SpanKind.INTERNAL: trace_pb2.Span.SpanKind.INTERNAL,
trace_api.SpanKind.CLIENT: trace_pb2.Span.SpanKind.CLIENT,
trace_api.SpanKind.SERVER: trace_pb2.Span.SpanKind.SERVER,
trace_api.SpanKind.PRODUCER: trace_pb2.Span.SpanKind.PRODUCER,
trace_api.SpanKind.CONSUMER: trace_pb2.Span.SpanKind.CONSUMER,
trace_api.SpanKind.INTERNAL: trace_types.Span.SpanKind.INTERNAL,
trace_api.SpanKind.CLIENT: trace_types.Span.SpanKind.CLIENT,
trace_api.SpanKind.SERVER: trace_types.Span.SpanKind.SERVER,
trace_api.SpanKind.PRODUCER: trace_types.Span.SpanKind.PRODUCER,
trace_api.SpanKind.CONSUMER: trace_types.Span.SpanKind.CONSUMER,
}


# pylint: disable=no-member
def _extract_span_kind(
span_kind: trace_api.SpanKind,
) -> trace_pb2.Span.SpanKind:
) -> int:
return SPAN_KIND_MAPPING.get(
span_kind, trace_pb2.Span.SpanKind.SPAN_KIND_UNSPECIFIED
span_kind, trace_types.Span.SpanKind.SPAN_KIND_UNSPECIFIED
)


@@ -419,19 +430,19 @@ def _extract_attributes(
attrs: types.Attributes,
num_attrs_limit: int,
add_agent_attr: bool = False,
) -> trace_pb2.Span.Attributes:
) -> trace_types.Span.Attributes:
"""Convert span.attributes to dict."""
attributes_dict = BoundedDict(
num_attrs_limit
) # type: BoundedDict[str, trace_pb2.AttributeValue]
) # type: BoundedDict[str, trace_types.AttributeValue]
invalid_value_dropped_count = 0
for key, value in attrs.items() if attrs else []:
key = _truncate_str(key, 128)[0]
for ot_key, ot_value in attrs.items() if attrs else []:
key = _truncate_str(ot_key, 128)[0]
if key in LABELS_MAPPING: # pylint: disable=consider-using-get
key = LABELS_MAPPING[key]
value = _format_attribute_value(value)
value = _format_attribute_value(ot_value)

if value:
if value is not None:
attributes_dict[key] = value
else:
invalid_value_dropped_count += 1
@@ -444,16 +455,30 @@ def _format_attribute_value(
_strip_characters(__version__),
)
)
return trace_pb2.Span.Attributes(
attribute_map=attributes_dict,
return trace_types.Span.Attributes(
attribute_map=dict(attributes_dict),
dropped_attributes_count=attributes_dict.dropped
+ invalid_value_dropped_count,
)


@overload
def _format_attribute_value(
value: types.AttributeValue,
) -> trace_pb2.AttributeValue:
) -> trace_types.AttributeValue:
...


@overload
def _format_attribute_value(
value: Any,
) -> Optional[trace_types.AttributeValue]:
...


def _format_attribute_value(
value,
) -> Optional[trace_types.AttributeValue]:
if isinstance(value, bool):
value_type = "bool_value"
elif isinstance(value, int):
Expand All @@ -478,4 +503,4 @@ def _format_attribute_value(
)
return None

return trace_pb2.AttributeValue(**{value_type: value})
return trace_types.AttributeValue(**{value_type: value})
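`trace_types.AttributeValue` carries one of `bool_value`, `int_value`, or `string_value`, which is why `_format_attribute_value` can expand `**{value_type: value}` into the constructor. A minimal sketch of the three cases:

```python
from google.cloud.trace_v2 import types as trace_types

bool_attr = trace_types.AttributeValue(bool_value=True)
int_attr = trace_types.AttributeValue(int_value=42)
str_attr = trace_types.AttributeValue(
    string_value=trace_types.TruncatableString(value="hello", truncated_byte_count=0)
)
```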
@@ -17,9 +17,9 @@
from unittest import mock

import pkg_resources
from google.cloud.trace_v2.proto.trace_pb2 import AttributeValue
from google.cloud.trace_v2.proto.trace_pb2 import Span as ProtoSpan
from google.cloud.trace_v2.proto.trace_pb2 import TruncatableString
from google.cloud.trace_v2.types import AttributeValue, BatchWriteSpansRequest
from google.cloud.trace_v2.types import Span as ProtoSpan
from google.cloud.trace_v2.types import TruncatableString
from google.rpc import code_pb2
from google.rpc.status_pb2 import Status
from opentelemetry.exporter.cloud_trace import (
@@ -175,7 +175,10 @@ def test_export(self):

self.assertTrue(client.batch_write_spans.called)
client.batch_write_spans.assert_called_with(
"projects/{}".format(self.project_id), [cloud_trace_spans]
request=BatchWriteSpansRequest(
name="projects/{}".format(self.project_id),
spans=[cloud_trace_spans],
)
)

def test_extract_status_code_unset(self):
@@ -14,7 +14,9 @@

import grpc
from google.cloud.trace_v2 import TraceServiceClient
from google.cloud.trace_v2.gapic.transports import trace_service_grpc_transport
from google.cloud.trace_v2.services.trace_service.transports.grpc import (
TraceServiceGrpcTransport,
)
from opentelemetry.exporter.cloud_trace import CloudTraceSpanExporter
from opentelemetry.sdk.resources import Resource
from opentelemetry.sdk.trace import _Span as Span
@@ -58,9 +60,8 @@ def test_export(self):

# Setup the trace exporter.
channel = grpc.insecure_channel(self.address)
transport = trace_service_grpc_transport.TraceServiceGrpcTransport(
channel=channel
)
transport = TraceServiceGrpcTransport(channel=channel)

client = TraceServiceClient(transport=transport)
trace_exporter = CloudTraceSpanExporter(self.project_id, client=client)

