Skip to content

Commit

Permalink
Refactor metric format
Browse files — browse the repository at this point in the history
  • Loading branch information
ocelotl committed May 8, 2022
1 parent 3658e21 commit 264c0a9
Show / hide file tree
Showing 22 changed files with 735 additions and 258 deletions.
1 change: 1 addition & 0 deletions docs/conf.py
Original file line number Diff line number Diff line change
Expand Up @@ -97,6 +97,7 @@
# https://github.com/sphinx-doc/sphinx/pull/3744
nitpick_ignore = [
("py:class", "ValueT"),
("py:class", "opentelemetry.sdk._metrics.export.Mapping"),
# Even if wrapt is added to intersphinx_mapping, sphinx keeps failing
# with "class reference target not found: ObjectProxy".
("py:class", "ObjectProxy"),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -11,9 +11,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import logging
from logging import getLogger
from os import environ
from typing import Optional, Sequence
from typing import Optional, Sequence, Dict
from grpc import ChannelCredentials, Compression
from opentelemetry.exporter.otlp.proto.grpc.exporter import (
OTLPExporterMixin,
Expand All @@ -40,9 +40,14 @@
from opentelemetry.sdk._metrics.export import (
MetricExporter,
MetricExportResult,
MappingMetricT,
)
from opentelemetry.sdk.resources import Resource
from opentelemetry.sdk.util.instrumentation import (
InstrumentationScope as SDKInstrumentationScope,
)

logger = logging.getLogger(__name__)
_logger = getLogger(__name__)


class OTLPMetricExporter(
Expand Down Expand Up @@ -79,87 +84,109 @@ def __init__(
)

def _translate_data(
self, data: Sequence[Metric]
self, data: MappingMetricT
) -> ExportMetricsServiceRequest:
sdk_resource_scope_metrics = {}

for metric in data:
resource = metric.resource
scope_map = sdk_resource_scope_metrics.get(resource, {})
if not scope_map:
sdk_resource_scope_metrics[resource] = scope_map

scope_metrics = scope_map.get(metric.instrumentation_scope)

if not scope_metrics:
if metric.instrumentation_scope is not None:
scope_map[metric.instrumentation_scope] = pb2.ScopeMetrics(
scope=InstrumentationScope(
name=metric.instrumentation_scope.name,
version=metric.instrumentation_scope.version,

sdk_resource_scope_metrics: Dict[
Resource, Dict[SDKInstrumentationScope, pb2.ScopeMetrics]
] = {}

for resource, instrumentation_scope_metrics in data.items():

if resource not in sdk_resource_scope_metrics:
sdk_resource_scope_metrics[resource] = {}

for (
instrumentation_scope,
metrics,
) in instrumentation_scope_metrics.items():
if instrumentation_scope not in sdk_resource_scope_metrics:
if instrumentation_scope is None:
sdk_resource_scope_metrics[resource][
instrumentation_scope
] = pb2.ScopeMetrics()
else:
sdk_resource_scope_metrics[resource][
instrumentation_scope
] = pb2.ScopeMetrics(
scope=InstrumentationScope(
name=instrumentation_scope.name,
version=instrumentation_scope.version,
)
)
scope_metrics = sdk_resource_scope_metrics[resource][
instrumentation_scope
]

for metric in metrics:

pbmetric = pb2.Metric(
name=metric.name,
description=metric.description,
unit=metric.unit,
)
else:
scope_map[
metric.instrumentation_scope
] = pb2.ScopeMetrics()
if isinstance(metric.point, Gauge):
pt = pb2.NumberDataPoint(
attributes=self._translate_attributes(
metric.point.attributes
),
time_unix_nano=metric.point.time_unix_nano,
)
if isinstance(metric.point.value, int):
pt.as_int = metric.point.value
else:
pt.as_double = metric.point.value
pbmetric.gauge.data_points.append(pt)
elif isinstance(metric.point, Histogram):
pt = pb2.HistogramDataPoint(
attributes=self._translate_attributes(
metric.point.attributes
),
time_unix_nano=metric.point.time_unix_nano,
start_time_unix_nano=(
metric.point.start_time_unix_nano
),
count=sum(metric.point.bucket_counts),
sum=metric.point.sum,
bucket_counts=metric.point.bucket_counts,
explicit_bounds=metric.point.explicit_bounds,
)
pbmetric.histogram.aggregation_temporality = (
metric.point.aggregation_temporality
)
pbmetric.histogram.data_points.append(pt)
elif isinstance(metric.point, Sum):
pt = pb2.NumberDataPoint(
attributes=self._translate_attributes(
metric.point.attributes
),
start_time_unix_nano=(
metric.point.start_time_unix_nano
),
time_unix_nano=metric.point.time_unix_nano,
)
if isinstance(metric.point.value, int):
pt.as_int = metric.point.value
else:
pt.as_double = metric.point.value
# note that because sum is a message type, the fields
# must be set individually rather than instantiating a
# pb2.Sum and setting it once
pbmetric.sum.aggregation_temporality = (
metric.point.aggregation_temporality
)
pbmetric.sum.is_monotonic = metric.point.is_monotonic
pbmetric.sum.data_points.append(pt)
else:
_logger.warn(
"unsupported datapoint type %s", metric.point
)
continue

scope_metrics = scope_map.get(metric.instrumentation_scope)
scope_metrics.metrics.append(
pbmetric,
)

pbmetric = pb2.Metric(
name=metric.name,
description=metric.description,
unit=metric.unit,
)
if isinstance(metric.point, Gauge):
pt = pb2.NumberDataPoint(
attributes=self._translate_attributes(metric.attributes),
time_unix_nano=metric.point.time_unix_nano,
)
if isinstance(metric.point.value, int):
pt.as_int = metric.point.value
else:
pt.as_double = metric.point.value
pbmetric.gauge.data_points.append(pt)
elif isinstance(metric.point, Histogram):
pt = pb2.HistogramDataPoint(
attributes=self._translate_attributes(metric.attributes),
time_unix_nano=metric.point.time_unix_nano,
start_time_unix_nano=metric.point.start_time_unix_nano,
count=sum(metric.point.bucket_counts),
sum=metric.point.sum,
bucket_counts=metric.point.bucket_counts,
explicit_bounds=metric.point.explicit_bounds,
)
pbmetric.histogram.aggregation_temporality = (
metric.point.aggregation_temporality
)
pbmetric.histogram.data_points.append(pt)
elif isinstance(metric.point, Sum):
pt = pb2.NumberDataPoint(
attributes=self._translate_attributes(metric.attributes),
start_time_unix_nano=metric.point.start_time_unix_nano,
time_unix_nano=metric.point.time_unix_nano,
)
if isinstance(metric.point.value, int):
pt.as_int = metric.point.value
else:
pt.as_double = metric.point.value
# note that because sum is a message type, the fields must be
# set individually rather than instantiating a pb2.Sum and setting
# it once
pbmetric.sum.aggregation_temporality = (
metric.point.aggregation_temporality
)
pbmetric.sum.is_monotonic = metric.point.is_monotonic
pbmetric.sum.data_points.append(pt)
else:
logger.warn("unsupported datapoint type %s", metric.point)
continue

scope_metrics.metrics.append(
pbmetric,
)
return ExportMetricsServiceRequest(
resource_metrics=get_resource_data(
sdk_resource_scope_metrics,
Expand All @@ -170,7 +197,7 @@ def _translate_data(

def export(
self,
metrics: Sequence[Metric],
metrics: MappingMetricT,
timeout_millis: float = 10_000,
**kwargs,
) -> MetricExportResult:
Expand Down
Loading

0 comments on commit 264c0a9

Please sign in to comment.