From 369b07e1c7483556fa9da952ba27c2e2828ecefb Mon Sep 17 00:00:00 2001
From: Chengzhong Wu
Date: Thu, 1 Dec 2022 14:58:56 +0800
Subject: [PATCH 1/2] chore: enforce format with prettier (#3444)

---
 CHANGELOG.md | 1 +
 api/src/api/diag.ts | 7 +-
 api/src/api/metrics.ts | 12 +-
 api/src/common/Attributes.ts | 14 +-
 api/src/diag/types.ts | 14 +-
 api/src/index.ts | 42 +-
 api/src/metrics/Meter.ts | 27 +-
 api/src/metrics/Metric.ts | 42 +-
 api/src/metrics/NoopMeter.ts | 31 +-
 api/src/metrics/ObservableResult.ts | 14 +-
 api/src/platform/browser/globalThis.ts | 14 +-
 api/src/trace/ProxyTracer.ts | 12 +-
 api/src/trace/internal/utils.ts | 1 -
 api/test/common/api/api.test.ts | 5 +-
 api/test/common/baggage/Baggage.test.ts | 7 +-
 api/test/common/metrics/Metric.test.ts | 36 +-
 .../noop-implementations/noop-meter.test.ts | 32 +-
 .../noop-tracer-provider.test.ts | 12 +-
 .../trace/tracestate-validators.test.ts | 5 +-
 api/test/tree-shaking/tree-shaking.test.ts | 7 +-
 eslint.config.js | 11 +-
 .../api-logs/src/NoopLoggerProvider.ts | 6 +-
 .../packages/api-logs/src/api/logs.ts | 8 +-
 .../api-logs/src/internal/global-utils.ts | 4 +-
 .../src/platform/browser/globalThis.ts | 14 +-
 .../packages/api-logs/src/types/LogRecord.ts | 78 +-
 .../api-logs/src/types/LoggerProvider.ts | 2 +-
 .../noop-logger-provider.test.ts | 12 +-
 .../src/OTLPTraceExporter.ts | 24 +-
 .../test/OTLPTraceExporter.test.ts | 106 ++-
 .../test/traceHelper.ts | 97 ++-
 .../src/platform/browser/OTLPTraceExporter.ts | 26 +-
 .../src/platform/node/OTLPTraceExporter.ts | 24 +-
 .../browser/CollectorTraceExporter.test.ts | 81 +--
 .../test/node/CollectorTraceExporter.test.ts | 34 +-
 .../test/traceHelper.ts | 50 +-
 .../src/OTLPTraceExporter.ts | 31 +-
 .../test/OTLPTraceExporter.test.ts | 33 +-
 .../test/traceHelper.ts | 30 +-
 .../src/BrowserDetector.ts | 21 +-
 .../src/types.ts | 8 +-
 .../test/BrowserDetector.test.ts | 28 +-
 .../test/util.ts | 24 +-
 .../src/OTLPMetricExporter.ts | 27 +-
 .../test/OTLPMetricExporter.test.ts | 119 +--
 .../test/metricsHelper.ts | 75 +-
 .../src/OTLPMetricExporterBase.ts | 57 +-
 .../src/OTLPMetricExporterOptions.ts | 2 +-
 .../platform/browser/OTLPMetricExporter.ts | 26 +-
 .../src/platform/node/OTLPMetricExporter.ts | 28 +-
 .../browser/CollectorMetricExporter.test.ts | 199 ++++--
 .../common/CollectorMetricExporter.test.ts | 32 +-
 .../test/metricsHelper.ts | 79 +-
 .../test/node/CollectorMetricExporter.test.ts | 147 ++--
 .../src/OTLPMetricExporter.ts | 33 +-
 .../test/OTLPMetricExporter.test.ts | 67 +-
 .../test/metricsHelper.ts | 31 +-
 .../src/PrometheusExporter.ts | 49 +-
 .../src/PrometheusSerializer.ts | 51 +-
 .../test/PrometheusExporter.test.ts | 46 +-
 .../test/PrometheusSerializer.test.ts | 225 +++---
 .../test/util.ts | 24 +-
 .../src/fetch.ts | 28 +-
 .../test/fetch.test.ts | 116 +--
 .../src/enums/AttributeValues.ts | 4 +-
 .../src/grpc-js/clientUtils.ts | 5 +-
 .../src/grpc-js/index.ts | 36 +-
 .../src/grpc-js/serverUtils.ts | 5 +-
 .../src/grpc-js/types.ts | 3 +-
 .../src/grpc/clientUtils.ts | 28 +-
 .../src/grpc/index.ts | 41 +-
 .../src/grpc/serverUtils.ts | 7 +-
 .../src/instrumentation.ts | 4 +-
 .../src/types.ts | 18 +-
 .../src/utils.ts | 40 +-
 .../test/helper.ts | 156 ++--
 .../test/utils/assertionUtils.ts | 5 +-
 .../utils/extractMethodAndServiceUtils.ts | 27 +-
 .../src/http.ts | 228 ++++--
 .../src/types.ts | 19 +-
 .../src/utils.ts | 121 +++-
 .../test/functionals/http-disable.test.ts | 13 +-
 .../test/functionals/http-enable.test.ts | 213 +++---
 .../test/functionals/http-metrics.test.ts | 84 ++-
 .../test/functionals/https-disable.test.ts | 12 +-
 .../test/functionals/https-enable.test.ts | 102 +--
 .../test/functionals/https-package.test.ts | 8 +-
 .../test/functionals/utils.test.ts | 88 ++-
 .../test/integrations/http-enable.test.ts | 10 +-
 .../test/integrations/https-enable.test.ts | 2 +-
 .../test/utils/DummyPropagation.ts | 3 +-
 .../test/utils/TestMetricReader.ts | 8 +-
 .../test/utils/assertSpan.ts | 21 +-
 .../test/utils/httpRequest.ts | 2 +-
 .../test/utils/httpsRequest.ts | 2 +-
 .../src/xhr.ts | 14 +-
 .../test/xhr.test.ts | 195 ++---
 .../src/autoLoaderUtils.ts | 4 +-
 .../src/instrumentation.ts | 3 +-
 .../src/platform/browser/instrumentation.ts | 3 +-
 .../src/platform/node/ModuleNameTrie.ts | 7 +-
 .../node/RequireInTheMiddleSingleton.ts | 25 +-
 .../src/platform/node/instrumentation.ts | 48 +-
 .../instrumentationNodeModuleDefinition.ts | 3 +-
 .../node/instrumentationNodeModuleFile.ts | 3 +-
 .../test/common/autoLoader.test.ts | 8 +-
 .../test/node/InstrumentationBase.test.ts | 82 ++-
 .../test/node/ModuleNameTrie.test.ts | 11 +-
 .../node/RequireInTheMiddleSingleton.test.ts | 106 ++-
 .../src/TracerProviderWithEnvExporter.ts | 88 ++-
 .../opentelemetry-sdk-node/src/sdk.ts | 45 +-
 .../opentelemetry-sdk-node/src/types.ts | 2 +-
 .../TracerProviderWithEnvExporter.test.ts | 35 +-
 .../opentelemetry-sdk-node/test/sdk.test.ts | 278 +++++---
 .../test/util/resource-assertions.ts | 14 +-
 .../src/OTLPExporterBase.ts | 20 +-
 .../browser/OTLPExporterBrowserBase.ts | 26 +-
 .../src/platform/browser/util.ts | 10 +-
 .../src/platform/node/OTLPExporterNodeBase.ts | 3 +-
 .../src/platform/node/types.ts | 5 +-
 .../src/platform/node/util.ts | 26 +-
 .../packages/otlp-exporter-base/src/util.ts | 16 +-
 .../test/browser/util.test.ts | 48 +-
 .../test/common/CollectorExporter.test.ts | 16 +-
 .../otlp-exporter-base/test/node/util.test.ts | 89 ++-
 .../otlp-exporter-base/test/testHelper.ts | 14 +-
 .../src/OTLPGRPCExporterNodeBase.ts | 12 +-
 .../otlp-grpc-exporter-base/src/index.ts | 6 +-
 .../otlp-grpc-exporter-base/src/types.ts | 9 +-
 .../otlp-grpc-exporter-base/src/util.ts | 65 +-
 .../test/traceHelper.ts | 77 +-
 .../otlp-grpc-exporter-base/test/util.test.ts | 57 +-
 .../otlp-proto-exporter-base/.eslintignore | 1 +
 .../src/OTLPProtoExporterNodeBase.ts | 11 +-
 .../otlp-proto-exporter-base/src/util.ts | 16 +-
 .../otlp-transformer/src/common/internal.ts | 21 +-
 .../otlp-transformer/src/common/types.ts | 10 +-
 .../otlp-transformer/src/metrics/index.ts | 6 +-
 .../otlp-transformer/src/metrics/internal.ts | 73 +-
 .../otlp-transformer/src/metrics/types.ts | 35 +-
 .../otlp-transformer/src/trace/index.ts | 33 +-
 .../otlp-transformer/src/trace/internal.ts | 31 +-
 .../otlp-transformer/src/trace/types.ts | 39 +-
 .../otlp-transformer/test/common.test.ts | 33 +-
 .../otlp-transformer/test/metrics.test.ts | 165 +++--
 .../otlp-transformer/test/trace.test.ts | 109 ++-
 package.json | 3 +
 .../src/AbstractAsyncHooksContextManager.ts | 3 +-
 .../test/AsyncHooksContextManager.test.ts | 4 +-
 .../src/ZoneContextManager.ts | 13 +-
 .../test/ZoneContextManager.test.ts | 18 +-
 .../propagation/W3CBaggagePropagator.ts | 12 +-
 .../opentelemetry-core/src/baggage/utils.ts | 23 +-
 .../src/common/anchored-clock.ts | 1 -
 .../opentelemetry-core/src/common/time.ts | 6 +-
 packages/opentelemetry-core/src/index.ts | 2 +-
 .../src/internal/exporter.ts | 11 +-
 .../src/platform/browser/environment.ts | 4 +-
 .../src/platform/browser/globalThis.ts | 14 +-
 .../src/platform/browser/sdk-info.ts | 3 +-
 .../src/trace/W3CTraceContextPropagator.ts | 3 +-
 .../src/trace/sampler/ParentBasedSampler.ts | 3 +-
 .../opentelemetry-core/src/utils/callback.ts | 9 +-
 .../src/utils/environment.ts | 48 +-
 .../src/utils/lodash.merge.ts | 11 +-
 .../opentelemetry-core/src/utils/merge.ts | 24 +-
 .../test/baggage/W3CBaggagePropagator.test.ts | 17 +-
 .../test/platform/hex-to-base64.test.ts | 5 +-
 .../test/propagation/composite.test.ts | 3 +-
 .../opentelemetry-core/test/test-utils.ts | 7 +-
 .../trace/W3CTraceContextPropagator.test.ts | 3 +-
 .../test/utils/callback.test.ts | 5 +-
 .../test/utils/environment.test.ts | 4 +-
 .../test/utils/merge.test.ts | 165 +++--
 .../test/utils/wrap.test.ts | 2 +-
 .../src/jaeger.ts | 24 +-
 .../src/transform.ts | 41 +-
 .../src/types.ts | 8 +-
 .../test/jaeger.test.ts | 53 +-
 .../test/transform.test.ts | 15 +-
 .../src/platform/browser/util.ts | 5 +-
 .../src/platform/node/util.ts | 9 +-
 .../src/types.ts | 5 +-
 .../src/zipkin.ts | 4 +-
 .../test/helper.ts | 2 +-
 .../test/node/zipkin.test.ts | 6 +-
 .../src/B3Propagator.ts | 6 +-
 .../src/B3SinglePropagator.ts | 6 +-
 .../test/B3Propagator.test.ts | 24 +-
 .../src/JaegerPropagator.ts | 17 +-
 .../src/types.ts | 4 +-
 .../test/JaegerPropagator.test.ts | 32 +-
 .../src/detectors/BrowserDetector.ts | 3 +-
 .../src/detectors/EnvDetector.ts | 2 +-
 .../src/detectors/ProcessDetector.ts | 3 +-
 .../src/platform/node/detect-resources.ts | 1 -
 .../test/Resource.test.ts | 40 +-
 .../detectors/browser/EnvDetector.test.ts | 19 +-
 .../detectors/browser/HostDetector.test.ts | 4 +-
 .../test/detectors/browser/OSDetector.test.ts | 4 +-
 .../detectors/browser/ProcessDetector.test.ts | 4 +-
 .../detectors/node/BrowserDetector.test.ts | 6 +-
 .../test/detectors/node/EnvDetector.test.ts | 6 +-
 .../detectors/node/ProcessDetector.test.ts | 4 +-
 .../test/util/resource-assertions.ts | 31 +-
 .../src/BasicTracerProvider.ts | 29 +-
 .../opentelemetry-sdk-trace-base/src/Span.ts | 27 +-
 .../src/Tracer.ts | 19 +-
 .../src/config.ts | 17 +-
 .../src/export/BatchSpanProcessorBase.ts | 12 +-
 .../src/export/ConsoleSpanExporter.ts | 2 +-
 .../src/export/SimpleSpanProcessor.ts | 31 +-
 .../browser/export/BatchSpanProcessor.ts | 20 +-
 .../src/sampler/ParentBasedSampler.ts | 3 +-
 .../src/sampler/TraceIdRatioBasedSampler.ts | 1 -
 .../browser/export/BatchSpanProcessor.test.ts | 11 +-
 .../test/common/BasicTracerProvider.test.ts | 84 +--
 .../test/common/Sampler.test.ts | 9 +-
 .../test/common/Span.test.ts | 136 +++-
 .../test/common/Tracer.test.ts | 134 ++--
 .../test/common/config.test.ts | 2 +-
 .../export/BatchSpanProcessorBase.test.ts | 29 +-
 .../common/export/ConsoleSpanExporter.test.ts | 7 +-
 .../export/InMemorySpanExporter.test.ts | 10 +-
 .../common/export/SimpleSpanProcessor.test.ts | 3 +-
 .../test/NodeTracerProvider.test.ts | 41 +-
 .../test/registration.test.ts | 12 +-
 .../src/StackContextManager.ts | 2 +-
 .../opentelemetry-sdk-trace-web/src/utils.ts | 4 +-
 .../test/StackContextManager.test.ts | 10 +-
 .../test/WebTracerProvider.test.ts | 14 +-
 .../test/registration.test.ts | 14 +-
 .../test/utils.test.ts | 20 +-
 .../resource/SemanticResourceAttributes.ts | 402 +++++------
 .../src/trace/SemanticAttributes.ts | 675 +++++++++---------
 .../src/shim.ts | 23 +-
 .../test/Shim.test.ts | 72 +-
 .../sdk-metrics/src/InstrumentDescriptor.ts | 26 +-
 packages/sdk-metrics/src/Instruments.ts | 56 +-
 packages/sdk-metrics/src/Meter.ts | 94 ++-
 packages/sdk-metrics/src/MeterProvider.ts | 32 +-
 packages/sdk-metrics/src/ObservableResult.ts | 16 +-
 packages/sdk-metrics/src/aggregator/Drop.ts | 9 +-
 .../sdk-metrics/src/aggregator/Histogram.ts | 104 +--
 .../sdk-metrics/src/aggregator/LastValue.ts | 53 +-
 packages/sdk-metrics/src/aggregator/Sum.ts | 50 +-
 packages/sdk-metrics/src/aggregator/types.ts | 6 +-
 ...AlignedHistogramBucketExemplarReservoir.ts | 20 +-
 .../exemplar/AlwaysSampleExemplarFilter.ts | 2 -
 .../src/exemplar/ExemplarReservoir.ts | 34 +-
 .../src/exemplar/NeverSampleExemplarFilter.ts | 1 -
 .../SimpleFixedSizeExemplarReservoir.ts | 22 +-
 .../src/exemplar/WithTraceExemplarFilter.ts | 13 +-
 .../src/export/AggregationSelector.ts | 18 +-
 .../src/export/ConsoleMetricExporter.ts | 26 +-
 .../src/export/InMemoryMetricExporter.ts | 11 +-
 packages/sdk-metrics/src/export/MetricData.ts | 2 +-
 .../sdk-metrics/src/export/MetricExporter.ts | 13 +-
 .../sdk-metrics/src/export/MetricReader.ts | 19 +-
 .../export/PeriodicExportingMetricReader.ts | 44 +-
 packages/sdk-metrics/src/index.ts | 48 +-
 .../src/state/AsyncMetricStorage.ts | 9 +-
 .../src/state/DeltaMetricProcessor.ts | 46 +-
 packages/sdk-metrics/src/state/HashMap.ts | 10 +-
 .../sdk-metrics/src/state/MeterSharedState.ts | 118 ++-
 .../sdk-metrics/src/state/MetricCollector.ts | 13 +-
 .../sdk-metrics/src/state/MetricStorage.ts | 17 +-
 .../src/state/MetricStorageRegistry.ts | 53 +-
 .../src/state/MultiWritableMetricStorage.ts | 7 +-
 .../src/state/ObservableRegistry.ts | 145 ++--
 .../src/state/SyncMetricStorage.ts | 14 +-
 .../src/state/TemporalMetricProcessor.ts | 61 +-
 .../src/state/WritableMetricStorage.ts | 7 +-
 packages/sdk-metrics/src/types.ts | 2 +-
 packages/sdk-metrics/src/utils.ts | 87 ++-
 packages/sdk-metrics/src/view/Aggregation.ts | 21 +-
 .../src/view/AttributesProcessor.ts | 16 +-
 packages/sdk-metrics/src/view/Predicate.ts | 2 +-
 .../src/view/RegistrationConflicts.ts | 34 +-
 packages/sdk-metrics/src/view/View.ts | 27 +-
 packages/sdk-metrics/src/view/ViewRegistry.ts | 43 +-
 .../sdk-metrics/test/ExemplarFilter.test.ts | 24 +-
 .../test/ExemplarReservoir.test.ts | 39 +-
 .../test/InstrumentDescriptor.test.ts | 15 +-
 packages/sdk-metrics/test/Instruments.test.ts | 57 +-
 packages/sdk-metrics/test/Meter.test.ts | 67 +-
 .../sdk-metrics/test/MeterProvider.test.ts | 234 +++---
 .../sdk-metrics/test/ObservableResult.test.ts | 52 +-
 .../sdk-metrics/test/aggregator/Drop.test.ts | 15 +-
 .../test/aggregator/Histogram.test.ts | 68 +-
 .../test/aggregator/LastValue.test.ts | 48 +-
 .../sdk-metrics/test/aggregator/Sum.test.ts | 15 +-
 .../test/export/ConsoleMetricExporter.test.ts | 72 +-
 .../export/InMemoryMetricExporter.test.ts | 32 +-
 .../test/export/MetricReader.test.ts | 57 +-
 .../PeriodicExportingMetricReader.test.ts | 151 ++--
 .../test/export/TestMetricExporter.ts | 11 +-
 .../test/export/TestMetricProducer.ts | 5 +-
 .../test/export/TestMetricReader.ts | 6 +-
 packages/sdk-metrics/test/export/utils.ts | 26 +-
 .../test/state/AsyncMetricStorage.test.ts | 290 ++++++--
 .../test/state/DeltaMetricProcessor.test.ts | 14 +-
 .../test/state/MeterSharedState.test.ts | 202 ++++--
 .../test/state/MetricCollector.test.ts | 76 +-
 .../test/state/MetricStorageRegistry.test.ts | 146 ++--
 .../state/MultiWritableMetricStorage.test.ts | 28 +-
 .../test/state/ObservableRegistry.test.ts | 76 +-
 .../test/state/SyncMetricStorage.test.ts | 26 +-
 .../state/TemporalMetricProcessor.test.ts | 109 ++-
 packages/sdk-metrics/test/test-utils.ts | 7 +-
 packages/sdk-metrics/test/util.ts | 62 +-
 packages/sdk-metrics/test/utils.test.ts | 25 +-
 .../sdk-metrics/test/view/Aggregation.test.ts | 122 +++-
 .../test/view/AttributesProcessor.test.ts | 21 +-
 .../sdk-metrics/test/view/Predicate.test.ts | 5 +-
 packages/sdk-metrics/test/view/View.test.ts | 71 +-
 .../test/view/ViewRegistry.test.ts | 92 ++-
prettier.config.js | 8 + 318 files changed, 7696 insertions(+), 4737 deletions(-) create mode 100644 prettier.config.js diff --git a/CHANGELOG.md b/CHANGELOG.md index 6ec5fb321e..60a13ad70f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -27,6 +27,7 @@ For experimental package changes, see the [experimental CHANGELOG](experimental/ ### :house: (Internal) * chore: automatically generate tsconfigs [#3432](https://github.com/open-telemetry/opentelemetry-js/pull/3432) @legendecas +* chore: enforce format with prettier [#3444](https://github.com/open-telemetry/opentelemetry-js/pull/3444) @legendecas ## 1.8.0 diff --git a/api/src/api/diag.ts b/api/src/api/diag.ts index 516657d8c8..a873c2dc37 100644 --- a/api/src/api/diag.ts +++ b/api/src/api/diag.ts @@ -68,7 +68,7 @@ export class DiagAPI implements DiagLogger, DiagLoggerApi { const setLogger: DiagLoggerApi['setLogger'] = ( logger, - optionsOrLogLevel = { logLevel: DiagLogLevel.INFO }, + optionsOrLogLevel = { logLevel: DiagLogLevel.INFO } ) => { if (logger === self) { // There isn't much we can do here. @@ -88,7 +88,10 @@ export class DiagAPI implements DiagLogger, DiagLoggerApi { } const oldLogger = getGlobal('diag'); - const newLogger = createLogLevelDiagLogger(optionsOrLogLevel.logLevel ?? DiagLogLevel.INFO, logger); + const newLogger = createLogLevelDiagLogger( + optionsOrLogLevel.logLevel ?? DiagLogLevel.INFO, + logger + ); // There already is an logger registered. We'll let it know before overwriting it. if (oldLogger && !optionsOrLogLevel.suppressOverrideMessage) { const stack = new Error().stack ?? ''; diff --git a/api/src/api/metrics.ts b/api/src/api/metrics.ts index 5353d84b91..186e7cce4b 100644 --- a/api/src/api/metrics.ts +++ b/api/src/api/metrics.ts @@ -17,7 +17,11 @@ import { Meter, MeterOptions } from '../metrics/Meter'; import { MeterProvider } from '../metrics/MeterProvider'; import { NOOP_METER_PROVIDER } from '../metrics/NoopMeterProvider'; -import { getGlobal, registerGlobal, unregisterGlobal } from '../internal/global-utils'; +import { + getGlobal, + registerGlobal, + unregisterGlobal, +} from '../internal/global-utils'; import { DiagAPI } from './diag'; const API_NAME = 'metrics'; @@ -58,7 +62,11 @@ export class MetricsAPI { /** * Returns a meter from the global meter provider. */ - public getMeter(name: string, version?: string, options?: MeterOptions): Meter { + public getMeter( + name: string, + version?: string, + options?: MeterOptions + ): Meter { return this.getMeterProvider().getMeter(name, version, options); } diff --git a/api/src/common/Attributes.ts b/api/src/common/Attributes.ts index f3bdfe461c..53a8166d84 100644 --- a/api/src/common/Attributes.ts +++ b/api/src/common/Attributes.ts @@ -20,7 +20,7 @@ * Note: only the own enumerable keys are counted as valid attribute keys. */ export interface Attributes { - [attributeKey: string]: AttributeValue | undefined; + [attributeKey: string]: AttributeValue | undefined; } /** @@ -29,9 +29,9 @@ export interface Attributes { * null or undefined attribute values are invalid and will result in undefined behavior. */ export type AttributeValue = - | string - | number - | boolean - | Array - | Array - | Array; + | string + | number + | boolean + | Array + | Array + | Array; diff --git a/api/src/diag/types.ts b/api/src/diag/types.ts index e2ec879f03..e861d430c1 100644 --- a/api/src/diag/types.ts +++ b/api/src/diag/types.ts @@ -113,13 +113,13 @@ export interface LoggerOptions { export interface DiagLoggerApi { /** - * Set the global DiagLogger and DiagLogLevel. 
- * If a global diag logger is already set, this will override it. - * - * @param logger - The {@link DiagLogger} instance to set as the default logger. - * @param options - A {@link LoggerOptions} object. If not provided, default values will be set. - * @returns `true` if the logger was successfully registered, else `false` - */ + * Set the global DiagLogger and DiagLogLevel. + * If a global diag logger is already set, this will override it. + * + * @param logger - The {@link DiagLogger} instance to set as the default logger. + * @param options - A {@link LoggerOptions} object. If not provided, default values will be set. + * @returns `true` if the logger was successfully registered, else `false` + */ setLogger(logger: DiagLogger, options?: LoggerOptions): boolean; /** diff --git a/api/src/index.ts b/api/src/index.ts index 63325fa97e..4b71551bca 100644 --- a/api/src/index.ts +++ b/api/src/index.ts @@ -14,25 +14,15 @@ * limitations under the License. */ -export { - BaggageEntry, - BaggageEntryMetadata, - Baggage, -} from './baggage/types'; +export { BaggageEntry, BaggageEntryMetadata, Baggage } from './baggage/types'; export { baggageEntryMetadataFromString } from './baggage/utils'; export { Exception } from './common/Exception'; export { HrTime, TimeInput } from './common/Time'; export { Attributes, AttributeValue } from './common/Attributes'; // Context APIs -export { - createContextKey, - ROOT_CONTEXT, -} from './context/context'; -export { - Context, - ContextManager, -} from './context/types'; +export { createContextKey, ROOT_CONTEXT } from './context/context'; +export { Context, ContextManager } from './context/types'; export type { ContextAPI } from './api/context'; // Diag APIs @@ -46,16 +36,9 @@ export { export type { DiagAPI } from './api/diag'; // Metrics APIs -export { - createNoopMeter, -} from './metrics/NoopMeter'; -export { - MeterOptions, - Meter, -} from './metrics/Meter'; -export { - MeterProvider, -} from './metrics/MeterProvider'; +export { createNoopMeter } from './metrics/NoopMeter'; +export { MeterOptions, Meter } from './metrics/Meter'; +export { MeterProvider } from './metrics/MeterProvider'; export { ValueType, Counter, @@ -87,10 +70,7 @@ export { export type { PropagationAPI } from './api/propagation'; // Trace APIs -export { - SpanAttributes, - SpanAttributeValue, -} from './trace/attributes'; +export { SpanAttributes, SpanAttributeValue } from './trace/attributes'; export { Link } from './trace/link'; export { ProxyTracer, TracerDelegator } from './trace/ProxyTracer'; export { ProxyTracerProvider } from './trace/ProxyTracerProvider'; @@ -128,13 +108,7 @@ import { propagation } from './propagation-api'; import { trace } from './trace-api'; // Named export. -export { - context, - diag, - metrics, - propagation, - trace, -}; +export { context, diag, metrics, propagation, trace }; // Default export. export default { context, diff --git a/api/src/metrics/Meter.ts b/api/src/metrics/Meter.ts index 1904c48871..c399fc5fb7 100644 --- a/api/src/metrics/Meter.ts +++ b/api/src/metrics/Meter.ts @@ -84,7 +84,12 @@ export interface Meter { * @param name the name of the metric. * @param [options] the metric options. */ - createUpDownCounter(name: string, options?: MetricOptions): UpDownCounter; + createUpDownCounter< + AttributesTypes extends MetricAttributes = MetricAttributes + >( + name: string, + options?: MetricOptions + ): UpDownCounter; /** * Creates a new `ObservableGauge` metric. @@ -94,7 +99,9 @@ export interface Meter { * @param name the name of the metric. 
* @param [options] the metric options. */ - createObservableGauge( + createObservableGauge< + AttributesTypes extends MetricAttributes = MetricAttributes + >( name: string, options?: MetricOptions ): ObservableGauge; @@ -107,7 +114,9 @@ export interface Meter { * @param name the name of the metric. * @param [options] the metric options. */ - createObservableCounter( + createObservableCounter< + AttributesTypes extends MetricAttributes = MetricAttributes + >( name: string, options?: MetricOptions ): ObservableCounter; @@ -120,7 +129,9 @@ export interface Meter { * @param name the name of the metric. * @param [options] the metric options. */ - createObservableUpDownCounter( + createObservableUpDownCounter< + AttributesTypes extends MetricAttributes = MetricAttributes + >( name: string, options?: MetricOptions ): ObservableUpDownCounter; @@ -139,7 +150,9 @@ export interface Meter { * @param callback the batch observable callback * @param observables the observables associated with this batch observable callback */ - addBatchObservableCallback( + addBatchObservableCallback< + AttributesTypes extends MetricAttributes = MetricAttributes + >( callback: BatchObservableCallback, observables: Observable[] ): void; @@ -153,7 +166,9 @@ export interface Meter { * @param callback the batch observable callback * @param observables the observables associated with this batch observable callback */ - removeBatchObservableCallback( + removeBatchObservableCallback< + AttributesTypes extends MetricAttributes = MetricAttributes + >( callback: BatchObservableCallback, observables: Observable[] ): void; diff --git a/api/src/metrics/Metric.ts b/api/src/metrics/Metric.ts index 687369b92f..36d773441e 100644 --- a/api/src/metrics/Metric.ts +++ b/api/src/metrics/Metric.ts @@ -62,21 +62,27 @@ export enum ValueType { *
  • count the number of 5xx errors.
  • *
      */ -export interface Counter { +export interface Counter< + AttributesTypes extends MetricAttributes = MetricAttributes +> { /** * Increment value of counter by the input. Inputs must not be negative. */ add(value: number, attributes?: AttributesTypes, context?: Context): void; } -export interface UpDownCounter { +export interface UpDownCounter< + AttributesTypes extends MetricAttributes = MetricAttributes +> { /** * Increment value of counter by the input. Inputs may be negative. */ add(value: number, attributes?: AttributesTypes, context?: Context): void; } -export interface Histogram { +export interface Histogram< + AttributesTypes extends MetricAttributes = MetricAttributes +> { /** * Records a measurement. Value of the measurement must not be negative. */ @@ -96,16 +102,24 @@ export type MetricAttributeValue = AttributeValue; /** * The observable callback for Observable instruments. */ -export type ObservableCallback = - (observableResult: ObservableResult) => void | Promise; +export type ObservableCallback< + AttributesTypes extends MetricAttributes = MetricAttributes +> = ( + observableResult: ObservableResult +) => void | Promise; /** * The observable callback for a batch of Observable instruments. */ -export type BatchObservableCallback = - (observableResult: BatchObservableResult) => void | Promise; +export type BatchObservableCallback< + AttributesTypes extends MetricAttributes = MetricAttributes +> = ( + observableResult: BatchObservableResult +) => void | Promise; -export interface Observable { +export interface Observable< + AttributesTypes extends MetricAttributes = MetricAttributes +> { /** * Sets up a function that will be called whenever a metric collection is initiated. * @@ -119,6 +133,12 @@ export interface Observable): void; } -export type ObservableCounter = Observable; -export type ObservableUpDownCounter = Observable; -export type ObservableGauge = Observable; +export type ObservableCounter< + AttributesTypes extends MetricAttributes = MetricAttributes +> = Observable; +export type ObservableUpDownCounter< + AttributesTypes extends MetricAttributes = MetricAttributes +> = Observable; +export type ObservableGauge< + AttributesTypes extends MetricAttributes = MetricAttributes +> = Observable; diff --git a/api/src/metrics/NoopMeter.ts b/api/src/metrics/NoopMeter.ts index de9a45eaac..81143c0ddd 100644 --- a/api/src/metrics/NoopMeter.ts +++ b/api/src/metrics/NoopMeter.ts @@ -62,7 +62,7 @@ export class NoopMeter implements Meter { */ createObservableGauge( _name: string, - _options?: MetricOptions, + _options?: MetricOptions ): ObservableGauge { return NOOP_OBSERVABLE_GAUGE_METRIC; } @@ -72,7 +72,7 @@ export class NoopMeter implements Meter { */ createObservableCounter( _name: string, - _options?: MetricOptions, + _options?: MetricOptions ): ObservableCounter { return NOOP_OBSERVABLE_COUNTER_METRIC; } @@ -82,7 +82,7 @@ export class NoopMeter implements Meter { */ createObservableUpDownCounter( _name: string, - _options?: MetricOptions, + _options?: MetricOptions ): ObservableUpDownCounter { return NOOP_OBSERVABLE_UP_DOWN_COUNTER_METRIC; } @@ -90,7 +90,10 @@ export class NoopMeter implements Meter { /** * @see {@link Meter.addBatchObservableCallback} */ - addBatchObservableCallback(_callback: BatchObservableCallback, _observables: Observable[]): void {} + addBatchObservableCallback( + _callback: BatchObservableCallback, + _observables: Observable[] + ): void {} /** * @see {@link Meter.removeBatchObservableCallback} @@ -104,7 +107,10 @@ export class NoopCounterMetric 
extends NoopMetric implements Counter { add(_value: number, _attributes: MetricAttributes): void {} } -export class NoopUpDownCounterMetric extends NoopMetric implements UpDownCounter { +export class NoopUpDownCounterMetric + extends NoopMetric + implements UpDownCounter +{ add(_value: number, _attributes: MetricAttributes): void {} } @@ -118,11 +124,17 @@ export class NoopObservableMetric { removeCallback(_callback: ObservableCallback) {} } -export class NoopObservableCounterMetric extends NoopObservableMetric implements ObservableCounter {} +export class NoopObservableCounterMetric + extends NoopObservableMetric + implements ObservableCounter {} -export class NoopObservableGaugeMetric extends NoopObservableMetric implements ObservableGauge {} +export class NoopObservableGaugeMetric + extends NoopObservableMetric + implements ObservableGauge {} -export class NoopObservableUpDownCounterMetric extends NoopObservableMetric implements ObservableUpDownCounter {} +export class NoopObservableUpDownCounterMetric + extends NoopObservableMetric + implements ObservableUpDownCounter {} export const NOOP_METER = new NoopMeter(); @@ -134,7 +146,8 @@ export const NOOP_UP_DOWN_COUNTER_METRIC = new NoopUpDownCounterMetric(); // Asynchronous instruments export const NOOP_OBSERVABLE_COUNTER_METRIC = new NoopObservableCounterMetric(); export const NOOP_OBSERVABLE_GAUGE_METRIC = new NoopObservableGaugeMetric(); -export const NOOP_OBSERVABLE_UP_DOWN_COUNTER_METRIC = new NoopObservableUpDownCounterMetric(); +export const NOOP_OBSERVABLE_UP_DOWN_COUNTER_METRIC = + new NoopObservableUpDownCounterMetric(); /** * Create a no-op Meter diff --git a/api/src/metrics/ObservableResult.ts b/api/src/metrics/ObservableResult.ts index 3bc4bc9c11..70d0eff1dc 100644 --- a/api/src/metrics/ObservableResult.ts +++ b/api/src/metrics/ObservableResult.ts @@ -19,7 +19,9 @@ import { MetricAttributes, Observable } from './Metric'; /** * Interface that is being used in callback function for Observable Metric. */ -export interface ObservableResult { +export interface ObservableResult< + AttributesTypes extends MetricAttributes = MetricAttributes +> { /** * Observe a measurement of the value associated with the given attributes. * @@ -34,7 +36,9 @@ export interface ObservableResult { +export interface BatchObservableResult< + AttributesTypes extends MetricAttributes = MetricAttributes +> { /** * Observe a measurement of the value associated with the given attributes. * @@ -44,5 +48,9 @@ export interface BatchObservableResult, value: number, attributes?: AttributesTypes): void; + observe( + metric: Observable, + value: number, + attributes?: AttributesTypes + ): void; } diff --git a/api/src/platform/browser/globalThis.ts b/api/src/platform/browser/globalThis.ts index 76b640b668..f09378a95e 100644 --- a/api/src/platform/browser/globalThis.ts +++ b/api/src/platform/browser/globalThis.ts @@ -27,8 +27,12 @@ /** only globals that common to node and browsers are allowed */ // eslint-disable-next-line node/no-unsupported-features/es-builtins, no-undef export const _globalThis: typeof globalThis = - typeof globalThis === 'object' ? globalThis : - typeof self === 'object' ? self : - typeof window === 'object' ? window : - typeof global === 'object' ? global : - {} as typeof globalThis; + typeof globalThis === 'object' + ? globalThis + : typeof self === 'object' + ? self + : typeof window === 'object' + ? window + : typeof global === 'object' + ? 
global + : ({} as typeof globalThis); diff --git a/api/src/trace/ProxyTracer.ts b/api/src/trace/ProxyTracer.ts index a858bdc3ce..6c2c77cfea 100644 --- a/api/src/trace/ProxyTracer.ts +++ b/api/src/trace/ProxyTracer.ts @@ -60,7 +60,11 @@ export class ProxyTracer implements Tracer { return this._delegate; } - const tracer = this._provider.getDelegateTracer(this.name, this.version, this.options); + const tracer = this._provider.getDelegateTracer( + this.name, + this.version, + this.options + ); if (!tracer) { return NOOP_TRACER; @@ -72,5 +76,9 @@ export class ProxyTracer implements Tracer { } export interface TracerDelegator { - getDelegateTracer(name: string, version?: string, options?: TracerOptions): Tracer | undefined; + getDelegateTracer( + name: string, + version?: string, + options?: TracerOptions + ): Tracer | undefined; } diff --git a/api/src/trace/internal/utils.ts b/api/src/trace/internal/utils.ts index 080be77848..a070143e66 100644 --- a/api/src/trace/internal/utils.ts +++ b/api/src/trace/internal/utils.ts @@ -17,7 +17,6 @@ import { TraceState } from '../trace_state'; import { TraceStateImpl } from './tracestate-impl'; - export function createTraceState(rawTraceState?: string): TraceState { return new TraceStateImpl(rawTraceState); } diff --git a/api/test/common/api/api.test.ts b/api/test/common/api/api.test.ts index 3184827cc6..23ffcf5732 100644 --- a/api/test/common/api/api.test.ts +++ b/api/test/common/api/api.test.ts @@ -58,7 +58,10 @@ describe('API', () => { it('getActiveSpan should get the current span', () => { const span = new NonRecordingSpan(); const ctx = trace.setSpan(ROOT_CONTEXT, span); - context.setGlobalContextManager({ active: () => ctx, disable: () => {} } as any); + context.setGlobalContextManager({ + active: () => ctx, + disable: () => {}, + } as any); const active = trace.getActiveSpan(); assert.strictEqual(active, span); diff --git a/api/test/common/baggage/Baggage.test.ts b/api/test/common/baggage/Baggage.test.ts index 28401c0524..2eacda29a2 100644 --- a/api/test/common/baggage/Baggage.test.ts +++ b/api/test/common/baggage/Baggage.test.ts @@ -135,12 +135,15 @@ describe('Baggage', () => { it('should get the current baggage', () => { const entries = { - 'banana': {value: 'boats'} + banana: { value: 'boats' }, }; const bag = propagation.createBaggage(entries); const ctx = propagation.setBaggage(ROOT_CONTEXT, bag); - context.setGlobalContextManager({ active: () => ctx, disable: () => {} } as any); + context.setGlobalContextManager({ + active: () => ctx, + disable: () => {}, + } as any); assert.strictEqual(bag, propagation.getActiveBaggage()); diff --git a/api/test/common/metrics/Metric.test.ts b/api/test/common/metrics/Metric.test.ts index d7111d01f7..95422d4cc2 100644 --- a/api/test/common/metrics/Metric.test.ts +++ b/api/test/common/metrics/Metric.test.ts @@ -17,90 +17,90 @@ import { Counter, UpDownCounter, Histogram } from '../../../src'; describe('Metric', () => { - describe('Counter', () =>{ + describe('Counter', () => { it('enable not to define any type', () => { const counter: Counter = { - add(_value: number, _attribute: unknown) {} + add(_value: number, _attribute: unknown) {}, }; counter.add(1, { 'some-attribute': 'value' }); }); it('enable to use with type', () => { type Attributes = { - 'some-attribute': string + 'some-attribute': string; }; const counter: Counter = { - add(_value: number, _attribute: Attributes) {} + add(_value: number, _attribute: Attributes) {}, }; counter.add(1, { 'some-attribute': 'value' }); }); it('disable wrong attributes by 
typing', () => { type Attributes = { - 'some-attribute': string + 'some-attribute': string; }; const counter: Counter = { - add(_value: number, _attribute: Attributes) {} + add(_value: number, _attribute: Attributes) {}, }; // @ts-expect-error Expacting the type of Attributes counter.add(1, { 'another-attribute': 'value' }); }); }); - describe('UpDownCounter', () =>{ + describe('UpDownCounter', () => { it('enable not to define any type', () => { const counter: UpDownCounter = { - add(_value: number, _attribute: unknown) {} + add(_value: number, _attribute: unknown) {}, }; counter.add(1, { 'some-attribute': 'value' }); }); it('enable to use with type', () => { type Attributes = { - 'some-attribute': string + 'some-attribute': string; }; const counter: UpDownCounter = { - add(_value: number, _attribute: Attributes) {} + add(_value: number, _attribute: Attributes) {}, }; counter.add(1, { 'some-attribute': 'value' }); }); it('disable wrong attributes by typing', () => { type Attributes = { - 'some-attribute': string + 'some-attribute': string; }; const counter: UpDownCounter = { - add(_value: number, _attribute: Attributes) {} + add(_value: number, _attribute: Attributes) {}, }; // @ts-expect-error Expacting the type of Attributes counter.add(1, { 'another-attribute': 'value' }); }); }); - describe('Histogram', () =>{ + describe('Histogram', () => { it('enable not to define any type', () => { const counter: Histogram = { - record(_value: number, _attribute: unknown) {} + record(_value: number, _attribute: unknown) {}, }; counter.record(1, { 'some-attribute': 'value' }); }); it('enable to use with type', () => { type Attributes = { - 'some-attribute': string + 'some-attribute': string; }; const counter: Histogram = { - record(_value: number, _attribute: Attributes) {} + record(_value: number, _attribute: Attributes) {}, }; counter.record(1, { 'some-attribute': 'value' }); }); it('disable wrong attributes by typing', () => { type Attributes = { - 'some-attribute': string + 'some-attribute': string; }; const counter: Histogram = { - record(_value: number, _attribute: Attributes) {} + record(_value: number, _attribute: Attributes) {}, }; // @ts-expect-error Expacting the type of Attributes counter.record(1, { 'another-attribute': 'value' }); diff --git a/api/test/common/noop-implementations/noop-meter.test.ts b/api/test/common/noop-implementations/noop-meter.test.ts index 951bdb80e7..cde2b094de 100644 --- a/api/test/common/noop-implementations/noop-meter.test.ts +++ b/api/test/common/noop-implementations/noop-meter.test.ts @@ -61,10 +61,7 @@ describe('NoopMeter', () => { // ensure the correct noop const is returned assert.strictEqual(histogram, NOOP_HISTOGRAM_METRIC); - const histogramWithOptions = meter.createHistogram( - 'some-name', - options - ); + const histogramWithOptions = meter.createHistogram('some-name', options); assert.strictEqual(histogramWithOptions, NOOP_HISTOGRAM_METRIC); }); @@ -95,7 +92,10 @@ describe('NoopMeter', () => { 'some-name', options ); - assert.strictEqual(observableCounterWithOptions, NOOP_OBSERVABLE_COUNTER_METRIC); + assert.strictEqual( + observableCounterWithOptions, + NOOP_OBSERVABLE_COUNTER_METRIC + ); }); it('observable gauge should not crash', () => { @@ -110,22 +110,30 @@ describe('NoopMeter', () => { 'some-name', options ); - assert.strictEqual(observableGaugeWithOptions, NOOP_OBSERVABLE_GAUGE_METRIC); + assert.strictEqual( + observableGaugeWithOptions, + NOOP_OBSERVABLE_GAUGE_METRIC + ); }); it('observable up down counter should not crash', () => { const 
meter = new NoopMeterProvider().getMeter('test-noop'); - const observableUpDownCounter = meter.createObservableUpDownCounter('some-name'); + const observableUpDownCounter = + meter.createObservableUpDownCounter('some-name'); observableUpDownCounter.addCallback(() => {}); // ensure the correct noop const is returned - assert.strictEqual(observableUpDownCounter, NOOP_OBSERVABLE_UP_DOWN_COUNTER_METRIC); + assert.strictEqual( + observableUpDownCounter, + NOOP_OBSERVABLE_UP_DOWN_COUNTER_METRIC + ); - const observableUpDownCounterWithOptions = meter.createObservableUpDownCounter( - 'some-name', - options + const observableUpDownCounterWithOptions = + meter.createObservableUpDownCounter('some-name', options); + assert.strictEqual( + observableUpDownCounterWithOptions, + NOOP_OBSERVABLE_UP_DOWN_COUNTER_METRIC ); - assert.strictEqual(observableUpDownCounterWithOptions, NOOP_OBSERVABLE_UP_DOWN_COUNTER_METRIC); }); it('batch callback should not crash', () => { diff --git a/api/test/common/noop-implementations/noop-tracer-provider.test.ts b/api/test/common/noop-implementations/noop-tracer-provider.test.ts index d9eda44ee9..204cea6fe7 100644 --- a/api/test/common/noop-implementations/noop-tracer-provider.test.ts +++ b/api/test/common/noop-implementations/noop-tracer-provider.test.ts @@ -23,9 +23,13 @@ describe('NoopTracerProvider', () => { const tracerProvider = new NoopTracerProvider(); assert.ok(tracerProvider.getTracer('tracer-name') instanceof NoopTracer); - assert.ok(tracerProvider.getTracer('tracer-name', 'v1') instanceof NoopTracer); - assert.ok(tracerProvider.getTracer('tracer-name', 'v1', { - schemaUrl: 'https://opentelemetry.io/schemas/1.7.0' - }) instanceof NoopTracer); + assert.ok( + tracerProvider.getTracer('tracer-name', 'v1') instanceof NoopTracer + ); + assert.ok( + tracerProvider.getTracer('tracer-name', 'v1', { + schemaUrl: 'https://opentelemetry.io/schemas/1.7.0', + }) instanceof NoopTracer + ); }); }); diff --git a/api/test/common/trace/tracestate-validators.test.ts b/api/test/common/trace/tracestate-validators.test.ts index 15f49434eb..4f5ff6f631 100644 --- a/api/test/common/trace/tracestate-validators.test.ts +++ b/api/test/common/trace/tracestate-validators.test.ts @@ -15,7 +15,10 @@ */ import * as assert from 'assert'; -import { validateKey, validateValue } from '../../../src/trace/internal/tracestate-validators'; +import { + validateKey, + validateValue, +} from '../../../src/trace/internal/tracestate-validators'; describe('validators', () => { describe('validateKey', () => { diff --git a/api/test/tree-shaking/tree-shaking.test.ts b/api/test/tree-shaking/tree-shaking.test.ts index 5c75b8e9d5..fd419b6fdd 100644 --- a/api/test/tree-shaking/tree-shaking.test.ts +++ b/api/test/tree-shaking/tree-shaking.test.ts @@ -38,7 +38,7 @@ describe('tree-shaking', () => { }, { name: 'TraceAPI', - export: 'trace' + export: 'trace', }, ]; const APIMatcher = /(?:class|function) (\w+API)/g; @@ -76,12 +76,11 @@ describe('tree-shaking', () => { minimize: false, // disable module concatenation so that variable names will not be mangled. 
concatenateModules: false, - } + }, }); const fs = new Union(); - fs.use(mfs as any) - .use(realFs); + fs.use(mfs as any).use(realFs); //direct webpack to use unionfs for file input compiler.inputFileSystem = fs; diff --git a/eslint.config.js b/eslint.config.js index 5e8f1b50e6..2ec8974641 100644 --- a/eslint.config.js +++ b/eslint.config.js @@ -2,19 +2,16 @@ module.exports = { plugins: [ "@typescript-eslint", "header", - "node" + "node", + "prettier" ], - extends: ["eslint:recommended", "plugin:@typescript-eslint/recommended"], + extends: ["eslint:recommended", "plugin:@typescript-eslint/recommended", "plugin:prettier/recommended"], parser: "@typescript-eslint/parser", parserOptions: { "project": "./tsconfig.json" }, rules: { - "indent": ["error", 2, { "SwitchCase": 1 }], - "no-trailing-spaces": "error", - "eol-last": "error", "quotes": ["error", "single", { "avoidEscape": true }], - "brace-style": ["error", "1tbs"], "eqeqeq": [ "error", "smart" @@ -22,7 +19,6 @@ module.exports = { "prefer-rest-params": "off", "no-console": "error", "no-shadow": "off", - "arrow-parens": ["error", "as-needed"], "node/no-deprecated-api": ["warn"], "header/header": ["error", "block", [{ pattern: / \* Copyright The OpenTelemetry Authors[\r\n]+ \*[\r\n]+ \* Licensed under the Apache License, Version 2\.0 \(the \"License\"\);[\r\n]+ \* you may not use this file except in compliance with the License\.[\r\n]+ \* You may obtain a copy of the License at[\r\n]+ \*[\r\n]+ \* https:\/\/www\.apache\.org\/licenses\/LICENSE-2\.0[\r\n]+ \*[\r\n]+ \* Unless required by applicable law or agreed to in writing, software[\r\n]+ \* distributed under the License is distributed on an \"AS IS\" BASIS,[\r\n]+ \* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied\.[\r\n]+ \* See the License for the specific language governing permissions and[\r\n]+ \* limitations under the License\./gm, @@ -54,7 +50,6 @@ module.exports = { } }], "@typescript-eslint/no-shadow": ["warn"], - "@typescript-eslint/semi": "error" } }, { diff --git a/experimental/packages/api-logs/src/NoopLoggerProvider.ts b/experimental/packages/api-logs/src/NoopLoggerProvider.ts index 5c69e5a7bd..aea947ed80 100644 --- a/experimental/packages/api-logs/src/NoopLoggerProvider.ts +++ b/experimental/packages/api-logs/src/NoopLoggerProvider.ts @@ -20,7 +20,11 @@ import { LoggerOptions } from './types/LoggerOptions'; import { NoopLogger } from './NoopLogger'; export class NoopLoggerProvider implements LoggerProvider { - getLogger(_name: string, _version?: string | undefined, _options?: LoggerOptions | undefined): Logger { + getLogger( + _name: string, + _version?: string | undefined, + _options?: LoggerOptions | undefined + ): Logger { return new NoopLogger(); } } diff --git a/experimental/packages/api-logs/src/api/logs.ts b/experimental/packages/api-logs/src/api/logs.ts index 0f9388648a..d6f24b5e07 100644 --- a/experimental/packages/api-logs/src/api/logs.ts +++ b/experimental/packages/api-logs/src/api/logs.ts @@ -18,7 +18,7 @@ import { API_BACKWARDS_COMPATIBILITY_VERSION, GLOBAL_LOGS_API_KEY, _global, - makeGetter + makeGetter, } from '../internal/global-utils'; import { LoggerProvider } from '../types/LoggerProvider'; import { NOOP_LOGGER_PROVIDER } from '../NoopLoggerProvider'; @@ -69,7 +69,11 @@ export class LogsAPI { * * @returns Logger */ - public getLogger(name: string, version?: string, options?: LoggerOptions): Logger { + public getLogger( + name: string, + version?: string, + options?: LoggerOptions + ): Logger { return 
this.getLoggerProvider().getLogger(name, version, options); } diff --git a/experimental/packages/api-logs/src/internal/global-utils.ts b/experimental/packages/api-logs/src/internal/global-utils.ts index 705139aeda..6325db8144 100644 --- a/experimental/packages/api-logs/src/internal/global-utils.ts +++ b/experimental/packages/api-logs/src/internal/global-utils.ts @@ -17,9 +17,7 @@ import { LoggerProvider } from '../types/LoggerProvider'; import { _globalThis } from '../platform'; -export const GLOBAL_LOGS_API_KEY = Symbol.for( - 'io.opentelemetry.js.api.logs' -); +export const GLOBAL_LOGS_API_KEY = Symbol.for('io.opentelemetry.js.api.logs'); type Get = (version: number) => T; type OtelGlobal = Partial<{ diff --git a/experimental/packages/api-logs/src/platform/browser/globalThis.ts b/experimental/packages/api-logs/src/platform/browser/globalThis.ts index 1dece570f3..e8a79351b2 100644 --- a/experimental/packages/api-logs/src/platform/browser/globalThis.ts +++ b/experimental/packages/api-logs/src/platform/browser/globalThis.ts @@ -28,8 +28,12 @@ /** only globals that common to node and browsers are allowed */ // eslint-disable-next-line node/no-unsupported-features/es-builtins, no-undef export const _globalThis: typeof globalThis = - typeof globalThis === 'object' ? globalThis : - typeof self === 'object' ? self : - typeof window === 'object' ? window : - typeof global === 'object' ? global : - {} as typeof globalThis; + typeof globalThis === 'object' + ? globalThis + : typeof self === 'object' + ? self + : typeof window === 'object' + ? window + : typeof global === 'object' + ? global + : ({} as typeof globalThis); diff --git a/experimental/packages/api-logs/src/types/LogRecord.ts b/experimental/packages/api-logs/src/types/LogRecord.ts index 132900c54a..dab3302911 100644 --- a/experimental/packages/api-logs/src/types/LogRecord.ts +++ b/experimental/packages/api-logs/src/types/LogRecord.ts @@ -17,43 +17,43 @@ import { Attributes } from '@opentelemetry/api'; export interface LogRecord { - /** - * The time when the log record occurred as UNIX Epoch time in nanoseconds. - */ - timestamp?: number; - - /** - * Numerical value of the severity. - */ - severityNumber?: number; - - /** - * The severity text. - */ - severityText?: string; - - /** - * A value containing the body of the log record. - */ - body?: string; - - /** - * Attributes that define the log record. - */ - attributes?: Attributes; - - /** - * 8 least significant bits are the trace flags as defined in W3C Trace Context specification. - */ - traceFlags?: number; - - /** - * A unique identifier for a trace. - */ - traceId?: string; - - /** - * A unique identifier for a span within a trace. - */ - spanId?: string; + /** + * The time when the log record occurred as UNIX Epoch time in nanoseconds. + */ + timestamp?: number; + + /** + * Numerical value of the severity. + */ + severityNumber?: number; + + /** + * The severity text. + */ + severityText?: string; + + /** + * A value containing the body of the log record. + */ + body?: string; + + /** + * Attributes that define the log record. + */ + attributes?: Attributes; + + /** + * 8 least significant bits are the trace flags as defined in W3C Trace Context specification. + */ + traceFlags?: number; + + /** + * A unique identifier for a trace. + */ + traceId?: string; + + /** + * A unique identifier for a span within a trace. 
+ */ + spanId?: string; } diff --git a/experimental/packages/api-logs/src/types/LoggerProvider.ts b/experimental/packages/api-logs/src/types/LoggerProvider.ts index e79aa1ce09..10ed6debf2 100644 --- a/experimental/packages/api-logs/src/types/LoggerProvider.ts +++ b/experimental/packages/api-logs/src/types/LoggerProvider.ts @@ -21,7 +21,7 @@ import { LoggerOptions } from './LoggerOptions'; * A registry for creating named {@link Logger}s. */ export interface LoggerProvider { - /** + /** * Returns a Logger, creating one if one with the given name, version, and * schemaUrl pair is not already created. * diff --git a/experimental/packages/api-logs/test/noop-implementations/noop-logger-provider.test.ts b/experimental/packages/api-logs/test/noop-implementations/noop-logger-provider.test.ts index e696c77b01..06c57b0c7b 100644 --- a/experimental/packages/api-logs/test/noop-implementations/noop-logger-provider.test.ts +++ b/experimental/packages/api-logs/test/noop-implementations/noop-logger-provider.test.ts @@ -23,9 +23,13 @@ describe('NoopLoggerProvider', () => { const loggerProvider = new NoopLoggerProvider(); assert.ok(loggerProvider.getLogger('logger-name') instanceof NoopLogger); - assert.ok(loggerProvider.getLogger('logger-name', 'v1') instanceof NoopLogger); - assert.ok(loggerProvider.getLogger('logger-name', 'v1', { - schemaUrl: 'https://opentelemetry.io/schemas/1.7.0' - }) instanceof NoopLogger); + assert.ok( + loggerProvider.getLogger('logger-name', 'v1') instanceof NoopLogger + ); + assert.ok( + loggerProvider.getLogger('logger-name', 'v1', { + schemaUrl: 'https://opentelemetry.io/schemas/1.7.0', + }) instanceof NoopLogger + ); }); }); diff --git a/experimental/packages/exporter-trace-otlp-grpc/src/OTLPTraceExporter.ts b/experimental/packages/exporter-trace-otlp-grpc/src/OTLPTraceExporter.ts index f4b0554715..2cc5abcb2e 100644 --- a/experimental/packages/exporter-trace-otlp-grpc/src/OTLPTraceExporter.ts +++ b/experimental/packages/exporter-trace-otlp-grpc/src/OTLPTraceExporter.ts @@ -22,21 +22,25 @@ import { OTLPGRPCExporterNodeBase, ServiceClientType, validateAndNormalizeUrl, - DEFAULT_COLLECTOR_URL + DEFAULT_COLLECTOR_URL, } from '@opentelemetry/otlp-grpc-exporter-base'; -import { createExportTraceServiceRequest, IExportTraceServiceRequest } from '@opentelemetry/otlp-transformer'; +import { + createExportTraceServiceRequest, + IExportTraceServiceRequest, +} from '@opentelemetry/otlp-transformer'; /** * OTLP Trace Exporter for Node */ export class OTLPTraceExporter - extends OTLPGRPCExporterNodeBase - implements SpanExporter { - + extends OTLPGRPCExporterNodeBase + implements SpanExporter +{ constructor(config: OTLPGRPCExporterConfigNode = {}) { super(config); - const headers = baggageUtils.parseKeyPairsIntoRecord(getEnv().OTEL_EXPORTER_OTLP_TRACES_HEADERS); + const headers = baggageUtils.parseKeyPairsIntoRecord( + getEnv().OTEL_EXPORTER_OTLP_TRACES_HEADERS + ); this.metadata ||= new Metadata(); for (const [k, v] of Object.entries(headers)) { this.metadata.set(k, v); @@ -64,8 +68,10 @@ export class OTLPTraceExporter return config.url; } - return getEnv().OTEL_EXPORTER_OTLP_TRACES_ENDPOINT || + return ( + getEnv().OTEL_EXPORTER_OTLP_TRACES_ENDPOINT || getEnv().OTEL_EXPORTER_OTLP_ENDPOINT || - DEFAULT_COLLECTOR_URL; + DEFAULT_COLLECTOR_URL + ); } } diff --git a/experimental/packages/exporter-trace-otlp-grpc/test/OTLPTraceExporter.test.ts b/experimental/packages/exporter-trace-otlp-grpc/test/OTLPTraceExporter.test.ts index d1052e9edc..29c080b966 100644 --- 
a/experimental/packages/exporter-trace-otlp-grpc/test/OTLPTraceExporter.test.ts +++ b/experimental/packages/exporter-trace-otlp-grpc/test/OTLPTraceExporter.test.ts @@ -37,11 +37,16 @@ import { import * as core from '@opentelemetry/core'; import { CompressionAlgorithm } from '@opentelemetry/otlp-exporter-base'; import { GrpcCompressionAlgorithm } from '@opentelemetry/otlp-grpc-exporter-base'; -import { IExportTraceServiceRequest, IResourceSpans } from '@opentelemetry/otlp-transformer'; +import { + IExportTraceServiceRequest, + IResourceSpans, +} from '@opentelemetry/otlp-transformer'; const traceServiceProtoPath = 'opentelemetry/proto/collector/trace/v1/trace_service.proto'; -const includeDirs = [path.resolve(__dirname, '../../otlp-grpc-exporter-base/protos')]; +const includeDirs = [ + path.resolve(__dirname, '../../otlp-grpc-exporter-base/protos'), +]; const address = 'localhost:1501'; @@ -59,9 +64,7 @@ const testCollectorExporter = (params: TestParams) => } TLS, ${params.metadata ? 'with' : 'without'} metadata`, () => { let collectorExporter: OTLPTraceExporter; let server: grpc.Server; - let exportedData: - | IResourceSpans - | undefined; + let exportedData: IResourceSpans | undefined; let reqMetadata: grpc.Metadata | undefined; before(done => { @@ -76,9 +79,8 @@ const testCollectorExporter = (params: TestParams) => includeDirs, }) .then((packageDefinition: protoLoader.PackageDefinition) => { - const packageObject: any = grpc.loadPackageDefinition( - packageDefinition - ); + const packageObject: any = + grpc.loadPackageDefinition(packageDefinition); server.addService( packageObject.opentelemetry.proto.collector.trace.v1.TraceService .service, @@ -96,14 +98,14 @@ const testCollectorExporter = (params: TestParams) => ); const credentials = params.useTLS ? grpc.ServerCredentials.createSsl( - fs.readFileSync('./test/certs/ca.crt'), - [ - { - cert_chain: fs.readFileSync('./test/certs/server.crt'), - private_key: fs.readFileSync('./test/certs/server.key'), - }, - ] - ) + fs.readFileSync('./test/certs/ca.crt'), + [ + { + cert_chain: fs.readFileSync('./test/certs/server.crt'), + private_key: fs.readFileSync('./test/certs/server.key'), + }, + ] + ) : grpc.ServerCredentials.createInsecure(); server.bindAsync(address, credentials, () => { server.start(); @@ -119,10 +121,10 @@ const testCollectorExporter = (params: TestParams) => beforeEach(done => { const credentials = params.useTLS ? grpc.credentials.createSsl( - fs.readFileSync('./test/certs/ca.crt'), - fs.readFileSync('./test/certs/client.key'), - fs.readFileSync('./test/certs/client.crt') - ) + fs.readFileSync('./test/certs/ca.crt'), + fs.readFileSync('./test/certs/client.key'), + fs.readFileSync('./test/certs/client.crt') + ) : grpc.credentials.createInsecure(); collectorExporter = new OTLPTraceExporter({ url: 'https://' + address, @@ -181,17 +183,11 @@ const testCollectorExporter = (params: TestParams) => const spans = exportedData.scopeSpans[0].spans; const resource = exportedData.resource; - assert.ok( - typeof spans !== 'undefined', - 'spans do not exist' - ); + assert.ok(typeof spans !== 'undefined', 'spans do not exist'); ensureExportedSpanIsCorrect(spans[0]); - assert.ok( - typeof resource !== 'undefined', - "resource doesn't exist" - ); + assert.ok(typeof resource !== 'undefined', "resource doesn't exist"); ensureResourceIsCorrect(resource); @@ -203,10 +199,10 @@ const testCollectorExporter = (params: TestParams) => it('should log deadline exceeded error', done => { const credentials = params.useTLS ? 
grpc.credentials.createSsl( - fs.readFileSync('./test/certs/ca.crt'), - fs.readFileSync('./test/certs/client.key'), - fs.readFileSync('./test/certs/client.crt') - ) + fs.readFileSync('./test/certs/ca.crt'), + fs.readFileSync('./test/certs/client.key'), + fs.readFileSync('./test/certs/client.crt') + ) : grpc.credentials.createInsecure(); const collectorExporterWithTimeout = new OTLPTraceExporter({ @@ -223,7 +219,10 @@ const testCollectorExporter = (params: TestParams) => setTimeout(() => { const result = responseSpy.args[0][0] as core.ExportResult; assert.strictEqual(result.code, core.ExportResultCode.FAILED); - assert.strictEqual(responseSpy.args[0][0].error.details, 'Deadline exceeded'); + assert.strictEqual( + responseSpy.args[0][0].error.details, + 'Deadline exceeded' + ); done(); }, 300); }); @@ -232,10 +231,10 @@ const testCollectorExporter = (params: TestParams) => beforeEach(() => { const credentials = params.useTLS ? grpc.credentials.createSsl( - fs.readFileSync('./test/certs/ca.crt'), - fs.readFileSync('./test/certs/client.key'), - fs.readFileSync('./test/certs/client.crt') - ) + fs.readFileSync('./test/certs/ca.crt'), + fs.readFileSync('./test/certs/client.key'), + fs.readFileSync('./test/certs/client.crt') + ) : grpc.credentials.createInsecure(); collectorExporter = new OTLPTraceExporter({ url: 'https://' + address, @@ -259,16 +258,10 @@ const testCollectorExporter = (params: TestParams) => const spans = exportedData.scopeSpans[0].spans; const resource = exportedData.resource; - assert.ok( - typeof spans !== 'undefined', - 'spans do not exist' - ); + assert.ok(typeof spans !== 'undefined', 'spans do not exist'); ensureExportedSpanIsCorrect(spans[0]); - assert.ok( - typeof resource !== 'undefined', - "resource doesn't exist" - ); + assert.ok(typeof resource !== 'undefined', "resource doesn't exist"); ensureResourceIsCorrect(resource); ensureMetadataIsCorrect(reqMetadata, params.metadata); @@ -282,10 +275,10 @@ const testCollectorExporter = (params: TestParams) => it('should return gzip compression algorithm on exporter', () => { const credentials = params.useTLS ? 
grpc.credentials.createSsl( - fs.readFileSync('./test/certs/ca.crt'), - fs.readFileSync('./test/certs/client.key'), - fs.readFileSync('./test/certs/client.crt') - ) + fs.readFileSync('./test/certs/ca.crt'), + fs.readFileSync('./test/certs/client.key'), + fs.readFileSync('./test/certs/client.crt') + ) : grpc.credentials.createInsecure(); envSource.OTEL_EXPORTER_OTLP_COMPRESSION = 'gzip'; @@ -294,7 +287,10 @@ const testCollectorExporter = (params: TestParams) => credentials, metadata: params.metadata, }); - assert.strictEqual(collectorExporter.compression, GrpcCompressionAlgorithm.GZIP); + assert.strictEqual( + collectorExporter.compression, + GrpcCompressionAlgorithm.GZIP + ); delete envSource.OTEL_EXPORTER_OTLP_COMPRESSION; }); }); @@ -323,20 +319,14 @@ describe('when configuring via environment', () => { it('should use url defined in env', () => { envSource.OTEL_EXPORTER_OTLP_ENDPOINT = 'http://foo.bar'; const collectorExporter = new OTLPTraceExporter(); - assert.strictEqual( - collectorExporter.url, - 'foo.bar' - ); + assert.strictEqual(collectorExporter.url, 'foo.bar'); envSource.OTEL_EXPORTER_OTLP_ENDPOINT = ''; }); it('should override global exporter url with signal url defined in env', () => { envSource.OTEL_EXPORTER_OTLP_ENDPOINT = 'http://foo.bar'; envSource.OTEL_EXPORTER_OTLP_TRACES_ENDPOINT = 'http://foo.traces'; const collectorExporter = new OTLPTraceExporter(); - assert.strictEqual( - collectorExporter.url, - 'foo.traces' - ); + assert.strictEqual(collectorExporter.url, 'foo.traces'); envSource.OTEL_EXPORTER_OTLP_ENDPOINT = ''; envSource.OTEL_EXPORTER_OTLP_TRACES_ENDPOINT = ''; }); diff --git a/experimental/packages/exporter-trace-otlp-grpc/test/traceHelper.ts b/experimental/packages/exporter-trace-otlp-grpc/test/traceHelper.ts index 5b0d0444a1..e95868f305 100644 --- a/experimental/packages/exporter-trace-otlp-grpc/test/traceHelper.ts +++ b/experimental/packages/exporter-trace-otlp-grpc/test/traceHelper.ts @@ -20,25 +20,16 @@ import { ReadableSpan } from '@opentelemetry/sdk-trace-base'; import * as assert from 'assert'; import * as grpc from '@grpc/grpc-js'; import { VERSION } from '@opentelemetry/core'; -import { IEvent, IKeyValue, ILink, IResource, ISpan } from '@opentelemetry/otlp-transformer'; +import { + IEvent, + IKeyValue, + ILink, + IResource, + ISpan, +} from '@opentelemetry/otlp-transformer'; const traceIdArr = [ - 31, - 16, - 8, - 220, - 142, - 39, - 14, - 133, - 196, - 10, - 13, - 124, - 57, - 57, - 178, - 120, + 31, 16, 8, 220, 142, 39, 14, 133, 196, 10, 13, 124, 57, 57, 178, 120, ]; const spanIdArr = [94, 16, 114, 97, 246, 79, 165, 62]; const parentIdArr = [120, 168, 145, 80, 152, 134, 67, 136]; @@ -92,17 +83,17 @@ export const mockedReadableSpan: ReadableSpan = { }, ], duration: [0, 8885000], - resource: Resource.default().merge(new Resource({ - service: 'ui', - version: 1, - cost: 112.12, - })), + resource: Resource.default().merge( + new Resource({ + service: 'ui', + version: 1, + cost: 112.12, + }) + ), instrumentationLibrary: { name: 'default', version: '0.0.1' }, }; -export function ensureExportedEventsAreCorrect( - events: IEvent[] -) { +export function ensureExportedEventsAreCorrect(events: IEvent[]) { assert.deepStrictEqual( events, [ @@ -159,9 +150,7 @@ export function ensureExportedEventsAreCorrect( ); } -export function ensureExportedAttributesAreCorrect( - attributes: IKeyValue[] -) { +export function ensureExportedAttributesAreCorrect(attributes: IKeyValue[]) { assert.deepStrictEqual( attributes, [ @@ -177,9 +166,7 @@ export function 
ensureExportedAttributesAreCorrect( ); } -export function ensureExportedLinksAreCorrect( - attributes: ILink[] -) { +export function ensureExportedLinksAreCorrect(attributes: ILink[]) { assert.deepStrictEqual( attributes, [ @@ -203,9 +190,7 @@ export function ensureExportedLinksAreCorrect( ); } -export function ensureExportedSpanIsCorrect( - span: ISpan -) { +export function ensureExportedSpanIsCorrect(span: ISpan) { if (span.attributes) { ensureExportedAttributesAreCorrect(span.attributes); } @@ -260,38 +245,36 @@ export function ensureExportedSpanIsCorrect( ); } -export function ensureResourceIsCorrect( - resource: IResource -) { +export function ensureResourceIsCorrect(resource: IResource) { assert.deepStrictEqual(resource, { attributes: [ { - 'key': 'service.name', - 'value': { - 'stringValue': `unknown_service:${process.argv0}`, - 'value': 'stringValue' - } + key: 'service.name', + value: { + stringValue: `unknown_service:${process.argv0}`, + value: 'stringValue', + }, }, { - 'key': 'telemetry.sdk.language', - 'value': { - 'stringValue': 'nodejs', - 'value': 'stringValue' - } + key: 'telemetry.sdk.language', + value: { + stringValue: 'nodejs', + value: 'stringValue', + }, }, { - 'key': 'telemetry.sdk.name', - 'value': { - 'stringValue': 'opentelemetry', - 'value': 'stringValue' - } + key: 'telemetry.sdk.name', + value: { + stringValue: 'opentelemetry', + value: 'stringValue', + }, }, { - 'key': 'telemetry.sdk.version', - 'value': { - 'stringValue': VERSION, - 'value': 'stringValue' - } + key: 'telemetry.sdk.version', + value: { + stringValue: VERSION, + value: 'stringValue', + }, }, { key: 'service', diff --git a/experimental/packages/exporter-trace-otlp-http/src/platform/browser/OTLPTraceExporter.ts b/experimental/packages/exporter-trace-otlp-http/src/platform/browser/OTLPTraceExporter.ts index ec12def8f2..8a343306c4 100644 --- a/experimental/packages/exporter-trace-otlp-http/src/platform/browser/OTLPTraceExporter.ts +++ b/experimental/packages/exporter-trace-otlp-http/src/platform/browser/OTLPTraceExporter.ts @@ -20,9 +20,12 @@ import { OTLPExporterConfigBase, appendResourcePathToUrl, appendRootPathToUrlIfNeeded, - OTLPExporterBrowserBase + OTLPExporterBrowserBase, } from '@opentelemetry/otlp-exporter-base'; -import { createExportTraceServiceRequest, IExportTraceServiceRequest } from '@opentelemetry/otlp-transformer'; +import { + createExportTraceServiceRequest, + IExportTraceServiceRequest, +} from '@opentelemetry/otlp-transformer'; const DEFAULT_COLLECTOR_RESOURCE_PATH = 'v1/traces'; const DEFAULT_COLLECTOR_URL = `http://localhost:4318/${DEFAULT_COLLECTOR_RESOURCE_PATH}`; @@ -31,11 +34,9 @@ const DEFAULT_COLLECTOR_URL = `http://localhost:4318/${DEFAULT_COLLECTOR_RESOURC * Collector Trace Exporter for Web */ export class OTLPTraceExporter - extends OTLPExporterBrowserBase< - ReadableSpan, - IExportTraceServiceRequest - > - implements SpanExporter { + extends OTLPExporterBrowserBase + implements SpanExporter +{ constructor(config: OTLPExporterConfigBase = {}) { super(config); this._headers = Object.assign( @@ -53,9 +54,12 @@ export class OTLPTraceExporter return typeof config.url === 'string' ? config.url : getEnv().OTEL_EXPORTER_OTLP_TRACES_ENDPOINT.length > 0 - ? appendRootPathToUrlIfNeeded(getEnv().OTEL_EXPORTER_OTLP_TRACES_ENDPOINT) - : getEnv().OTEL_EXPORTER_OTLP_ENDPOINT.length > 0 - ? appendResourcePathToUrl(getEnv().OTEL_EXPORTER_OTLP_ENDPOINT, DEFAULT_COLLECTOR_RESOURCE_PATH) - : DEFAULT_COLLECTOR_URL; + ? 
appendRootPathToUrlIfNeeded(getEnv().OTEL_EXPORTER_OTLP_TRACES_ENDPOINT) + : getEnv().OTEL_EXPORTER_OTLP_ENDPOINT.length > 0 + ? appendResourcePathToUrl( + getEnv().OTEL_EXPORTER_OTLP_ENDPOINT, + DEFAULT_COLLECTOR_RESOURCE_PATH + ) + : DEFAULT_COLLECTOR_URL; } } diff --git a/experimental/packages/exporter-trace-otlp-http/src/platform/node/OTLPTraceExporter.ts b/experimental/packages/exporter-trace-otlp-http/src/platform/node/OTLPTraceExporter.ts index 528a2735c3..8704baf576 100644 --- a/experimental/packages/exporter-trace-otlp-http/src/platform/node/OTLPTraceExporter.ts +++ b/experimental/packages/exporter-trace-otlp-http/src/platform/node/OTLPTraceExporter.ts @@ -20,9 +20,12 @@ import { OTLPExporterNodeBase } from '@opentelemetry/otlp-exporter-base'; import { OTLPExporterNodeConfigBase, appendResourcePathToUrl, - appendRootPathToUrlIfNeeded + appendRootPathToUrlIfNeeded, } from '@opentelemetry/otlp-exporter-base'; -import { createExportTraceServiceRequest, IExportTraceServiceRequest } from '@opentelemetry/otlp-transformer'; +import { + createExportTraceServiceRequest, + IExportTraceServiceRequest, +} from '@opentelemetry/otlp-transformer'; const DEFAULT_COLLECTOR_RESOURCE_PATH = 'v1/traces'; const DEFAULT_COLLECTOR_URL = `http://localhost:4318/${DEFAULT_COLLECTOR_RESOURCE_PATH}`; @@ -31,9 +34,9 @@ const DEFAULT_COLLECTOR_URL = `http://localhost:4318/${DEFAULT_COLLECTOR_RESOURC * Collector Trace Exporter for Node */ export class OTLPTraceExporter - extends OTLPExporterNodeBase - implements SpanExporter { + extends OTLPExporterNodeBase + implements SpanExporter +{ constructor(config: OTLPExporterNodeConfigBase = {}) { super(config); this.headers = Object.assign( @@ -52,9 +55,12 @@ export class OTLPTraceExporter return typeof config.url === 'string' ? config.url : getEnv().OTEL_EXPORTER_OTLP_TRACES_ENDPOINT.length > 0 - ? appendRootPathToUrlIfNeeded(getEnv().OTEL_EXPORTER_OTLP_TRACES_ENDPOINT) - : getEnv().OTEL_EXPORTER_OTLP_ENDPOINT.length > 0 - ? appendResourcePathToUrl(getEnv().OTEL_EXPORTER_OTLP_ENDPOINT, DEFAULT_COLLECTOR_RESOURCE_PATH) - : DEFAULT_COLLECTOR_URL; + ? appendRootPathToUrlIfNeeded(getEnv().OTEL_EXPORTER_OTLP_TRACES_ENDPOINT) + : getEnv().OTEL_EXPORTER_OTLP_ENDPOINT.length > 0 + ? 
appendResourcePathToUrl( + getEnv().OTEL_EXPORTER_OTLP_ENDPOINT, + DEFAULT_COLLECTOR_RESOURCE_PATH + ) + : DEFAULT_COLLECTOR_URL; } } diff --git a/experimental/packages/exporter-trace-otlp-http/test/browser/CollectorTraceExporter.test.ts b/experimental/packages/exporter-trace-otlp-http/test/browser/CollectorTraceExporter.test.ts index 8981d865eb..81bc6c6b49 100644 --- a/experimental/packages/exporter-trace-otlp-http/test/browser/CollectorTraceExporter.test.ts +++ b/experimental/packages/exporter-trace-otlp-http/test/browser/CollectorTraceExporter.test.ts @@ -28,7 +28,10 @@ import { ensureHeadersContain, mockedReadableSpan, } from '../traceHelper'; -import { OTLPExporterConfigBase, OTLPExporterError } from '@opentelemetry/otlp-exporter-base'; +import { + OTLPExporterConfigBase, + OTLPExporterError, +} from '@opentelemetry/otlp-exporter-base'; import { IExportTraceServiceRequest } from '@opentelemetry/otlp-transformer'; describe('OTLPTraceExporter - web', () => { @@ -91,14 +94,11 @@ describe('OTLPTraceExporter - web', () => { describe('when "sendBeacon" is available', () => { beforeEach(() => { - collectorTraceExporter = new OTLPTraceExporter( - collectorExporterConfig - ); + collectorTraceExporter = new OTLPTraceExporter(collectorExporterConfig); }); it('should successfully send the spans using sendBeacon', done => { - collectorTraceExporter.export(spans, () => { - }); + collectorTraceExporter.export(spans, () => {}); setTimeout(async () => { try { @@ -106,17 +106,17 @@ describe('OTLPTraceExporter - web', () => { const url = args[0]; const blob: Blob = args[1]; const body = await blob.text(); - const json = JSON.parse( - body - ) as IExportTraceServiceRequest; - const span1 = - json.resourceSpans?.[0].scopeSpans?.[0].spans?.[0]; + const json = JSON.parse(body) as IExportTraceServiceRequest; + const span1 = json.resourceSpans?.[0].scopeSpans?.[0].spans?.[0]; assert.ok(typeof span1 !== 'undefined', "span doesn't exist"); ensureSpanIsCorrect(span1); const resource = json.resourceSpans?.[0].resource; - assert.ok(typeof resource !== 'undefined', "resource doesn't exist"); + assert.ok( + typeof resource !== 'undefined', + "resource doesn't exist" + ); ensureWebResourceIsCorrect(resource); assert.strictEqual(url, 'http://foo.bar.com'); @@ -135,22 +135,20 @@ describe('OTLPTraceExporter - web', () => { it('should log the successful message', done => { const spyLoggerDebug = sinon.stub(); const spyLoggerError = sinon.stub(); - const nop = () => { - }; + const nop = () => {}; const diagLogger: DiagLogger = { debug: spyLoggerDebug, error: spyLoggerError, info: nop, verbose: nop, - warn: nop + warn: nop, }; diag.setLogger(diagLogger, DiagLogLevel.ALL); stubBeacon.returns(true); - collectorTraceExporter.export(spans, () => { - }); + collectorTraceExporter.export(spans, () => {}); setTimeout(() => { const response: any = spyLoggerDebug.args[2][0]; @@ -181,9 +179,7 @@ describe('OTLPTraceExporter - web', () => { clock = sinon.useFakeTimers(); (window.navigator as any).sendBeacon = false; - collectorTraceExporter = new OTLPTraceExporter( - collectorExporterConfig - ); + collectorTraceExporter = new OTLPTraceExporter(collectorExporterConfig); server = sinon.fakeServer.create(); }); afterEach(() => { @@ -191,8 +187,7 @@ describe('OTLPTraceExporter - web', () => { }); it('should successfully send the spans using XMLHttpRequest', done => { - collectorTraceExporter.export(spans, () => { - }); + collectorTraceExporter.export(spans, () => {}); queueMicrotask(() => { const request = server.requests[0]; @@ -200,11 
+195,8 @@ describe('OTLPTraceExporter - web', () => { assert.strictEqual(request.url, 'http://foo.bar.com'); const body = request.requestBody; - const json = JSON.parse( - body - ) as IExportTraceServiceRequest; - const span1 = - json.resourceSpans?.[0].scopeSpans?.[0].spans?.[0]; + const json = JSON.parse(body) as IExportTraceServiceRequest; + const span1 = json.resourceSpans?.[0].scopeSpans?.[0].spans?.[0]; assert.ok(typeof span1 !== 'undefined', "span doesn't exist"); ensureSpanIsCorrect(span1); @@ -224,20 +216,18 @@ describe('OTLPTraceExporter - web', () => { it('should log the successful message', done => { const spyLoggerDebug = sinon.stub(); const spyLoggerError = sinon.stub(); - const nop = () => { - }; + const nop = () => {}; const diagLogger: DiagLogger = { debug: spyLoggerDebug, error: spyLoggerError, info: nop, verbose: nop, - warn: nop + warn: nop, }; diag.setLogger(diagLogger, DiagLogLevel.ALL); - collectorTraceExporter.export(spans, () => { - }); + collectorTraceExporter.export(spans, () => {}); queueMicrotask(() => { const request = server.requests[0]; @@ -268,8 +258,7 @@ describe('OTLPTraceExporter - web', () => { }); it('should send custom headers', done => { - collectorTraceExporter.export(spans, () => { - }); + collectorTraceExporter.export(spans, () => {}); queueMicrotask(() => { const request = server.requests[0]; @@ -283,7 +272,6 @@ describe('OTLPTraceExporter - web', () => { }); }); - describe('export - common', () => { let spySend: any; beforeEach(() => { @@ -295,8 +283,7 @@ describe('OTLPTraceExporter - web', () => { const spans: ReadableSpan[] = []; spans.push(Object.assign({}, mockedReadableSpan)); - collectorTraceExporter.export(spans, () => { - }); + collectorTraceExporter.export(spans, () => {}); setTimeout(() => { const span1 = spySend.args[0][0][0] as ReadableSpan; assert.deepStrictEqual(spans[0], span1); @@ -308,7 +295,7 @@ describe('OTLPTraceExporter - web', () => { describe('when exporter is shutdown', () => { it( 'should not export anything but return callback with code' + - ' "FailedNotRetryable"', + ' "FailedNotRetryable"', async () => { const spans: ReadableSpan[] = []; spans.push(Object.assign({}, mockedReadableSpan)); @@ -385,13 +372,10 @@ describe('OTLPTraceExporter - web', () => { // located in sendWithXhr function called by the export method clock = sinon.useFakeTimers(); - collectorTraceExporter = new OTLPTraceExporter( - collectorExporterConfig - ); + collectorTraceExporter = new OTLPTraceExporter(collectorExporterConfig); }); it('should successfully send custom headers using XMLHTTPRequest', done => { - collectorTraceExporter.export(spans, () => { - }); + collectorTraceExporter.export(spans, () => {}); queueMicrotask(() => { const [{ requestHeaders }] = server.requests; @@ -414,14 +398,11 @@ describe('OTLPTraceExporter - web', () => { clock = sinon.useFakeTimers(); (window.navigator as any).sendBeacon = false; - collectorTraceExporter = new OTLPTraceExporter( - collectorExporterConfig - ); + collectorTraceExporter = new OTLPTraceExporter(collectorExporterConfig); }); it('should successfully send spans using XMLHttpRequest', done => { - collectorTraceExporter.export(spans, () => { - }); + collectorTraceExporter.export(spans, () => {}); queueMicrotask(() => { const [{ requestHeaders }] = server.requests; @@ -465,7 +446,9 @@ describe('OTLPTraceExporter - web', () => { collectorExporterWithConcurrencyLimit.export(spans, callbackSpy); } - const failures = callbackSpy.args.filter(([result]) => result.code === ExportResultCode.FAILED); + const 
failures = callbackSpy.args.filter( + ([result]) => result.code === ExportResultCode.FAILED + ); setTimeout(() => { // Expect 4 failures diff --git a/experimental/packages/exporter-trace-otlp-http/test/node/CollectorTraceExporter.test.ts b/experimental/packages/exporter-trace-otlp-http/test/node/CollectorTraceExporter.test.ts index 4e0a8e4c12..e7006dbda5 100644 --- a/experimental/packages/exporter-trace-otlp-http/test/node/CollectorTraceExporter.test.ts +++ b/experimental/packages/exporter-trace-otlp-http/test/node/CollectorTraceExporter.test.ts @@ -19,7 +19,7 @@ import * as core from '@opentelemetry/core'; import { CompressionAlgorithm, OTLPExporterError, - OTLPExporterNodeConfigBase + OTLPExporterNodeConfigBase, } from '@opentelemetry/otlp-exporter-base'; import { ReadableSpan } from '@opentelemetry/sdk-trace-base'; import * as assert from 'assert'; @@ -27,13 +27,11 @@ import * as http from 'http'; import * as sinon from 'sinon'; import { PassThrough, Stream } from 'stream'; import * as zlib from 'zlib'; -import { - OTLPTraceExporter -} from '../../src/platform/node'; +import { OTLPTraceExporter } from '../../src/platform/node'; import { ensureExportTraceServiceRequestIsSet, ensureSpanIsCorrect, - mockedReadableSpan + mockedReadableSpan, } from '../traceHelper'; import { nextTick } from 'process'; import { MockedResponse } from './nodeHelpers'; @@ -60,14 +58,13 @@ describe('OTLPTraceExporter - node with json over http', () => { const metadata = 'foo'; // Need to stub/spy on the underlying logger as the "diag" instance is global const warnStub = sinon.stub(); - const nop = () => { - }; + const nop = () => {}; const diagLogger: DiagLogger = { debug: nop, error: nop, info: nop, verbose: nop, - warn: warnStub + warn: warnStub, }; diag.setLogger(diagLogger); @@ -148,7 +145,8 @@ describe('OTLPTraceExporter - node with json over http', () => { envSource.OTEL_EXPORTER_OTLP_TRACES_ENDPOINT = ''; }); it('should not add root path when signal url defined in env contains path and ends in /', () => { - envSource.OTEL_EXPORTER_OTLP_TRACES_ENDPOINT = 'http://foo.bar/v1/traces/'; + envSource.OTEL_EXPORTER_OTLP_TRACES_ENDPOINT = + 'http://foo.bar/v1/traces/'; const collectorExporter = new OTLPTraceExporter(); assert.strictEqual( collectorExporter.url, @@ -205,7 +203,7 @@ describe('OTLPTraceExporter - node with json over http', () => { }); it('should open the connection', done => { - collectorExporter.export(spans, () => { }); + collectorExporter.export(spans, () => {}); setTimeout(() => { const mockRes = new MockedResponse(200); @@ -223,7 +221,7 @@ describe('OTLPTraceExporter - node with json over http', () => { }); it('should set custom headers', done => { - collectorExporter.export(spans, () => { }); + collectorExporter.export(spans, () => {}); setTimeout(() => { const mockRes = new MockedResponse(200); @@ -239,7 +237,7 @@ describe('OTLPTraceExporter - node with json over http', () => { }); it('should not have Content-Encoding header', done => { - collectorExporter.export(spans, () => { }); + collectorExporter.export(spans, () => {}); setTimeout(() => { const mockRes = new MockedResponse(200); @@ -255,7 +253,7 @@ describe('OTLPTraceExporter - node with json over http', () => { }); it('should have keep alive and keepAliveMsecs option set', done => { - collectorExporter.export(spans, () => { }); + collectorExporter.export(spans, () => {}); setTimeout(() => { const mockRes = new MockedResponse(200); @@ -274,7 +272,7 @@ describe('OTLPTraceExporter - node with json over http', () => { it('different http 
export requests should use the same agent', done => { const clock = sinon.useFakeTimers(); - collectorExporter.export(spans, () => { }); + collectorExporter.export(spans, () => {}); const mockRes = new MockedResponse(200); const args = stubRequest.args[0]; @@ -286,7 +284,7 @@ describe('OTLPTraceExporter - node with json over http', () => { nextTick(() => { const clock = sinon.useFakeTimers(); - collectorExporter.export(spans, () => { }); + collectorExporter.export(spans, () => {}); const mockRes2 = new MockedResponse(200); const args2 = stubRequest.args[1]; @@ -325,7 +323,7 @@ describe('OTLPTraceExporter - node with json over http', () => { buff = Buffer.concat([buff, chunk]); }); - collectorExporter.export(spans, () => { }); + collectorExporter.export(spans, () => {}); const mockRes = new MockedResponse(200); const args = stubRequest.args[0]; @@ -423,7 +421,7 @@ describe('OTLPTraceExporter - node with json over http', () => { buff = Buffer.concat([buff, chunk]); }); - collectorExporter.export(spans,() => { }); + collectorExporter.export(spans, () => {}); const mockRes = new MockedResponse(200); const args = stubRequest.args[0]; @@ -481,7 +479,7 @@ describe('OTLPTraceExporter - node with json over http', () => { collectorExporter.export(spans, responseSpy); setTimeout(() => { - fakeRequest.emit('error', { code: 'ECONNRESET'}); + fakeRequest.emit('error', { code: 'ECONNRESET' }); setTimeout(() => { const result = responseSpy.args[0][0] as core.ExportResult; diff --git a/experimental/packages/exporter-trace-otlp-http/test/traceHelper.ts b/experimental/packages/exporter-trace-otlp-http/test/traceHelper.ts index e5415fcf6a..feab35b55e 100644 --- a/experimental/packages/exporter-trace-otlp-http/test/traceHelper.ts +++ b/experimental/packages/exporter-trace-otlp-http/test/traceHelper.ts @@ -15,7 +15,11 @@ */ import { SpanStatusCode, TraceFlags } from '@opentelemetry/api'; -import { hexToBase64, InstrumentationLibrary, VERSION } from '@opentelemetry/core'; +import { + hexToBase64, + InstrumentationLibrary, + VERSION, +} from '@opentelemetry/core'; import { Resource } from '@opentelemetry/resources'; import { ReadableSpan } from '@opentelemetry/sdk-trace-base'; import * as assert from 'assert'; @@ -26,7 +30,7 @@ import { IKeyValue, ILink, IResource, - ISpan + ISpan, } from '@opentelemetry/otlp-transformer'; if (typeof Buffer === 'undefined') { @@ -44,7 +48,7 @@ const parentIdHex = '78a8915098864388'; export const mockedReadableSpan: ReadableSpan = { name: 'documentFetch', kind: 0, - spanContext: ()=> { + spanContext: () => { return { traceId: '1f1008dc8e270e85c40a0d7c3939b278', spanId: '5e107261f64fa53e', @@ -90,12 +94,13 @@ export const mockedReadableSpan: ReadableSpan = { }, ], duration: [0, 8885000], - resource: Resource.default() - .merge(new Resource({ + resource: Resource.default().merge( + new Resource({ service: 'ui', version: 1, cost: 112.12, - })), + }) + ), instrumentationLibrary: { name: 'default', version: '0.0.1' }, }; @@ -214,9 +219,7 @@ export const multiInstrumentationLibraryTrace: ReadableSpan[] = [ }, ]; -export function ensureEventsAreCorrect( - events: IEvent[] -) { +export function ensureEventsAreCorrect(events: IEvent[]) { assert.deepStrictEqual( events, [ @@ -273,9 +276,7 @@ export function ensureEventsAreCorrect( ); } -export function ensureAttributesAreCorrect( - attributes: IKeyValue[] -) { +export function ensureAttributesAreCorrect(attributes: IKeyValue[]) { assert.deepStrictEqual( attributes, [ @@ -290,10 +291,7 @@ export function ensureAttributesAreCorrect( ); } 
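Reviewer note (not part of the diff): the getDefaultUrl hunks reformatted above encode a fixed precedence for the trace endpoint — an explicit config.url wins, then OTEL_EXPORTER_OTLP_TRACES_ENDPOINT (with a root path appended if needed), then OTEL_EXPORTER_OTLP_ENDPOINT with the v1/traces resource path appended, and finally the localhost default. Below is a minimal standalone sketch of that chain; resolveTraceUrl and its env parameter are illustrative only, not exporter API.

    // Illustrative only: mirrors the ternary chain in getDefaultUrl().
    // The real exporters use getEnv(), appendRootPathToUrlIfNeeded() and
    // appendResourcePathToUrl() from @opentelemetry/otlp-exporter-base.
    const DEFAULT_TRACES_URL = 'http://localhost:4318/v1/traces';

    function resolveTraceUrl(
      configUrl: string | undefined,
      env: { tracesEndpoint: string; endpoint: string }
    ): string {
      if (typeof configUrl === 'string') return configUrl; // 1. explicit config
      if (env.tracesEndpoint.length > 0) return env.tracesEndpoint; // 2. signal-specific env var (real code appends a root path if missing)
      if (env.endpoint.length > 0) {
        return env.endpoint.replace(/\/+$/, '') + '/v1/traces'; // 3. generic env var + resource path
      }
      return DEFAULT_TRACES_URL; // 4. hard-coded default
    }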
-export function ensureLinksAreCorrect( - attributes: ILink[], - useHex?: boolean -) { +export function ensureLinksAreCorrect(attributes: ILink[], useHex?: boolean) { assert.deepStrictEqual( attributes, [ @@ -315,10 +313,7 @@ export function ensureLinksAreCorrect( ); } -export function ensureSpanIsCorrect( - span: ISpan, - useHex = true -) { +export function ensureSpanIsCorrect(span: ISpan, useHex = true) { if (span.attributes) { ensureAttributesAreCorrect(span.attributes); } @@ -344,11 +339,7 @@ export function ensureSpanIsCorrect( 'parentIdArr is wrong' ); assert.strictEqual(span.name, 'documentFetch', 'name is wrong'); - assert.strictEqual( - span.kind, - ESpanKind.SPAN_KIND_INTERNAL, - 'kind is wrong' - ); + assert.strictEqual(span.kind, ESpanKind.SPAN_KIND_INTERNAL, 'kind is wrong'); assert.strictEqual( span.startTimeUnixNano, 1574120165429803008, @@ -373,12 +364,13 @@ export function ensureSpanIsCorrect( ); } -export function ensureWebResourceIsCorrect( - resource: IResource -) { +export function ensureWebResourceIsCorrect(resource: IResource) { assert.strictEqual(resource.attributes.length, 7); assert.strictEqual(resource.attributes[0].key, 'service.name'); - assert.strictEqual(resource.attributes[0].value.stringValue, 'unknown_service'); + assert.strictEqual( + resource.attributes[0].value.stringValue, + 'unknown_service' + ); assert.strictEqual(resource.attributes[1].key, 'telemetry.sdk.language'); assert.strictEqual(resource.attributes[1].value.stringValue, 'webjs'); assert.strictEqual(resource.attributes[2].key, 'telemetry.sdk.name'); diff --git a/experimental/packages/exporter-trace-otlp-proto/src/OTLPTraceExporter.ts b/experimental/packages/exporter-trace-otlp-proto/src/OTLPTraceExporter.ts index d5c40a8214..0634013311 100644 --- a/experimental/packages/exporter-trace-otlp-proto/src/OTLPTraceExporter.ts +++ b/experimental/packages/exporter-trace-otlp-proto/src/OTLPTraceExporter.ts @@ -19,10 +19,16 @@ import { getEnv, baggageUtils } from '@opentelemetry/core'; import { OTLPExporterNodeConfigBase, appendResourcePathToUrl, - appendRootPathToUrlIfNeeded + appendRootPathToUrlIfNeeded, } from '@opentelemetry/otlp-exporter-base'; -import { OTLPProtoExporterNodeBase, ServiceClientType } from '@opentelemetry/otlp-proto-exporter-base'; -import { createExportTraceServiceRequest, IExportTraceServiceRequest } from '@opentelemetry/otlp-transformer'; +import { + OTLPProtoExporterNodeBase, + ServiceClientType, +} from '@opentelemetry/otlp-proto-exporter-base'; +import { + createExportTraceServiceRequest, + IExportTraceServiceRequest, +} from '@opentelemetry/otlp-transformer'; const DEFAULT_COLLECTOR_RESOURCE_PATH = 'v1/traces'; const DEFAULT_COLLECTOR_URL = `http://localhost:4318/${DEFAULT_COLLECTOR_RESOURCE_PATH}`; @@ -31,11 +37,9 @@ const DEFAULT_COLLECTOR_URL = `http://localhost:4318/${DEFAULT_COLLECTOR_RESOURC * Collector Trace Exporter for Node with protobuf */ export class OTLPTraceExporter - extends OTLPProtoExporterNodeBase< - ReadableSpan, - IExportTraceServiceRequest - > - implements SpanExporter { + extends OTLPProtoExporterNodeBase + implements SpanExporter +{ constructor(config: OTLPExporterNodeConfigBase = {}) { super(config); this.headers = Object.assign( @@ -54,10 +58,13 @@ export class OTLPTraceExporter return typeof config.url === 'string' ? config.url : getEnv().OTEL_EXPORTER_OTLP_TRACES_ENDPOINT.length > 0 - ? appendRootPathToUrlIfNeeded(getEnv().OTEL_EXPORTER_OTLP_TRACES_ENDPOINT) - : getEnv().OTEL_EXPORTER_OTLP_ENDPOINT.length > 0 - ? 
appendResourcePathToUrl(getEnv().OTEL_EXPORTER_OTLP_ENDPOINT, DEFAULT_COLLECTOR_RESOURCE_PATH) - : DEFAULT_COLLECTOR_URL; + ? appendRootPathToUrlIfNeeded(getEnv().OTEL_EXPORTER_OTLP_TRACES_ENDPOINT) + : getEnv().OTEL_EXPORTER_OTLP_ENDPOINT.length > 0 + ? appendResourcePathToUrl( + getEnv().OTEL_EXPORTER_OTLP_ENDPOINT, + DEFAULT_COLLECTOR_RESOURCE_PATH + ) + : DEFAULT_COLLECTOR_URL; } getServiceClientType() { diff --git a/experimental/packages/exporter-trace-otlp-proto/test/OTLPTraceExporter.test.ts b/experimental/packages/exporter-trace-otlp-proto/test/OTLPTraceExporter.test.ts index f8f8ec87d0..924c6b4b31 100644 --- a/experimental/packages/exporter-trace-otlp-proto/test/OTLPTraceExporter.test.ts +++ b/experimental/packages/exporter-trace-otlp-proto/test/OTLPTraceExporter.test.ts @@ -29,8 +29,15 @@ import { mockedReadableSpan, MockedResponse, } from './traceHelper'; -import { CompressionAlgorithm, OTLPExporterNodeConfigBase, OTLPExporterError } from '@opentelemetry/otlp-exporter-base'; -import { getExportRequestProto, ServiceClientType } from '@opentelemetry/otlp-proto-exporter-base'; +import { + CompressionAlgorithm, + OTLPExporterNodeConfigBase, + OTLPExporterError, +} from '@opentelemetry/otlp-exporter-base'; +import { + getExportRequestProto, + ServiceClientType, +} from '@opentelemetry/otlp-proto-exporter-base'; import { IExportTraceServiceRequest } from '@opentelemetry/otlp-transformer'; let fakeRequest: PassThrough; @@ -113,7 +120,8 @@ describe('OTLPTraceExporter - node with proto over http', () => { envSource.OTEL_EXPORTER_OTLP_TRACES_ENDPOINT = ''; }); it('should not add root path when signal url defined in env contains path and ends in /', () => { - envSource.OTEL_EXPORTER_OTLP_TRACES_ENDPOINT = 'http://foo.bar/v1/traces/'; + envSource.OTEL_EXPORTER_OTLP_TRACES_ENDPOINT = + 'http://foo.bar/v1/traces/'; const collectorExporter = new OTLPTraceExporter(); assert.strictEqual( collectorExporter.url, @@ -158,7 +166,7 @@ describe('OTLPTraceExporter - node with proto over http', () => { }); it('should open the connection', done => { - collectorExporter.export(spans, () => { }); + collectorExporter.export(spans, () => {}); sinon.stub(http, 'request').callsFake((options: any, cb: any) => { assert.strictEqual(options.hostname, 'foo.bar.com'); @@ -174,7 +182,7 @@ describe('OTLPTraceExporter - node with proto over http', () => { }); it('should set custom headers', done => { - collectorExporter.export(spans, () => { }); + collectorExporter.export(spans, () => {}); sinon.stub(http, 'request').callsFake((options: any, cb: any) => { assert.strictEqual(options.headers['foo'], 'bar'); @@ -188,7 +196,7 @@ describe('OTLPTraceExporter - node with proto over http', () => { }); it('should have keep alive and keepAliveMsecs option set', done => { - collectorExporter.export(spans, () => { }); + collectorExporter.export(spans, () => {}); sinon.stub(http, 'request').callsFake((options: any, cb: any) => { assert.strictEqual(options.agent.keepAlive, true); @@ -208,7 +216,9 @@ describe('OTLPTraceExporter - node with proto over http', () => { let buff = Buffer.from(''); fakeRequest.on('end', () => { - const ExportTraceServiceRequestProto = getExportRequestProto(ServiceClientType.SPANS); + const ExportTraceServiceRequestProto = getExportRequestProto( + ServiceClientType.SPANS + ); const data = ExportTraceServiceRequestProto.decode(buff); const json = data?.toJSON() as IExportTraceServiceRequest; const span1 = json.resourceSpans?.[0].scopeSpans?.[0].spans?.[0]; @@ -225,7 +235,7 @@ describe('OTLPTraceExporter - 
node with proto over http', () => { }); const clock = sinon.useFakeTimers(); - collectorExporter.export(spans, () => { }); + collectorExporter.export(spans, () => {}); clock.tick(200); clock.restore(); }); @@ -294,7 +304,9 @@ describe('OTLPTraceExporter - node with proto over http', () => { let buff = Buffer.from(''); fakeRequest.on('end', () => { const unzippedBuff = zlib.gunzipSync(buff); - const ExportTraceServiceRequestProto = getExportRequestProto(ServiceClientType.SPANS); + const ExportTraceServiceRequestProto = getExportRequestProto( + ServiceClientType.SPANS + ); const data = ExportTraceServiceRequestProto.decode(unzippedBuff); const json = data?.toJSON() as IExportTraceServiceRequest; const span1 = json.resourceSpans?.[0].scopeSpans?.[0].spans?.[0]; @@ -312,14 +324,13 @@ describe('OTLPTraceExporter - node with proto over http', () => { }); const clock = sinon.useFakeTimers(); - collectorExporter.export(spans, () => { }); + collectorExporter.export(spans, () => {}); clock.tick(200); clock.restore(); }); }); }); - describe('export - real http request destroyed before response received', () => { let collectorExporter: OTLPTraceExporter; let collectorExporterConfig: OTLPExporterNodeConfigBase; diff --git a/experimental/packages/exporter-trace-otlp-proto/test/traceHelper.ts b/experimental/packages/exporter-trace-otlp-proto/test/traceHelper.ts index 10a0765458..67ea6593bc 100644 --- a/experimental/packages/exporter-trace-otlp-proto/test/traceHelper.ts +++ b/experimental/packages/exporter-trace-otlp-proto/test/traceHelper.ts @@ -20,7 +20,13 @@ import { Resource } from '@opentelemetry/resources'; import { ReadableSpan } from '@opentelemetry/sdk-trace-base'; import * as assert from 'assert'; import { Stream } from 'stream'; -import { IEvent, IExportTraceServiceRequest, IKeyValue, ILink, ISpan } from '@opentelemetry/otlp-transformer'; +import { + IEvent, + IExportTraceServiceRequest, + IKeyValue, + ILink, + ISpan, +} from '@opentelemetry/otlp-transformer'; const traceIdHex = '1f1008dc8e270e85c40a0d7c3939b278'; const spanIdHex = '5e107261f64fa53e'; @@ -83,9 +89,7 @@ export const mockedReadableSpan: ReadableSpan = { instrumentationLibrary: { name: 'default', version: '0.0.1' }, }; -export function ensureProtoEventsAreCorrect( - events: IEvent[] -) { +export function ensureProtoEventsAreCorrect(events: IEvent[]) { assert.deepStrictEqual( events, [ @@ -134,9 +138,7 @@ export function ensureProtoEventsAreCorrect( ); } -export function ensureProtoAttributesAreCorrect( - attributes: IKeyValue[] -) { +export function ensureProtoAttributesAreCorrect(attributes: IKeyValue[]) { assert.deepStrictEqual( attributes, [ @@ -151,9 +153,7 @@ export function ensureProtoAttributesAreCorrect( ); } -export function ensureProtoLinksAreCorrect( - attributes: ILink[] -) { +export function ensureProtoLinksAreCorrect(attributes: ILink[]) { assert.deepStrictEqual( attributes, [ @@ -175,9 +175,7 @@ export function ensureProtoLinksAreCorrect( ); } -export function ensureProtoSpanIsCorrect( - span: ISpan -) { +export function ensureProtoSpanIsCorrect(span: ISpan) { if (span.attributes) { ensureProtoAttributesAreCorrect(span.attributes); } @@ -238,11 +236,7 @@ export function ensureExportTraceServiceRequestIsSet( assert.ok(resource, 'resource is missing'); const scopeSpans = resourceSpans?.[0].scopeSpans; - assert.strictEqual( - scopeSpans?.length, - 1, - 'scopeSpans is missing' - ); + assert.strictEqual(scopeSpans?.length, 1, 'scopeSpans is missing'); const scope = scopeSpans?.[0].scope; assert.ok(scope, 'scope is 
missing'); diff --git a/experimental/packages/opentelemetry-browser-detector/src/BrowserDetector.ts b/experimental/packages/opentelemetry-browser-detector/src/BrowserDetector.ts index d0e9dea278..318168b77c 100644 --- a/experimental/packages/opentelemetry-browser-detector/src/BrowserDetector.ts +++ b/experimental/packages/opentelemetry-browser-detector/src/BrowserDetector.ts @@ -15,7 +15,11 @@ */ import { diag } from '@opentelemetry/api'; -import { Detector, Resource, ResourceDetectionConfig } from '@opentelemetry/resources'; +import { + Detector, + Resource, + ResourceDetectionConfig, +} from '@opentelemetry/resources'; import { ResourceAttributes } from '@opentelemetry/resources'; import { BROWSER_ATTRIBUTES, UserAgentData } from './types'; @@ -43,7 +47,8 @@ class BrowserDetector implements Detector { _config?: ResourceDetectionConfig ) { if ( - !browserResource[BROWSER_ATTRIBUTES.USER_AGENT] && !browserResource[BROWSER_ATTRIBUTES.PLATFORM] + !browserResource[BROWSER_ATTRIBUTES.USER_AGENT] && + !browserResource[BROWSER_ATTRIBUTES.PLATFORM] ) { diag.debug( 'BrowserDetector failed: Unable to find required browser resources. ' @@ -57,18 +62,20 @@ class BrowserDetector implements Detector { // Add Browser related attributes to resources function getBrowserAttributes(): ResourceAttributes { - const browserAttribs : ResourceAttributes = {}; - const userAgentData : UserAgentData | undefined = (navigator as any).userAgentData; + const browserAttribs: ResourceAttributes = {}; + const userAgentData: UserAgentData | undefined = (navigator as any) + .userAgentData; if (userAgentData) { browserAttribs[BROWSER_ATTRIBUTES.PLATFORM] = userAgentData.platform; - browserAttribs[BROWSER_ATTRIBUTES.BRANDS] = userAgentData.brands.map(b => `${b.brand} ${b.version}`); + browserAttribs[BROWSER_ATTRIBUTES.BRANDS] = userAgentData.brands.map( + b => `${b.brand} ${b.version}` + ); browserAttribs[BROWSER_ATTRIBUTES.MOBILE] = userAgentData.mobile; } else { browserAttribs[BROWSER_ATTRIBUTES.USER_AGENT] = navigator.userAgent; } - browserAttribs[BROWSER_ATTRIBUTES.LANGUAGE]=navigator.language; + browserAttribs[BROWSER_ATTRIBUTES.LANGUAGE] = navigator.language; return browserAttribs; } - export const browserDetector = new BrowserDetector(); diff --git a/experimental/packages/opentelemetry-browser-detector/src/types.ts b/experimental/packages/opentelemetry-browser-detector/src/types.ts index 52d8a6610d..c8567a4f95 100644 --- a/experimental/packages/opentelemetry-browser-detector/src/types.ts +++ b/experimental/packages/opentelemetry-browser-detector/src/types.ts @@ -14,9 +14,9 @@ * limitations under the License. 
*/ export type UserAgentData = { - brands: {brand:string,version:string}[], - platform: string, - mobile: boolean + brands: { brand: string; version: string }[]; + platform: string; + mobile: boolean; }; export const BROWSER_ATTRIBUTES = { @@ -24,5 +24,5 @@ export const BROWSER_ATTRIBUTES = { BRANDS: 'browser.brands', MOBILE: 'browser.mobile', LANGUAGE: 'browser.language', - USER_AGENT: 'browser.user_agent' + USER_AGENT: 'browser.user_agent', }; diff --git a/experimental/packages/opentelemetry-browser-detector/test/BrowserDetector.test.ts b/experimental/packages/opentelemetry-browser-detector/test/BrowserDetector.test.ts index 07143c869b..7eb3928161 100644 --- a/experimental/packages/opentelemetry-browser-detector/test/BrowserDetector.test.ts +++ b/experimental/packages/opentelemetry-browser-detector/test/BrowserDetector.test.ts @@ -16,11 +16,7 @@ import * as sinon from 'sinon'; import { Resource } from '@opentelemetry/resources'; import { browserDetector } from '../src/BrowserDetector'; -import { - describeBrowser, - assertResource, - assertEmptyResource -} from './util'; +import { describeBrowser, assertResource, assertEmptyResource } from './util'; describeBrowser('browserDetector()', () => { afterEach(() => { @@ -33,32 +29,28 @@ describeBrowser('browserDetector()', () => { language: 'en-US', userAgentData: { platform: 'platform', - brands:[ + brands: [ { brand: 'Chromium', - version: '106' + version: '106', }, { brand: 'Google Chrome', - version: '106' + version: '106', }, { brand: 'Not;A=Brand', - version: '99' - } + version: '99', + }, ], - mobile: false - } + mobile: false, + }, }); const resource: Resource = await browserDetector.detect(); assertResource(resource, { platform: 'platform', - brands: [ - 'Chromium 106', - 'Google Chrome 106', - 'Not;A=Brand 99' - ], + brands: ['Chromium 106', 'Google Chrome 106', 'Not;A=Brand 99'], mobile: false, language: 'en-US', }); @@ -74,7 +66,7 @@ describeBrowser('browserDetector()', () => { const resource: Resource = await browserDetector.detect(); assertResource(resource, { language: 'en-US', - user_agent: 'dddd' + user_agent: 'dddd', }); }); diff --git a/experimental/packages/opentelemetry-browser-detector/test/util.ts b/experimental/packages/opentelemetry-browser-detector/test/util.ts index 62be8b3c2e..b74fd89ced 100644 --- a/experimental/packages/opentelemetry-browser-detector/test/util.ts +++ b/experimental/packages/opentelemetry-browser-detector/test/util.ts @@ -13,10 +13,10 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -import {Suite} from 'mocha'; +import { Suite } from 'mocha'; import * as assert from 'assert'; -import {BROWSER_ATTRIBUTES} from '../src/types'; -import {Resource} from '@opentelemetry/resources'; +import { BROWSER_ATTRIBUTES } from '../src/types'; +import { Resource } from '@opentelemetry/resources'; export function describeBrowser(title: string, fn: (this: Suite) => void) { title = `Browser: ${title}`; @@ -29,11 +29,11 @@ export function describeBrowser(title: string, fn: (this: Suite) => void) { export const assertResource = ( resource: Resource, validations: { - platform?: string, - brands?: string[], - mobile?: boolean, - language?: string, - user_agent?: string + platform?: string; + brands?: string[]; + mobile?: boolean; + language?: string; + user_agent?: string; } ) => { if (validations.platform) { @@ -45,25 +45,25 @@ export const assertResource = ( if (validations.brands) { assert.ok(Array.isArray(resource.attributes[BROWSER_ATTRIBUTES.BRANDS])); assert.deepStrictEqual( - (resource.attributes[BROWSER_ATTRIBUTES.BRANDS] as string[]), + resource.attributes[BROWSER_ATTRIBUTES.BRANDS] as string[], validations.brands ); } if (validations.mobile) { assert.strictEqual( - (resource.attributes[BROWSER_ATTRIBUTES.MOBILE]), + resource.attributes[BROWSER_ATTRIBUTES.MOBILE], validations.mobile ); } if (validations.language) { assert.strictEqual( - (resource.attributes[BROWSER_ATTRIBUTES.LANGUAGE]), + resource.attributes[BROWSER_ATTRIBUTES.LANGUAGE], validations.language ); } if (validations.user_agent) { assert.strictEqual( - (resource.attributes[BROWSER_ATTRIBUTES.USER_AGENT]), + resource.attributes[BROWSER_ATTRIBUTES.USER_AGENT], validations.user_agent ); } diff --git a/experimental/packages/opentelemetry-exporter-metrics-otlp-grpc/src/OTLPMetricExporter.ts b/experimental/packages/opentelemetry-exporter-metrics-otlp-grpc/src/OTLPMetricExporter.ts index 3a512e3df4..20f917e543 100644 --- a/experimental/packages/opentelemetry-exporter-metrics-otlp-grpc/src/OTLPMetricExporter.ts +++ b/experimental/packages/opentelemetry-exporter-metrics-otlp-grpc/src/OTLPMetricExporter.ts @@ -16,7 +16,7 @@ import { OTLPMetricExporterBase, - OTLPMetricExporterOptions + OTLPMetricExporterOptions, } from '@opentelemetry/exporter-metrics-otlp-http'; import { ResourceMetrics } from '@opentelemetry/sdk-metrics'; import { @@ -24,17 +24,24 @@ import { OTLPGRPCExporterNodeBase, ServiceClientType, validateAndNormalizeUrl, - DEFAULT_COLLECTOR_URL + DEFAULT_COLLECTOR_URL, } from '@opentelemetry/otlp-grpc-exporter-base'; import { baggageUtils, getEnv } from '@opentelemetry/core'; import { Metadata } from '@grpc/grpc-js'; -import { createExportMetricsServiceRequest, IExportMetricsServiceRequest } from '@opentelemetry/otlp-transformer'; - -class OTLPMetricExporterProxy extends OTLPGRPCExporterNodeBase { +import { + createExportMetricsServiceRequest, + IExportMetricsServiceRequest, +} from '@opentelemetry/otlp-transformer'; +class OTLPMetricExporterProxy extends OTLPGRPCExporterNodeBase< + ResourceMetrics, + IExportMetricsServiceRequest +> { constructor(config?: OTLPGRPCExporterConfigNode & OTLPMetricExporterOptions) { super(config); - const headers = baggageUtils.parseKeyPairsIntoRecord(getEnv().OTEL_EXPORTER_OTLP_METRICS_HEADERS); + const headers = baggageUtils.parseKeyPairsIntoRecord( + getEnv().OTEL_EXPORTER_OTLP_METRICS_HEADERS + ); this.metadata ||= new Metadata(); for (const [k, v] of Object.entries(headers)) { this.metadata.set(k, v); @@ -62,16 +69,18 @@ class OTLPMetricExporterProxy extends 
OTLPGRPCExporterNodeBase{ +export class OTLPMetricExporter extends OTLPMetricExporterBase { constructor(config?: OTLPGRPCExporterConfigNode & OTLPMetricExporterOptions) { super(new OTLPMetricExporterProxy(config), config); } diff --git a/experimental/packages/opentelemetry-exporter-metrics-otlp-grpc/test/OTLPMetricExporter.test.ts b/experimental/packages/opentelemetry-exporter-metrics-otlp-grpc/test/OTLPMetricExporter.test.ts index 08a30aca3e..4b8d326545 100644 --- a/experimental/packages/opentelemetry-exporter-metrics-otlp-grpc/test/OTLPMetricExporter.test.ts +++ b/experimental/packages/opentelemetry-exporter-metrics-otlp-grpc/test/OTLPMetricExporter.test.ts @@ -31,14 +31,24 @@ import { ensureResourceIsCorrect, mockCounter, mockHistogram, - mockObservableGauge, setUp, shutdown, + mockObservableGauge, + setUp, + shutdown, } from './metricsHelper'; -import { AggregationTemporality, ResourceMetrics } from '@opentelemetry/sdk-metrics'; -import { IExportMetricsServiceRequest, IResourceMetrics } from '@opentelemetry/otlp-transformer'; +import { + AggregationTemporality, + ResourceMetrics, +} from '@opentelemetry/sdk-metrics'; +import { + IExportMetricsServiceRequest, + IResourceMetrics, +} from '@opentelemetry/otlp-transformer'; const metricsServiceProtoPath = 'opentelemetry/proto/collector/metrics/v1/metrics_service.proto'; -const includeDirs = [path.resolve(__dirname, '../../otlp-grpc-exporter-base/protos')]; +const includeDirs = [ + path.resolve(__dirname, '../../otlp-grpc-exporter-base/protos'), +]; const address = 'localhost:1502'; @@ -56,9 +66,7 @@ const testOTLPMetricExporter = (params: TestParams) => } TLS, ${params.metadata ? 'with' : 'without'} metadata`, () => { let collectorExporter: OTLPMetricExporter; let server: grpc.Server; - let exportedData: - | IResourceMetrics[] - | undefined; + let exportedData: IResourceMetrics[] | undefined; let metrics: ResourceMetrics; let reqMetadata: grpc.Metadata | undefined; @@ -74,9 +82,8 @@ const testOTLPMetricExporter = (params: TestParams) => includeDirs, }) .then((packageDefinition: protoLoader.PackageDefinition) => { - const packageObject: any = grpc.loadPackageDefinition( - packageDefinition - ); + const packageObject: any = + grpc.loadPackageDefinition(packageDefinition); server.addService( packageObject.opentelemetry.proto.collector.metrics.v1 .MetricsService.service, @@ -96,14 +103,14 @@ const testOTLPMetricExporter = (params: TestParams) => ); const credentials = params.useTLS ? grpc.ServerCredentials.createSsl( - fs.readFileSync('./test/certs/ca.crt'), - [ - { - cert_chain: fs.readFileSync('./test/certs/server.crt'), - private_key: fs.readFileSync('./test/certs/server.key'), - }, - ] - ) + fs.readFileSync('./test/certs/ca.crt'), + [ + { + cert_chain: fs.readFileSync('./test/certs/server.crt'), + private_key: fs.readFileSync('./test/certs/server.key'), + }, + ] + ) : grpc.ServerCredentials.createInsecure(); server.bindAsync(address, credentials, () => { server.start(); @@ -119,16 +126,16 @@ const testOTLPMetricExporter = (params: TestParams) => beforeEach(async () => { const credentials = params.useTLS ? 
grpc.credentials.createSsl( - fs.readFileSync('./test/certs/ca.crt'), - fs.readFileSync('./test/certs/client.key'), - fs.readFileSync('./test/certs/client.crt') - ) + fs.readFileSync('./test/certs/ca.crt'), + fs.readFileSync('./test/certs/client.key'), + fs.readFileSync('./test/certs/client.crt') + ) : grpc.credentials.createInsecure(); collectorExporter = new OTLPMetricExporter({ url: 'https://' + address, credentials, metadata: params.metadata, - temporalityPreference: AggregationTemporality.CUMULATIVE + temporalityPreference: AggregationTemporality.CUMULATIVE, }); setUp(); @@ -162,14 +169,13 @@ const testOTLPMetricExporter = (params: TestParams) => beforeEach(() => { // Need to stub/spy on the underlying logger as the "diag" instance is global warnStub = sinon.stub(); - const nop = () => { - }; + const nop = () => {}; const diagLogger: DiagLogger = { debug: nop, error: nop, info: nop, verbose: nop, - warn: warnStub + warn: warnStub, }; diag.setLogger(diagLogger); }); @@ -184,7 +190,7 @@ const testOTLPMetricExporter = (params: TestParams) => headers: { foo: 'bar', }, - temporalityPreference: AggregationTemporality.CUMULATIVE + temporalityPreference: AggregationTemporality.CUMULATIVE, }); const args = warnStub.args[0]; assert.strictEqual(args[0], 'Headers cannot be set when using grpc'); @@ -192,7 +198,7 @@ const testOTLPMetricExporter = (params: TestParams) => it('should warn about path in url', () => { collectorExporter = new OTLPMetricExporter({ url: `http://${address}/v1/metrics`, - temporalityPreference: AggregationTemporality.CUMULATIVE + temporalityPreference: AggregationTemporality.CUMULATIVE, }); const args = warnStub.args[0]; assert.strictEqual( @@ -215,13 +221,21 @@ const testOTLPMetricExporter = (params: TestParams) => assert.ok(exportedData, 'exportedData does not exist'); // The order of the metrics is not guaranteed. 
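Reviewer note (not part of the diff): as the comment above says, the export order of metrics is not guaranteed, which is why the assertions below look each metric up by name via findIndex rather than by position. A tiny helper in the same spirit, illustrative only (the minimal NamedMetric shape is made up for the example):

    // Illustrative only: look a metric up by name instead of relying on array order.
    interface NamedMetric {
      name: string;
    }

    function findMetricByName<T extends NamedMetric>(
      metrics: T[],
      name: string
    ): T | undefined {
      return metrics.find(m => m.name === name);
    }

    // usage, e.g.: findMetricByName(scopeMetrics[0].metrics, 'int-counter')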
- const counterIndex = exportedData[0].scopeMetrics[0].metrics.findIndex(it => it.name === 'int-counter'); - const observableIndex = exportedData[0].scopeMetrics[0].metrics.findIndex(it => it.name === 'double-observable-gauge'); - const histogramIndex = exportedData[0].scopeMetrics[0].metrics.findIndex(it => it.name === 'int-histogram'); + const counterIndex = + exportedData[0].scopeMetrics[0].metrics.findIndex( + it => it.name === 'int-counter' + ); + const observableIndex = + exportedData[0].scopeMetrics[0].metrics.findIndex( + it => it.name === 'double-observable-gauge' + ); + const histogramIndex = + exportedData[0].scopeMetrics[0].metrics.findIndex( + it => it.name === 'int-histogram' + ); const resource = exportedData[0].resource; - const counter = - exportedData[0].scopeMetrics[0].metrics[counterIndex]; + const counter = exportedData[0].scopeMetrics[0].metrics[counterIndex]; const observableGauge = exportedData[0].scopeMetrics[0].metrics[observableIndex]; const histogram = @@ -243,10 +257,7 @@ const testOTLPMetricExporter = (params: TestParams) => [0, 100], ['0', '2', '0'] ); - assert.ok( - typeof resource !== 'undefined', - "resource doesn't exist" - ); + assert.ok(typeof resource !== 'undefined', "resource doesn't exist"); ensureResourceIsCorrect(resource); ensureMetadataIsCorrect(reqMetadata, params.metadata); @@ -269,7 +280,7 @@ describe('OTLPMetricExporter - node (getDefaultUrl)', () => { const url = 'http://foo.bar.com'; const collectorExporter = new OTLPMetricExporter({ url, - temporalityPreference: AggregationTemporality.CUMULATIVE + temporalityPreference: AggregationTemporality.CUMULATIVE, }); setTimeout(() => { assert.strictEqual(collectorExporter._otlpExporter.url, 'foo.bar.com'); @@ -283,27 +294,24 @@ describe('when configuring via environment', () => { it('should use url defined in env', () => { envSource.OTEL_EXPORTER_OTLP_ENDPOINT = 'http://foo.bar'; const collectorExporter = new OTLPMetricExporter(); - assert.strictEqual( - collectorExporter._otlpExporter.url, - 'foo.bar' - ); + assert.strictEqual(collectorExporter._otlpExporter.url, 'foo.bar'); envSource.OTEL_EXPORTER_OTLP_ENDPOINT = ''; }); it('should override global exporter url with signal url defined in env', () => { envSource.OTEL_EXPORTER_OTLP_ENDPOINT = 'http://foo.bar'; envSource.OTEL_EXPORTER_OTLP_METRICS_ENDPOINT = 'http://foo.metrics'; const collectorExporter = new OTLPMetricExporter(); - assert.strictEqual( - collectorExporter._otlpExporter.url, - 'foo.metrics' - ); + assert.strictEqual(collectorExporter._otlpExporter.url, 'foo.metrics'); envSource.OTEL_EXPORTER_OTLP_ENDPOINT = ''; envSource.OTEL_EXPORTER_OTLP_METRICS_ENDPOINT = ''; }); it('should use headers defined via env', () => { envSource.OTEL_EXPORTER_OTLP_HEADERS = 'foo=bar'; const collectorExporter = new OTLPMetricExporter(); - assert.deepStrictEqual(collectorExporter._otlpExporter.metadata?.get('foo'), ['bar']); + assert.deepStrictEqual( + collectorExporter._otlpExporter.metadata?.get('foo'), + ['bar'] + ); envSource.OTEL_EXPORTER_OTLP_HEADERS = ''; }); it('should override global headers config with signal headers defined via env', () => { @@ -314,11 +322,20 @@ describe('when configuring via environment', () => { envSource.OTEL_EXPORTER_OTLP_METRICS_HEADERS = 'foo=boo'; const collectorExporter = new OTLPMetricExporter({ metadata, - temporalityPreference: AggregationTemporality.CUMULATIVE + temporalityPreference: AggregationTemporality.CUMULATIVE, }); - assert.deepStrictEqual(collectorExporter._otlpExporter.metadata?.get('foo'), ['boo']); - 
assert.deepStrictEqual(collectorExporter._otlpExporter.metadata?.get('bar'), ['foo']); - assert.deepStrictEqual(collectorExporter._otlpExporter.metadata?.get('goo'), ['lol']); + assert.deepStrictEqual( + collectorExporter._otlpExporter.metadata?.get('foo'), + ['boo'] + ); + assert.deepStrictEqual( + collectorExporter._otlpExporter.metadata?.get('bar'), + ['foo'] + ); + assert.deepStrictEqual( + collectorExporter._otlpExporter.metadata?.get('goo'), + ['lol'] + ); envSource.OTEL_EXPORTER_OTLP_METRICS_HEADERS = ''; envSource.OTEL_EXPORTER_OTLP_HEADERS = ''; }); diff --git a/experimental/packages/opentelemetry-exporter-metrics-otlp-grpc/test/metricsHelper.ts b/experimental/packages/opentelemetry-exporter-metrics-otlp-grpc/test/metricsHelper.ts index 9a11c8b6f2..4713e7aa3f 100644 --- a/experimental/packages/opentelemetry-exporter-metrics-otlp-grpc/test/metricsHelper.ts +++ b/experimental/packages/opentelemetry-exporter-metrics-otlp-grpc/test/metricsHelper.ts @@ -14,7 +14,13 @@ * limitations under the License. */ -import { Counter, Histogram, ObservableGauge, ObservableResult, ValueType } from '@opentelemetry/api'; +import { + Counter, + Histogram, + ObservableGauge, + ObservableResult, + ValueType, +} from '@opentelemetry/api'; import { Resource } from '@opentelemetry/resources'; import * as assert from 'assert'; import * as grpc from '@grpc/grpc-js'; @@ -61,13 +67,11 @@ export function setUp() { new View({ aggregation: new ExplicitBucketHistogramAggregation([0, 100]), instrumentName: 'int-histogram', - }) - ] + }), + ], }); reader = new TestMetricReader(); - meterProvider.addMetricReader( - reader - ); + meterProvider.addMetricReader(reader); meter = meterProvider.getMeter('default', '0.0.1'); } @@ -87,13 +91,10 @@ export function mockObservableGauge( callback: (observableResult: ObservableResult) => void ): ObservableGauge { const name = 'double-observable-gauge'; - const observableGauge = meter.createObservableGauge( - name, - { - description: 'sample observable gauge description', - valueType: ValueType.DOUBLE, - }, - ); + const observableGauge = meter.createObservableGauge(name, { + description: 'sample observable gauge description', + valueType: ValueType.DOUBLE, + }); observableGauge.addCallback(callback); return observableGauge; @@ -106,9 +107,7 @@ export function mockHistogram(): Histogram { }); } -export function ensureExportedAttributesAreCorrect( - attributes: IKeyValue[] -) { +export function ensureExportedAttributesAreCorrect(attributes: IKeyValue[]) { assert.deepStrictEqual( attributes, [ @@ -214,38 +213,36 @@ export function ensureExportedHistogramIsCorrect( }); } -export function ensureResourceIsCorrect( - resource: IResource -) { +export function ensureResourceIsCorrect(resource: IResource) { assert.deepStrictEqual(resource, { attributes: [ { - 'key': 'service.name', - 'value': { - 'stringValue': `unknown_service:${process.argv0}`, - 'value': 'stringValue' - } + key: 'service.name', + value: { + stringValue: `unknown_service:${process.argv0}`, + value: 'stringValue', + }, }, { - 'key': 'telemetry.sdk.language', - 'value': { - 'stringValue': 'nodejs', - 'value': 'stringValue' - } + key: 'telemetry.sdk.language', + value: { + stringValue: 'nodejs', + value: 'stringValue', + }, }, { - 'key': 'telemetry.sdk.name', - 'value': { - 'stringValue': 'opentelemetry', - 'value': 'stringValue' - } + key: 'telemetry.sdk.name', + value: { + stringValue: 'opentelemetry', + value: 'stringValue', + }, }, { - 'key': 'telemetry.sdk.version', - 'value': { - 'stringValue': VERSION, - 'value': 
'stringValue' - } + key: 'telemetry.sdk.version', + value: { + stringValue: VERSION, + value: 'stringValue', + }, }, { key: 'service', diff --git a/experimental/packages/opentelemetry-exporter-metrics-otlp-http/src/OTLPMetricExporterBase.ts b/experimental/packages/opentelemetry-exporter-metrics-otlp-http/src/OTLPMetricExporterBase.ts index fca5a54d25..bbebe6cd1f 100644 --- a/experimental/packages/opentelemetry-exporter-metrics-otlp-http/src/OTLPMetricExporterBase.ts +++ b/experimental/packages/opentelemetry-exporter-metrics-otlp-http/src/OTLPMetricExporterBase.ts @@ -14,27 +14,25 @@ * limitations under the License. */ -import { - ExportResult, - getEnv -} from '@opentelemetry/core'; +import { ExportResult, getEnv } from '@opentelemetry/core'; import { AggregationTemporality, AggregationTemporalitySelector, InstrumentType, PushMetricExporter, - ResourceMetrics + ResourceMetrics, } from '@opentelemetry/sdk-metrics'; -import { - OTLPMetricExporterOptions -} from './OTLPMetricExporterOptions'; +import { OTLPMetricExporterOptions } from './OTLPMetricExporterOptions'; import { OTLPExporterBase } from '@opentelemetry/otlp-exporter-base'; import { IExportMetricsServiceRequest } from '@opentelemetry/otlp-transformer'; import { diag } from '@opentelemetry/api'; -export const CumulativeTemporalitySelector: AggregationTemporalitySelector = () => AggregationTemporality.CUMULATIVE; +export const CumulativeTemporalitySelector: AggregationTemporalitySelector = + () => AggregationTemporality.CUMULATIVE; -export const DeltaTemporalitySelector: AggregationTemporalitySelector = (instrumentType: InstrumentType) => { +export const DeltaTemporalitySelector: AggregationTemporalitySelector = ( + instrumentType: InstrumentType +) => { switch (instrumentType) { case InstrumentType.COUNTER: case InstrumentType.OBSERVABLE_COUNTER: @@ -49,7 +47,8 @@ export const DeltaTemporalitySelector: AggregationTemporalitySelector = (instrum function chooseTemporalitySelectorFromEnvironment() { const env = getEnv(); - const configuredTemporality = env.OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE.trim().toLowerCase(); + const configuredTemporality = + env.OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE.trim().toLowerCase(); if (configuredTemporality === 'cumulative') { return CumulativeTemporalitySelector; @@ -58,11 +57,15 @@ function chooseTemporalitySelectorFromEnvironment() { return DeltaTemporalitySelector; } - diag.warn(`OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE is set to '${env.OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE}', but only 'cumulative' and 'delta' are allowed. Using default ('cumulative') instead.`); + diag.warn( + `OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE is set to '${env.OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE}', but only 'cumulative' and 'delta' are allowed. Using default ('cumulative') instead.` + ); return CumulativeTemporalitySelector; } -function chooseTemporalitySelector(temporalityPreference?: AggregationTemporality): AggregationTemporalitySelector { +function chooseTemporalitySelector( + temporalityPreference?: AggregationTemporality +): AggregationTemporalitySelector { // Directly passed preference has priority. 
if (temporalityPreference != null) { if (temporalityPreference === AggregationTemporality.DELTA) { @@ -74,20 +77,28 @@ function chooseTemporalitySelector(temporalityPreference?: AggregationTemporalit return chooseTemporalitySelectorFromEnvironment(); } -export class OTLPMetricExporterBase> -implements PushMetricExporter { +export class OTLPMetricExporterBase< + T extends OTLPExporterBase< + OTLPMetricExporterOptions, + ResourceMetrics, + IExportMetricsServiceRequest + > +> implements PushMetricExporter +{ public _otlpExporter: T; protected _aggregationTemporalitySelector: AggregationTemporalitySelector; - constructor(exporter: T, - config?: OTLPMetricExporterOptions) { + constructor(exporter: T, config?: OTLPMetricExporterOptions) { this._otlpExporter = exporter; - this._aggregationTemporalitySelector = chooseTemporalitySelector(config?.temporalityPreference); + this._aggregationTemporalitySelector = chooseTemporalitySelector( + config?.temporalityPreference + ); } - export(metrics: ResourceMetrics, resultCallback: (result: ExportResult) => void): void { + export( + metrics: ResourceMetrics, + resultCallback: (result: ExportResult) => void + ): void { this._otlpExporter.export([metrics], resultCallback); } @@ -99,7 +110,9 @@ implements PushMetricExporter { return Promise.resolve(); } - selectAggregationTemporality(instrumentType: InstrumentType): AggregationTemporality { + selectAggregationTemporality( + instrumentType: InstrumentType + ): AggregationTemporality { return this._aggregationTemporalitySelector(instrumentType); } } diff --git a/experimental/packages/opentelemetry-exporter-metrics-otlp-http/src/OTLPMetricExporterOptions.ts b/experimental/packages/opentelemetry-exporter-metrics-otlp-http/src/OTLPMetricExporterOptions.ts index 324c4ea27f..32529efee1 100644 --- a/experimental/packages/opentelemetry-exporter-metrics-otlp-http/src/OTLPMetricExporterOptions.ts +++ b/experimental/packages/opentelemetry-exporter-metrics-otlp-http/src/OTLPMetricExporterOptions.ts @@ -18,5 +18,5 @@ import { AggregationTemporality } from '@opentelemetry/sdk-metrics'; import { OTLPExporterConfigBase } from '@opentelemetry/otlp-exporter-base'; export interface OTLPMetricExporterOptions extends OTLPExporterConfigBase { - temporalityPreference?: AggregationTemporality + temporalityPreference?: AggregationTemporality; } diff --git a/experimental/packages/opentelemetry-exporter-metrics-otlp-http/src/platform/browser/OTLPMetricExporter.ts b/experimental/packages/opentelemetry-exporter-metrics-otlp-http/src/platform/browser/OTLPMetricExporter.ts index 00c7c0c32c..73c99a0693 100644 --- a/experimental/packages/opentelemetry-exporter-metrics-otlp-http/src/platform/browser/OTLPMetricExporter.ts +++ b/experimental/packages/opentelemetry-exporter-metrics-otlp-http/src/platform/browser/OTLPMetricExporter.ts @@ -22,15 +22,20 @@ import { OTLPExporterBrowserBase, OTLPExporterConfigBase, appendResourcePathToUrl, - appendRootPathToUrlIfNeeded + appendRootPathToUrlIfNeeded, } from '@opentelemetry/otlp-exporter-base'; -import { createExportMetricsServiceRequest, IExportMetricsServiceRequest } from '@opentelemetry/otlp-transformer'; +import { + createExportMetricsServiceRequest, + IExportMetricsServiceRequest, +} from '@opentelemetry/otlp-transformer'; const DEFAULT_COLLECTOR_RESOURCE_PATH = 'v1/metrics'; const DEFAULT_COLLECTOR_URL = `http://localhost:4318/${DEFAULT_COLLECTOR_RESOURCE_PATH}`; -class OTLPExporterBrowserProxy extends OTLPExporterBrowserBase { - +class OTLPExporterBrowserProxy extends OTLPExporterBrowserBase< + 
ResourceMetrics, + IExportMetricsServiceRequest +> { constructor(config?: OTLPMetricExporterOptions & OTLPExporterConfigBase) { super(config); this._headers = Object.assign( @@ -45,10 +50,15 @@ class OTLPExporterBrowserProxy extends OTLPExporterBrowserBase 0 - ? appendRootPathToUrlIfNeeded(getEnv().OTEL_EXPORTER_OTLP_METRICS_ENDPOINT) - : getEnv().OTEL_EXPORTER_OTLP_ENDPOINT.length > 0 - ? appendResourcePathToUrl(getEnv().OTEL_EXPORTER_OTLP_ENDPOINT, DEFAULT_COLLECTOR_RESOURCE_PATH) - : DEFAULT_COLLECTOR_URL; + ? appendRootPathToUrlIfNeeded( + getEnv().OTEL_EXPORTER_OTLP_METRICS_ENDPOINT + ) + : getEnv().OTEL_EXPORTER_OTLP_ENDPOINT.length > 0 + ? appendResourcePathToUrl( + getEnv().OTEL_EXPORTER_OTLP_ENDPOINT, + DEFAULT_COLLECTOR_RESOURCE_PATH + ) + : DEFAULT_COLLECTOR_URL; } convert(metrics: ResourceMetrics[]): IExportMetricsServiceRequest { diff --git a/experimental/packages/opentelemetry-exporter-metrics-otlp-http/src/platform/node/OTLPMetricExporter.ts b/experimental/packages/opentelemetry-exporter-metrics-otlp-http/src/platform/node/OTLPMetricExporter.ts index d09693a253..980931a6cb 100644 --- a/experimental/packages/opentelemetry-exporter-metrics-otlp-http/src/platform/node/OTLPMetricExporter.ts +++ b/experimental/packages/opentelemetry-exporter-metrics-otlp-http/src/platform/node/OTLPMetricExporter.ts @@ -15,22 +15,27 @@ */ import { ResourceMetrics } from '@opentelemetry/sdk-metrics'; -import { getEnv, baggageUtils} from '@opentelemetry/core'; +import { getEnv, baggageUtils } from '@opentelemetry/core'; import { OTLPMetricExporterOptions } from '../../OTLPMetricExporterOptions'; import { OTLPMetricExporterBase } from '../../OTLPMetricExporterBase'; import { OTLPExporterNodeBase, OTLPExporterNodeConfigBase, appendResourcePathToUrl, - appendRootPathToUrlIfNeeded + appendRootPathToUrlIfNeeded, } from '@opentelemetry/otlp-exporter-base'; -import { createExportMetricsServiceRequest, IExportMetricsServiceRequest } from '@opentelemetry/otlp-transformer'; +import { + createExportMetricsServiceRequest, + IExportMetricsServiceRequest, +} from '@opentelemetry/otlp-transformer'; const DEFAULT_COLLECTOR_RESOURCE_PATH = 'v1/metrics'; const DEFAULT_COLLECTOR_URL = `http://localhost:4318/${DEFAULT_COLLECTOR_RESOURCE_PATH}`; -class OTLPExporterNodeProxy extends OTLPExporterNodeBase { - +class OTLPExporterNodeProxy extends OTLPExporterNodeBase< + ResourceMetrics, + IExportMetricsServiceRequest +> { constructor(config?: OTLPExporterNodeConfigBase & OTLPMetricExporterOptions) { super(config); this.headers = Object.assign( @@ -49,10 +54,15 @@ class OTLPExporterNodeProxy extends OTLPExporterNodeBase 0 - ? appendRootPathToUrlIfNeeded(getEnv().OTEL_EXPORTER_OTLP_METRICS_ENDPOINT) - : getEnv().OTEL_EXPORTER_OTLP_ENDPOINT.length > 0 - ? appendResourcePathToUrl(getEnv().OTEL_EXPORTER_OTLP_ENDPOINT, DEFAULT_COLLECTOR_RESOURCE_PATH) - : DEFAULT_COLLECTOR_URL; + ? appendRootPathToUrlIfNeeded( + getEnv().OTEL_EXPORTER_OTLP_METRICS_ENDPOINT + ) + : getEnv().OTEL_EXPORTER_OTLP_ENDPOINT.length > 0 + ? 
appendResourcePathToUrl( + getEnv().OTEL_EXPORTER_OTLP_ENDPOINT, + DEFAULT_COLLECTOR_RESOURCE_PATH + ) + : DEFAULT_COLLECTOR_URL; } } diff --git a/experimental/packages/opentelemetry-exporter-metrics-otlp-http/test/browser/CollectorMetricExporter.test.ts b/experimental/packages/opentelemetry-exporter-metrics-otlp-http/test/browser/CollectorMetricExporter.test.ts index c0cb9dc2ab..7dbc7cc889 100644 --- a/experimental/packages/opentelemetry-exporter-metrics-otlp-http/test/browser/CollectorMetricExporter.test.ts +++ b/experimental/packages/opentelemetry-exporter-metrics-otlp-http/test/browser/CollectorMetricExporter.test.ts @@ -14,19 +14,30 @@ * limitations under the License. */ -import { diag, DiagLogger, DiagLogLevel, Counter, Histogram } from '@opentelemetry/api'; +import { + diag, + DiagLogger, + DiagLogLevel, + Counter, + Histogram, +} from '@opentelemetry/api'; import { ExportResultCode, hrTimeToNanoseconds } from '@opentelemetry/core'; -import { AggregationTemporality, ResourceMetrics, } from '@opentelemetry/sdk-metrics'; +import { + AggregationTemporality, + ResourceMetrics, +} from '@opentelemetry/sdk-metrics'; import * as assert from 'assert'; import * as sinon from 'sinon'; import { OTLPMetricExporter } from '../../src/platform/browser'; import { collect, ensureCounterIsCorrect, - ensureExportMetricsServiceRequestIsSet, ensureHeadersContain, + ensureExportMetricsServiceRequestIsSet, + ensureHeadersContain, ensureHistogramIsCorrect, ensureObservableGaugeIsCorrect, - ensureWebResourceIsCorrect, HISTOGRAM_AGGREGATION_VIEW, + ensureWebResourceIsCorrect, + HISTOGRAM_AGGREGATION_VIEW, mockCounter, mockHistogram, mockObservableGauge, @@ -52,13 +63,10 @@ describe('OTLPMetricExporter - web', () => { stubBeacon = sinon.stub(navigator, 'sendBeacon'); const counter: Counter = mockCounter(); - mockObservableGauge( - observableResult => { - observableResult.observe(3, {}); - observableResult.observe(6, {}); - }, - 'double-observable-gauge2' - ); + mockObservableGauge(observableResult => { + observableResult.observe(3, {}); + observableResult.observe(6, {}); + }, 'double-observable-gauge2'); const histogram: Histogram = mockHistogram(); counter.add(1); @@ -72,14 +80,13 @@ describe('OTLPMetricExporter - web', () => { // Need to stub/spy on the underlying logger as the "diag" instance is global debugStub = sinon.stub(); errorStub = sinon.stub(); - const nop = () => { - }; + const nop = () => {}; const diagLogger: DiagLogger = { debug: debugStub, error: errorStub, info: nop, verbose: nop, - warn: nop + warn: nop, }; diag.setLogger(diagLogger, DiagLogLevel.DEBUG); }); @@ -95,13 +102,12 @@ describe('OTLPMetricExporter - web', () => { beforeEach(() => { collectorExporter = new OTLPMetricExporter({ url: 'http://foo.bar.com', - temporalityPreference: AggregationTemporality.CUMULATIVE + temporalityPreference: AggregationTemporality.CUMULATIVE, }); }); it('should successfully send metrics using sendBeacon', done => { - collectorExporter.export(metrics, () => { - }); + collectorExporter.export(metrics, () => {}); setTimeout(async () => { const args = stubBeacon.args[0]; @@ -111,31 +117,51 @@ describe('OTLPMetricExporter - web', () => { const json = JSON.parse(body) as IExportMetricsServiceRequest; // The order of the metrics is not guaranteed. 
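A short aside on the assertion pattern that the following test hunks merely re-wrap: because metric ordering within a scope is not guaranteed, the tests first look up each metric by descriptor name and only then assert on it. Roughly, under the assumption that `metrics` is the collected ResourceMetrics instance (the helper below is hypothetical shorthand, not in the test file):

import type { ResourceMetrics } from '@opentelemetry/sdk-metrics';

// Hypothetical helper illustrating the findIndex pattern used in these tests.
function indexOfMetric(metrics: ResourceMetrics, name: string): number {
  return metrics.scopeMetrics[0].metrics.findIndex(
    it => it.descriptor.name === name
  );
}
// The matching entry in the exported JSON payload is then compared field by field.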
- const counterIndex = metrics.scopeMetrics[0].metrics.findIndex(it => it.descriptor.name === 'int-counter'); - const observableIndex = metrics.scopeMetrics[0].metrics.findIndex(it => it.descriptor.name === 'double-observable-gauge2'); - const histogramIndex = metrics.scopeMetrics[0].metrics.findIndex(it => it.descriptor.name === 'int-histogram'); + const counterIndex = metrics.scopeMetrics[0].metrics.findIndex( + it => it.descriptor.name === 'int-counter' + ); + const observableIndex = metrics.scopeMetrics[0].metrics.findIndex( + it => it.descriptor.name === 'double-observable-gauge2' + ); + const histogramIndex = metrics.scopeMetrics[0].metrics.findIndex( + it => it.descriptor.name === 'int-histogram' + ); - const metric1 = json.resourceMetrics[0].scopeMetrics[0].metrics[counterIndex]; - const metric2 = json.resourceMetrics[0].scopeMetrics[0].metrics[observableIndex]; - const metric3 = json.resourceMetrics[0].scopeMetrics[0].metrics[histogramIndex]; + const metric1 = + json.resourceMetrics[0].scopeMetrics[0].metrics[counterIndex]; + const metric2 = + json.resourceMetrics[0].scopeMetrics[0].metrics[observableIndex]; + const metric3 = + json.resourceMetrics[0].scopeMetrics[0].metrics[histogramIndex]; assert.ok(typeof metric1 !== 'undefined', "metric doesn't exist"); ensureCounterIsCorrect( metric1, - hrTimeToNanoseconds(metrics.scopeMetrics[0].metrics[counterIndex].dataPoints[0].endTime), - hrTimeToNanoseconds(metrics.scopeMetrics[0].metrics[counterIndex].dataPoints[0].startTime) + hrTimeToNanoseconds( + metrics.scopeMetrics[0].metrics[counterIndex].dataPoints[0] + .endTime + ), + hrTimeToNanoseconds( + metrics.scopeMetrics[0].metrics[counterIndex].dataPoints[0] + .startTime + ) ); - assert.ok( typeof metric2 !== 'undefined', "second metric doesn't exist" ); ensureObservableGaugeIsCorrect( metric2, - hrTimeToNanoseconds(metrics.scopeMetrics[0].metrics[observableIndex].dataPoints[0].endTime), - hrTimeToNanoseconds(metrics.scopeMetrics[0].metrics[observableIndex].dataPoints[0].startTime), + hrTimeToNanoseconds( + metrics.scopeMetrics[0].metrics[observableIndex].dataPoints[0] + .endTime + ), + hrTimeToNanoseconds( + metrics.scopeMetrics[0].metrics[observableIndex].dataPoints[0] + .startTime + ), 6, 'double-observable-gauge2' ); @@ -146,8 +172,14 @@ describe('OTLPMetricExporter - web', () => { ); ensureHistogramIsCorrect( metric3, - hrTimeToNanoseconds(metrics.scopeMetrics[0].metrics[histogramIndex].dataPoints[0].endTime), - hrTimeToNanoseconds(metrics.scopeMetrics[0].metrics[histogramIndex].dataPoints[0].startTime), + hrTimeToNanoseconds( + metrics.scopeMetrics[0].metrics[histogramIndex].dataPoints[0] + .endTime + ), + hrTimeToNanoseconds( + metrics.scopeMetrics[0].metrics[histogramIndex].dataPoints[0] + .startTime + ), [0, 100], [0, 2, 0] ); @@ -170,8 +202,7 @@ describe('OTLPMetricExporter - web', () => { it('should log the successful message', done => { stubBeacon.returns(true); - collectorExporter.export(metrics, () => { - }); + collectorExporter.export(metrics, () => {}); setTimeout(() => { const response: any = debugStub.args[2][0]; @@ -199,7 +230,7 @@ describe('OTLPMetricExporter - web', () => { (window.navigator as any).sendBeacon = false; collectorExporter = new OTLPMetricExporter({ url: 'http://foo.bar.com', - temporalityPreference: AggregationTemporality.CUMULATIVE + temporalityPreference: AggregationTemporality.CUMULATIVE, }); // Overwrites the start time to make tests consistent Object.defineProperty(collectorExporter, '_startTime', { @@ -212,8 +243,7 @@ 
describe('OTLPMetricExporter - web', () => { }); it('should successfully send the metrics using XMLHttpRequest', done => { - collectorExporter.export(metrics, () => { - }); + collectorExporter.export(metrics, () => {}); setTimeout(() => { const request = server.requests[0]; @@ -223,19 +253,34 @@ describe('OTLPMetricExporter - web', () => { const body = request.requestBody; const json = JSON.parse(body) as IExportMetricsServiceRequest; // The order of the metrics is not guaranteed. - const counterIndex = metrics.scopeMetrics[0].metrics.findIndex(it => it.descriptor.name === 'int-counter'); - const observableIndex = metrics.scopeMetrics[0].metrics.findIndex(it => it.descriptor.name === 'double-observable-gauge2'); - const histogramIndex = metrics.scopeMetrics[0].metrics.findIndex(it => it.descriptor.name === 'int-histogram'); + const counterIndex = metrics.scopeMetrics[0].metrics.findIndex( + it => it.descriptor.name === 'int-counter' + ); + const observableIndex = metrics.scopeMetrics[0].metrics.findIndex( + it => it.descriptor.name === 'double-observable-gauge2' + ); + const histogramIndex = metrics.scopeMetrics[0].metrics.findIndex( + it => it.descriptor.name === 'int-histogram' + ); - const metric1 = json.resourceMetrics[0].scopeMetrics[0].metrics[counterIndex]; - const metric2 = json.resourceMetrics[0].scopeMetrics[0].metrics[observableIndex]; - const metric3 = json.resourceMetrics[0].scopeMetrics[0].metrics[histogramIndex]; + const metric1 = + json.resourceMetrics[0].scopeMetrics[0].metrics[counterIndex]; + const metric2 = + json.resourceMetrics[0].scopeMetrics[0].metrics[observableIndex]; + const metric3 = + json.resourceMetrics[0].scopeMetrics[0].metrics[histogramIndex]; assert.ok(typeof metric1 !== 'undefined', "metric doesn't exist"); ensureCounterIsCorrect( metric1, - hrTimeToNanoseconds(metrics.scopeMetrics[0].metrics[counterIndex].dataPoints[0].endTime), - hrTimeToNanoseconds(metrics.scopeMetrics[0].metrics[counterIndex].dataPoints[0].startTime) + hrTimeToNanoseconds( + metrics.scopeMetrics[0].metrics[counterIndex].dataPoints[0] + .endTime + ), + hrTimeToNanoseconds( + metrics.scopeMetrics[0].metrics[counterIndex].dataPoints[0] + .startTime + ) ); assert.ok( @@ -244,8 +289,14 @@ describe('OTLPMetricExporter - web', () => { ); ensureObservableGaugeIsCorrect( metric2, - hrTimeToNanoseconds(metrics.scopeMetrics[0].metrics[observableIndex].dataPoints[0].endTime), - hrTimeToNanoseconds(metrics.scopeMetrics[0].metrics[observableIndex].dataPoints[0].startTime), + hrTimeToNanoseconds( + metrics.scopeMetrics[0].metrics[observableIndex].dataPoints[0] + .endTime + ), + hrTimeToNanoseconds( + metrics.scopeMetrics[0].metrics[observableIndex].dataPoints[0] + .startTime + ), 6, 'double-observable-gauge2' ); @@ -256,8 +307,14 @@ describe('OTLPMetricExporter - web', () => { ); ensureHistogramIsCorrect( metric3, - hrTimeToNanoseconds(metrics.scopeMetrics[0].metrics[histogramIndex].dataPoints[0].endTime), - hrTimeToNanoseconds(metrics.scopeMetrics[0].metrics[histogramIndex].dataPoints[0].startTime), + hrTimeToNanoseconds( + metrics.scopeMetrics[0].metrics[histogramIndex].dataPoints[0] + .endTime + ), + hrTimeToNanoseconds( + metrics.scopeMetrics[0].metrics[histogramIndex].dataPoints[0] + .startTime + ), [0, 100], [0, 2, 0] ); @@ -274,8 +331,7 @@ describe('OTLPMetricExporter - web', () => { }); it('should log the successful message', done => { - collectorExporter.export(metrics, () => { - }); + collectorExporter.export(metrics, () => {}); setTimeout(() => { const request = server.requests[0]; @@ 
-304,8 +360,7 @@ describe('OTLPMetricExporter - web', () => { }); }); it('should send custom headers', done => { - collectorExporter.export(metrics, () => { - }); + collectorExporter.export(metrics, () => {}); setTimeout(() => { const request = server.requests[0]; @@ -324,12 +379,14 @@ describe('OTLPMetricExporter - web', () => { foo: 'bar', bar: 'baz', }; - let collectorExporterConfig: (OTLPExporterConfigBase & OTLPMetricExporterOptions) | undefined; + let collectorExporterConfig: + | (OTLPExporterConfigBase & OTLPMetricExporterOptions) + | undefined; beforeEach(() => { collectorExporterConfig = { headers: customHeaders, - temporalityPreference: AggregationTemporality.CUMULATIVE + temporalityPreference: AggregationTemporality.CUMULATIVE, }; server = sinon.fakeServer.create(); }); @@ -340,13 +397,10 @@ describe('OTLPMetricExporter - web', () => { describe('when "sendBeacon" is available', () => { beforeEach(() => { - collectorExporter = new OTLPMetricExporter( - collectorExporterConfig - ); + collectorExporter = new OTLPMetricExporter(collectorExporterConfig); }); it('should successfully send custom headers using XMLHTTPRequest', done => { - collectorExporter.export(metrics, () => { - }); + collectorExporter.export(metrics, () => {}); setTimeout(() => { const [{ requestHeaders }] = server.requests; @@ -363,14 +417,11 @@ describe('OTLPMetricExporter - web', () => { describe('when "sendBeacon" is NOT available', () => { beforeEach(() => { (window.navigator as any).sendBeacon = false; - collectorExporter = new OTLPMetricExporter( - collectorExporterConfig - ); + collectorExporter = new OTLPMetricExporter(collectorExporterConfig); }); it('should successfully send metrics using XMLHttpRequest', done => { - collectorExporter.export(metrics, () => { - }); + collectorExporter.export(metrics, () => {}); setTimeout(() => { const [{ requestHeaders }] = server.requests; @@ -454,7 +505,8 @@ describe('when configuring via environment', () => { envSource.OTEL_EXPORTER_OTLP_METRICS_ENDPOINT = ''; }); it('should not add root path when signal url defined in env contains path and ends in /', () => { - envSource.OTEL_EXPORTER_OTLP_METRICS_ENDPOINT = 'http://foo.bar/v1/metrics/'; + envSource.OTEL_EXPORTER_OTLP_METRICS_ENDPOINT = + 'http://foo.bar/v1/metrics/'; const collectorExporter = new OTLPMetricExporter(); assert.strictEqual( collectorExporter._otlpExporter.url, @@ -466,9 +518,12 @@ describe('when configuring via environment', () => { envSource.OTEL_EXPORTER_OTLP_HEADERS = 'foo=bar'; const collectorExporter = new OTLPMetricExporter({ headers: {}, - temporalityPreference: AggregationTemporality.CUMULATIVE + temporalityPreference: AggregationTemporality.CUMULATIVE, }); - assert.strictEqual(collectorExporter['_otlpExporter']['_headers'].foo, 'bar'); + assert.strictEqual( + collectorExporter['_otlpExporter']['_headers'].foo, + 'bar' + ); envSource.OTEL_EXPORTER_OTLP_HEADERS = ''; }); it('should override global headers config with signal headers defined via env', () => { @@ -476,10 +531,16 @@ describe('when configuring via environment', () => { envSource.OTEL_EXPORTER_OTLP_METRICS_HEADERS = 'foo=boo'; const collectorExporter = new OTLPMetricExporter({ headers: {}, - temporalityPreference: AggregationTemporality.CUMULATIVE + temporalityPreference: AggregationTemporality.CUMULATIVE, }); - assert.strictEqual(collectorExporter['_otlpExporter']['_headers'].foo, 'boo'); - assert.strictEqual(collectorExporter['_otlpExporter']['_headers'].bar, 'foo'); + assert.strictEqual( + 
collectorExporter['_otlpExporter']['_headers'].foo, + 'boo' + ); + assert.strictEqual( + collectorExporter['_otlpExporter']['_headers'].bar, + 'foo' + ); envSource.OTEL_EXPORTER_OTLP_METRICS_HEADERS = ''; envSource.OTEL_EXPORTER_OTLP_HEADERS = ''; }); diff --git a/experimental/packages/opentelemetry-exporter-metrics-otlp-http/test/common/CollectorMetricExporter.test.ts b/experimental/packages/opentelemetry-exporter-metrics-otlp-http/test/common/CollectorMetricExporter.test.ts index 406e20f612..963328da75 100644 --- a/experimental/packages/opentelemetry-exporter-metrics-otlp-http/test/common/CollectorMetricExporter.test.ts +++ b/experimental/packages/opentelemetry-exporter-metrics-otlp-http/test/common/CollectorMetricExporter.test.ts @@ -15,9 +15,7 @@ */ import { ExportResultCode } from '@opentelemetry/core'; -import { - ResourceMetrics, -} from '@opentelemetry/sdk-metrics'; +import { ResourceMetrics } from '@opentelemetry/sdk-metrics'; import * as assert from 'assert'; import * as sinon from 'sinon'; import { @@ -25,19 +23,21 @@ import { mockCounter, mockObservableGauge, setUp, - shutdown + shutdown, } from '../metricsHelper'; import { OTLPExporterBase, - OTLPExporterConfigBase + OTLPExporterConfigBase, } from '@opentelemetry/otlp-exporter-base'; import { IExportMetricsServiceRequest } from '@opentelemetry/otlp-transformer'; type CollectorExporterConfig = OTLPExporterConfigBase; -class OTLPMetricExporter extends OTLPExporterBase { + IExportMetricsServiceRequest +> { onInit() {} onShutdown() {} @@ -78,13 +78,10 @@ describe('OTLPMetricExporter - common', () => { }; collectorExporter = new OTLPMetricExporter(collectorExporterConfig); const counter = mockCounter(); - mockObservableGauge( - observableResult => { - observableResult.observe(3, {}); - observableResult.observe(6, {}); - }, - 'double-observable-gauge3' - ); + mockObservableGauge(observableResult => { + observableResult.observe(3, {}); + observableResult.observe(6, {}); + }, 'double-observable-gauge3'); counter.add(1); const { resourceMetrics, errors } = await collect(); @@ -137,7 +134,7 @@ describe('OTLPMetricExporter - common', () => { describe('when exporter is shutdown', () => { it( 'should not export anything but return callback with code' + - ' "FailedNotRetryable"', + ' "FailedNotRetryable"', async () => { await collectorExporter.shutdown(); spySend.resetHistory(); @@ -187,10 +184,7 @@ describe('OTLPMetricExporter - common', () => { describe('shutdown', () => { let onShutdownSpy: any; beforeEach(() => { - onShutdownSpy = sinon.stub( - OTLPMetricExporter.prototype, - 'onShutdown' - ); + onShutdownSpy = sinon.stub(OTLPMetricExporter.prototype, 'onShutdown'); collectorExporterConfig = { hostname: 'foo', url: 'http://foo.bar.com', diff --git a/experimental/packages/opentelemetry-exporter-metrics-otlp-http/test/metricsHelper.ts b/experimental/packages/opentelemetry-exporter-metrics-otlp-http/test/metricsHelper.ts index 5989d870f6..6796c155f0 100644 --- a/experimental/packages/opentelemetry-exporter-metrics-otlp-http/test/metricsHelper.ts +++ b/experimental/packages/opentelemetry-exporter-metrics-otlp-http/test/metricsHelper.ts @@ -30,13 +30,13 @@ import { ExplicitBucketHistogramAggregation, MeterProvider, MetricReader, - View + View, } from '@opentelemetry/sdk-metrics'; import { IExportMetricsServiceRequest, IKeyValue, IMetric, - IResource + IResource, } from '@opentelemetry/otlp-transformer'; if (typeof Buffer === 'undefined') { @@ -62,17 +62,17 @@ export const HISTOGRAM_AGGREGATION_VIEW = new View({ instrumentName: 
'int-histogram', }); -const defaultResource = Resource.default().merge(new Resource({ - service: 'ui', - version: 1, - cost: 112.12, -})); +const defaultResource = Resource.default().merge( + new Resource({ + service: 'ui', + version: 1, + cost: 112.12, + }) +); let meterProvider = new MeterProvider({ resource: defaultResource }); let reader = new TestMetricReader(); -meterProvider.addMetricReader( - reader -); +meterProvider.addMetricReader(reader); let meter = meterProvider.getMeter('default', '0.0.1'); export async function collect() { @@ -82,9 +82,7 @@ export async function collect() { export function setUp(views?: View[]) { meterProvider = new MeterProvider({ resource: defaultResource, views }); reader = new TestMetricReader(); - meterProvider.addMetricReader( - reader - ); + meterProvider.addMetricReader(reader); meter = meterProvider.getMeter('default', '0.0.1'); } @@ -104,13 +102,10 @@ export function mockObservableGauge( callback: (observableResult: ObservableResult) => void, name = 'double-observable-gauge' ): ObservableGauge { - const observableGauge = meter.createObservableGauge( - name, - { - description: 'sample observable gauge description', - valueType: ValueType.DOUBLE, - } - ); + const observableGauge = meter.createObservableGauge(name, { + description: 'sample observable gauge description', + valueType: ValueType.DOUBLE, + }); observableGauge.addCallback(callback); return observableGauge; @@ -124,18 +119,14 @@ export function mockDoubleCounter(): Counter { }); } - export function mockObservableCounter( callback: (observableResult: ObservableResult) => void, name = 'double-observable-counter' ): ObservableCounter { - const observableCounter = meter.createObservableCounter( - name, - { - description: 'sample observable counter description', - valueType: ValueType.DOUBLE, - } - ); + const observableCounter = meter.createObservableCounter(name, { + description: 'sample observable counter description', + valueType: ValueType.DOUBLE, + }); observableCounter.addCallback(callback); return observableCounter; @@ -145,13 +136,10 @@ export function mockObservableUpDownCounter( callback: (observableResult: ObservableResult) => void, name = 'double-up-down-observable-counter' ): ObservableUpDownCounter { - const observableUpDownCounter = meter.createObservableUpDownCounter( - name, - { - description: 'sample observable up down counter description', - valueType: ValueType.DOUBLE, - }, - ); + const observableUpDownCounter = meter.createObservableUpDownCounter(name, { + description: 'sample observable up down counter description', + valueType: ValueType.DOUBLE, + }); observableUpDownCounter.addCallback(callback); return observableUpDownCounter; @@ -180,9 +168,7 @@ export const mockedInstrumentationLibraries: InstrumentationScope[] = [ }, ]; -export function ensureAttributesAreCorrect( - attributes: IKeyValue[] -) { +export function ensureAttributesAreCorrect(attributes: IKeyValue[]) { assert.deepStrictEqual( attributes, [ @@ -197,12 +183,13 @@ export function ensureAttributesAreCorrect( ); } -export function ensureWebResourceIsCorrect( - resource: IResource -) { +export function ensureWebResourceIsCorrect(resource: IResource) { assert.strictEqual(resource.attributes.length, 7); assert.strictEqual(resource.attributes[0].key, 'service.name'); - assert.strictEqual(resource.attributes[0].value.stringValue, 'unknown_service'); + assert.strictEqual( + resource.attributes[0].value.stringValue, + 'unknown_service' + ); assert.strictEqual(resource.attributes[1].key, 'telemetry.sdk.language'); 
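For orientation, the helper file reformatted here wires everything through an in-memory reader. Its flow is roughly the following sketch; TestMetricReader is assumed to be the test-only MetricReader defined alongside these helpers, and the sketch is not part of the patch:

import { MeterProvider } from '@opentelemetry/sdk-metrics';
import { Resource } from '@opentelemetry/resources';

// Rough flow of metricsHelper.ts (illustrative only).
const meterProvider = new MeterProvider({
  resource: Resource.default().merge(
    new Resource({ service: 'ui', version: 1, cost: 112.12 })
  ),
});
const reader = new TestMetricReader(); // assumed: the test-only MetricReader from this helper file
meterProvider.addMetricReader(reader);
const meter = meterProvider.getMeter('default', '0.0.1');
// Instruments created on `meter` are collected via reader.collect(),
// and the tests assert on the returned ResourceMetrics.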
assert.strictEqual(resource.attributes[1].value.stringValue, 'webjs'); assert.strictEqual(resource.attributes[2].key, 'telemetry.sdk.name'); @@ -285,7 +272,7 @@ export function ensureObservableGaugeIsCorrect( startTimeUnixNano: startTime, timeUnixNano: time, }, - ] + ], }, }); } @@ -311,7 +298,7 @@ export function ensureObservableCounterIsCorrect( timeUnixNano: time, }, ], - aggregationTemporality: 2 + aggregationTemporality: 2, }, }); } @@ -337,7 +324,7 @@ export function ensureObservableUpDownCounterIsCorrect( timeUnixNano: time, }, ], - aggregationTemporality: 2 + aggregationTemporality: 2, }, }); } @@ -367,7 +354,7 @@ export function ensureHistogramIsCorrect( explicitBounds, }, ], - aggregationTemporality: 2 + aggregationTemporality: 2, }, }); } diff --git a/experimental/packages/opentelemetry-exporter-metrics-otlp-http/test/node/CollectorMetricExporter.test.ts b/experimental/packages/opentelemetry-exporter-metrics-otlp-http/test/node/CollectorMetricExporter.test.ts index 68dd569d89..d23ca628f7 100644 --- a/experimental/packages/opentelemetry-exporter-metrics-otlp-http/test/node/CollectorMetricExporter.test.ts +++ b/experimental/packages/opentelemetry-exporter-metrics-otlp-http/test/node/CollectorMetricExporter.test.ts @@ -14,11 +14,7 @@ * limitations under the License. */ - -import { - diag, - DiagLogger -} from '@opentelemetry/api'; +import { diag, DiagLogger } from '@opentelemetry/api'; import * as core from '@opentelemetry/core'; import * as assert from 'assert'; import * as http from 'http'; @@ -26,12 +22,10 @@ import * as sinon from 'sinon'; import { CumulativeTemporalitySelector, DeltaTemporalitySelector, - OTLPMetricExporterOptions + OTLPMetricExporterOptions, } from '../../src'; -import { - OTLPMetricExporter -} from '../../src/platform/node'; +import { OTLPMetricExporter } from '../../src/platform/node'; import { collect, ensureCounterIsCorrect, @@ -48,15 +42,12 @@ import { import { MockedResponse } from './nodeHelpers'; import { AggregationTemporality, - ResourceMetrics + ResourceMetrics, } from '@opentelemetry/sdk-metrics'; -import { - PassThrough, - Stream -} from 'stream'; +import { PassThrough, Stream } from 'stream'; import { OTLPExporterError, - OTLPExporterNodeConfigBase + OTLPExporterNodeConfigBase, } from '@opentelemetry/otlp-exporter-base'; import { IExportMetricsServiceRequest } from '@opentelemetry/otlp-transformer'; @@ -66,7 +57,8 @@ const address = 'localhost:1501'; describe('OTLPMetricExporter - node with json over http', () => { let collectorExporter: OTLPMetricExporter; - let collectorExporterConfig: OTLPExporterNodeConfigBase & OTLPMetricExporterOptions; + let collectorExporterConfig: OTLPExporterNodeConfigBase & + OTLPMetricExporterOptions; let stubRequest: sinon.SinonStub; let metrics: ResourceMetrics; @@ -86,14 +78,13 @@ describe('OTLPMetricExporter - node with json over http', () => { beforeEach(() => { // Need to stub/spy on the underlying logger as the "diag" instance is global warnStub = sinon.stub(); - const nop = () => { - }; + const nop = () => {}; const diagLogger: DiagLogger = { debug: nop, error: nop, info: nop, verbose: nop, - warn: warnStub + warn: warnStub, }; diag.setLogger(diagLogger); }); @@ -172,7 +163,8 @@ describe('OTLPMetricExporter - node with json over http', () => { envSource.OTEL_EXPORTER_OTLP_METRICS_ENDPOINT = ''; }); it('should not add root path when signal url defined in env contains path', () => { - envSource.OTEL_EXPORTER_OTLP_METRICS_ENDPOINT = 'http://foo.bar/v1/metrics'; + envSource.OTEL_EXPORTER_OTLP_METRICS_ENDPOINT = + 
'http://foo.bar/v1/metrics'; const collectorExporter = new OTLPMetricExporter(); assert.strictEqual( collectorExporter._otlpExporter.url, @@ -181,7 +173,8 @@ describe('OTLPMetricExporter - node with json over http', () => { envSource.OTEL_EXPORTER_OTLP_METRICS_ENDPOINT = ''; }); it('should not add root path when signal url defined in env contains path and ends in /', () => { - envSource.OTEL_EXPORTER_OTLP_METRICS_ENDPOINT = 'http://foo.bar/v1/metrics/'; + envSource.OTEL_EXPORTER_OTLP_METRICS_ENDPOINT = + 'http://foo.bar/v1/metrics/'; const collectorExporter = new OTLPMetricExporter(); assert.strictEqual( collectorExporter._otlpExporter.url, @@ -208,29 +201,47 @@ describe('OTLPMetricExporter - node with json over http', () => { for (const envValue of ['delta', 'DELTA', 'DeLTa', 'delta ']) { envSource.OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE = envValue; const exporter = new OTLPMetricExporter(); - assert.strictEqual(exporter['_aggregationTemporalitySelector'], DeltaTemporalitySelector); + assert.strictEqual( + exporter['_aggregationTemporalitySelector'], + DeltaTemporalitySelector + ); } }); it('should use cumulative temporality defined via env', () => { - for (const envValue of ['cumulative', 'CUMULATIVE', 'CuMULaTIvE', 'cumulative ']) { + for (const envValue of [ + 'cumulative', + 'CUMULATIVE', + 'CuMULaTIvE', + 'cumulative ', + ]) { envSource.OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE = envValue; const exporter = new OTLPMetricExporter(); - assert.strictEqual(exporter['_aggregationTemporalitySelector'], CumulativeTemporalitySelector); + assert.strictEqual( + exporter['_aggregationTemporalitySelector'], + CumulativeTemporalitySelector + ); } }); it('should configure cumulative temporality with invalid value in env', () => { for (const envValue of ['invalid', ' ']) { envSource.OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE = envValue; const exporter = new OTLPMetricExporter(); - assert.strictEqual(exporter['_aggregationTemporalitySelector'], CumulativeTemporalitySelector); + assert.strictEqual( + exporter['_aggregationTemporalitySelector'], + CumulativeTemporalitySelector + ); } }); it('should respect explicit config over environment variable', () => { - envSource.OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE = 'cumulative'; + envSource.OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE = + 'cumulative'; const exporter = new OTLPMetricExporter({ - temporalityPreference: AggregationTemporality.DELTA + temporalityPreference: AggregationTemporality.DELTA, }); - assert.strictEqual(exporter['_aggregationTemporalitySelector'], DeltaTemporalitySelector); + assert.strictEqual( + exporter['_aggregationTemporalitySelector'], + DeltaTemporalitySelector + ); }); }); @@ -245,18 +256,15 @@ describe('OTLPMetricExporter - node with json over http', () => { url: 'http://foo.bar.com', keepAlive: true, httpAgentOptions: { keepAliveMsecs: 2000 }, - temporalityPreference: AggregationTemporality.CUMULATIVE + temporalityPreference: AggregationTemporality.CUMULATIVE, }; collectorExporter = new OTLPMetricExporter(collectorExporterConfig); const counter = mockCounter(); - mockObservableGauge( - observableResult => { - observableResult.observe(6, {}); - }, - 'double-observable-gauge2' - ); + mockObservableGauge(observableResult => { + observableResult.observe(6, {}); + }, 'double-observable-gauge2'); const histogram = mockHistogram(); counter.add(1); histogram.record(7); @@ -268,8 +276,7 @@ describe('OTLPMetricExporter - node with json over http', () => { }); it('should open the connection', 
done => { - collectorExporter.export(metrics, () => { - }); + collectorExporter.export(metrics, () => {}); setTimeout(() => { const mockRes = new MockedResponse(200); @@ -287,8 +294,7 @@ describe('OTLPMetricExporter - node with json over http', () => { }); it('should set custom headers', done => { - collectorExporter.export(metrics, () => { - }); + collectorExporter.export(metrics, () => {}); setTimeout(() => { const mockRes = new MockedResponse(200); @@ -303,8 +309,7 @@ describe('OTLPMetricExporter - node with json over http', () => { }); it('should have keep alive and keepAliveMsecs option set', done => { - collectorExporter.export(metrics, () => { - }); + collectorExporter.export(metrics, () => {}); setTimeout(() => { const mockRes = new MockedResponse(200); @@ -323,41 +328,69 @@ describe('OTLPMetricExporter - node with json over http', () => { it('should successfully send metrics', done => { let buff = Buffer.from(''); - collectorExporter.export(metrics, () => { - }); + collectorExporter.export(metrics, () => {}); fakeRequest.on('end', () => { const responseBody = buff.toString(); const json = JSON.parse(responseBody) as IExportMetricsServiceRequest; // The order of the metrics is not guaranteed. - const counterIndex = metrics.scopeMetrics[0].metrics.findIndex(it => it.descriptor.name === 'int-counter'); - const observableIndex = metrics.scopeMetrics[0].metrics.findIndex(it => it.descriptor.name === 'double-observable-gauge2'); - const histogramIndex = metrics.scopeMetrics[0].metrics.findIndex(it => it.descriptor.name === 'int-histogram'); + const counterIndex = metrics.scopeMetrics[0].metrics.findIndex( + it => it.descriptor.name === 'int-counter' + ); + const observableIndex = metrics.scopeMetrics[0].metrics.findIndex( + it => it.descriptor.name === 'double-observable-gauge2' + ); + const histogramIndex = metrics.scopeMetrics[0].metrics.findIndex( + it => it.descriptor.name === 'int-histogram' + ); - const metric1 = json.resourceMetrics[0].scopeMetrics[0].metrics[counterIndex]; - const metric2 = json.resourceMetrics[0].scopeMetrics[0].metrics[observableIndex]; - const metric3 = json.resourceMetrics[0].scopeMetrics[0].metrics[histogramIndex]; + const metric1 = + json.resourceMetrics[0].scopeMetrics[0].metrics[counterIndex]; + const metric2 = + json.resourceMetrics[0].scopeMetrics[0].metrics[observableIndex]; + const metric3 = + json.resourceMetrics[0].scopeMetrics[0].metrics[histogramIndex]; assert.ok(typeof metric1 !== 'undefined', "counter doesn't exist"); ensureCounterIsCorrect( metric1, - core.hrTimeToNanoseconds(metrics.scopeMetrics[0].metrics[counterIndex].dataPoints[0].endTime), - core.hrTimeToNanoseconds(metrics.scopeMetrics[0].metrics[counterIndex].dataPoints[0].startTime) + core.hrTimeToNanoseconds( + metrics.scopeMetrics[0].metrics[counterIndex].dataPoints[0].endTime + ), + core.hrTimeToNanoseconds( + metrics.scopeMetrics[0].metrics[counterIndex].dataPoints[0] + .startTime + ) + ); + assert.ok( + typeof metric2 !== 'undefined', + "observable gauge doesn't exist" ); - assert.ok(typeof metric2 !== 'undefined', "observable gauge doesn't exist"); ensureObservableGaugeIsCorrect( metric2, - core.hrTimeToNanoseconds(metrics.scopeMetrics[0].metrics[observableIndex].dataPoints[0].endTime), - core.hrTimeToNanoseconds(metrics.scopeMetrics[0].metrics[observableIndex].dataPoints[0].startTime), + core.hrTimeToNanoseconds( + metrics.scopeMetrics[0].metrics[observableIndex].dataPoints[0] + .endTime + ), + core.hrTimeToNanoseconds( + 
metrics.scopeMetrics[0].metrics[observableIndex].dataPoints[0] + .startTime + ), 6, 'double-observable-gauge2' ); assert.ok(typeof metric3 !== 'undefined', "histogram doesn't exist"); ensureHistogramIsCorrect( metric3, - core.hrTimeToNanoseconds(metrics.scopeMetrics[0].metrics[histogramIndex].dataPoints[0].endTime), - core.hrTimeToNanoseconds(metrics.scopeMetrics[0].metrics[histogramIndex].dataPoints[0].startTime), + core.hrTimeToNanoseconds( + metrics.scopeMetrics[0].metrics[histogramIndex].dataPoints[0] + .endTime + ), + core.hrTimeToNanoseconds( + metrics.scopeMetrics[0].metrics[histogramIndex].dataPoints[0] + .startTime + ), [0, 100], [0, 2, 0] ); @@ -444,7 +477,7 @@ describe('OTLPMetricExporter - node with json over http', () => { const url = 'http://foo.bar.com'; const collectorExporter = new OTLPMetricExporter({ url, - temporalityPreference: AggregationTemporality.CUMULATIVE + temporalityPreference: AggregationTemporality.CUMULATIVE, }); setTimeout(() => { assert.strictEqual(collectorExporter._otlpExporter.url, url); diff --git a/experimental/packages/opentelemetry-exporter-metrics-otlp-proto/src/OTLPMetricExporter.ts b/experimental/packages/opentelemetry-exporter-metrics-otlp-proto/src/OTLPMetricExporter.ts index e62a8e0ece..c29ae0085c 100644 --- a/experimental/packages/opentelemetry-exporter-metrics-otlp-proto/src/OTLPMetricExporter.ts +++ b/experimental/packages/opentelemetry-exporter-metrics-otlp-proto/src/OTLPMetricExporter.ts @@ -14,25 +14,31 @@ * limitations under the License. */ +import { OTLPMetricExporterOptions } from '@opentelemetry/exporter-metrics-otlp-http'; import { - OTLPMetricExporterOptions -} from '@opentelemetry/exporter-metrics-otlp-http'; -import { ServiceClientType, OTLPProtoExporterNodeBase } from '@opentelemetry/otlp-proto-exporter-base'; + ServiceClientType, + OTLPProtoExporterNodeBase, +} from '@opentelemetry/otlp-proto-exporter-base'; import { getEnv, baggageUtils } from '@opentelemetry/core'; import { ResourceMetrics } from '@opentelemetry/sdk-metrics'; import { OTLPMetricExporterBase } from '@opentelemetry/exporter-metrics-otlp-http'; import { OTLPExporterNodeConfigBase, appendResourcePathToUrl, - appendRootPathToUrlIfNeeded + appendRootPathToUrlIfNeeded, } from '@opentelemetry/otlp-exporter-base'; -import { createExportMetricsServiceRequest, IExportMetricsServiceRequest } from '@opentelemetry/otlp-transformer'; +import { + createExportMetricsServiceRequest, + IExportMetricsServiceRequest, +} from '@opentelemetry/otlp-transformer'; const DEFAULT_COLLECTOR_RESOURCE_PATH = 'v1/metrics'; const DEFAULT_COLLECTOR_URL = `http://localhost:4318/${DEFAULT_COLLECTOR_RESOURCE_PATH}`; -class OTLPMetricExporterNodeProxy extends OTLPProtoExporterNodeBase { - +class OTLPMetricExporterNodeProxy extends OTLPProtoExporterNodeBase< + ResourceMetrics, + IExportMetricsServiceRequest +> { constructor(config?: OTLPExporterNodeConfigBase & OTLPMetricExporterOptions) { super(config); this.headers = Object.assign( @@ -51,10 +57,15 @@ class OTLPMetricExporterNodeProxy extends OTLPProtoExporterNodeBase 0 - ? appendRootPathToUrlIfNeeded(getEnv().OTEL_EXPORTER_OTLP_METRICS_ENDPOINT) - : getEnv().OTEL_EXPORTER_OTLP_ENDPOINT.length > 0 - ? appendResourcePathToUrl(getEnv().OTEL_EXPORTER_OTLP_ENDPOINT, DEFAULT_COLLECTOR_RESOURCE_PATH) - : DEFAULT_COLLECTOR_URL; + ? appendRootPathToUrlIfNeeded( + getEnv().OTEL_EXPORTER_OTLP_METRICS_ENDPOINT + ) + : getEnv().OTEL_EXPORTER_OTLP_ENDPOINT.length > 0 + ? 
appendResourcePathToUrl( + getEnv().OTEL_EXPORTER_OTLP_ENDPOINT, + DEFAULT_COLLECTOR_RESOURCE_PATH + ) + : DEFAULT_COLLECTOR_URL; } getServiceClientType() { diff --git a/experimental/packages/opentelemetry-exporter-metrics-otlp-proto/test/OTLPMetricExporter.test.ts b/experimental/packages/opentelemetry-exporter-metrics-otlp-proto/test/OTLPMetricExporter.test.ts index 8ff43fed7c..05ca183dcc 100644 --- a/experimental/packages/opentelemetry-exporter-metrics-otlp-proto/test/OTLPMetricExporter.test.ts +++ b/experimental/packages/opentelemetry-exporter-metrics-otlp-proto/test/OTLPMetricExporter.test.ts @@ -16,7 +16,10 @@ import { diag } from '@opentelemetry/api'; import { ExportResultCode } from '@opentelemetry/core'; -import { getExportRequestProto, ServiceClientType } from '@opentelemetry/otlp-proto-exporter-base'; +import { + getExportRequestProto, + ServiceClientType, +} from '@opentelemetry/otlp-proto-exporter-base'; import * as assert from 'assert'; import * as http from 'http'; import * as sinon from 'sinon'; @@ -30,9 +33,15 @@ import { mockCounter, MockedResponse, mockObservableGauge, - mockHistogram, collect, setUp, shutdown, + mockHistogram, + collect, + setUp, + shutdown, } from './metricsHelper'; -import { AggregationTemporality, ResourceMetrics } from '@opentelemetry/sdk-metrics'; +import { + AggregationTemporality, + ResourceMetrics, +} from '@opentelemetry/sdk-metrics'; import { OTLPMetricExporterOptions } from '@opentelemetry/exporter-metrics-otlp-http'; import { Stream, PassThrough } from 'stream'; import { OTLPExporterNodeConfigBase } from '@opentelemetry/otlp-exporter-base'; @@ -42,7 +51,8 @@ let fakeRequest: PassThrough; describe('OTLPMetricExporter - node with proto over http', () => { let collectorExporter: OTLPMetricExporter; - let collectorExporterConfig: OTLPExporterNodeConfigBase & OTLPMetricExporterOptions; + let collectorExporterConfig: OTLPExporterNodeConfigBase & + OTLPMetricExporterOptions; let metrics: ResourceMetrics; afterEach(() => { @@ -109,7 +119,8 @@ describe('OTLPMetricExporter - node with proto over http', () => { envSource.OTEL_EXPORTER_OTLP_METRICS_ENDPOINT = ''; }); it('should not add root path when signal url defined in env contains path', () => { - envSource.OTEL_EXPORTER_OTLP_METRICS_ENDPOINT = 'http://foo.bar/v1/metrics'; + envSource.OTEL_EXPORTER_OTLP_METRICS_ENDPOINT = + 'http://foo.bar/v1/metrics'; const collectorExporter = new OTLPMetricExporter(); assert.strictEqual( collectorExporter._otlpExporter.url, @@ -118,7 +129,8 @@ describe('OTLPMetricExporter - node with proto over http', () => { envSource.OTEL_EXPORTER_OTLP_METRICS_ENDPOINT = ''; }); it('should not add root path when signal url defined in env contains path and ends in /', () => { - envSource.OTEL_EXPORTER_OTLP_METRICS_ENDPOINT = 'http://foo.bar/v1/metrics/'; + envSource.OTEL_EXPORTER_OTLP_METRICS_ENDPOINT = + 'http://foo.bar/v1/metrics/'; const collectorExporter = new OTLPMetricExporter(); assert.strictEqual( collectorExporter._otlpExporter.url, @@ -153,7 +165,7 @@ describe('OTLPMetricExporter - node with proto over http', () => { url: 'http://foo.bar.com', keepAlive: true, httpAgentOptions: { keepAliveMsecs: 2000 }, - temporalityPreference: AggregationTemporality.CUMULATIVE + temporalityPreference: AggregationTemporality.CUMULATIVE, }; collectorExporter = new OTLPMetricExporter(collectorExporterConfig); setUp(); @@ -180,8 +192,7 @@ describe('OTLPMetricExporter - node with proto over http', () => { }); it('should open the connection', done => { - collectorExporter.export(metrics, () 
=> { - }); + collectorExporter.export(metrics, () => {}); sinon.stub(http, 'request').callsFake((options: any, cb: any) => { assert.strictEqual(options.hostname, 'foo.bar.com'); @@ -197,8 +208,7 @@ describe('OTLPMetricExporter - node with proto over http', () => { }); it('should set custom headers', done => { - collectorExporter.export(metrics, () => { - }); + collectorExporter.export(metrics, () => {}); sinon.stub(http, 'request').callsFake((options: any, cb: any) => { assert.strictEqual(options.headers['foo'], 'bar'); @@ -213,8 +223,7 @@ describe('OTLPMetricExporter - node with proto over http', () => { }); it('should have keep alive and keepAliveMsecs option set', done => { - collectorExporter.export(metrics, () => { - }); + collectorExporter.export(metrics, () => {}); sinon.stub(http, 'request').callsFake((options: any, cb: any) => { assert.strictEqual(options.agent.keepAlive, true); @@ -236,18 +245,29 @@ describe('OTLPMetricExporter - node with proto over http', () => { let buff = Buffer.from(''); fakeRequest.on('end', () => { - const ExportTraceServiceRequestProto = getExportRequestProto(ServiceClientType.METRICS); + const ExportTraceServiceRequestProto = getExportRequestProto( + ServiceClientType.METRICS + ); const data = ExportTraceServiceRequestProto.decode(buff); const json = data?.toJSON() as IExportMetricsServiceRequest; // The order of the metrics is not guaranteed. - const counterIndex = metrics.scopeMetrics[0].metrics.findIndex(it => it.descriptor.name === 'int-counter'); - const observableIndex = metrics.scopeMetrics[0].metrics.findIndex(it => it.descriptor.name === 'double-observable-gauge'); - const histogramIndex = metrics.scopeMetrics[0].metrics.findIndex(it => it.descriptor.name === 'int-histogram'); + const counterIndex = metrics.scopeMetrics[0].metrics.findIndex( + it => it.descriptor.name === 'int-counter' + ); + const observableIndex = metrics.scopeMetrics[0].metrics.findIndex( + it => it.descriptor.name === 'double-observable-gauge' + ); + const histogramIndex = metrics.scopeMetrics[0].metrics.findIndex( + it => it.descriptor.name === 'int-histogram' + ); - const metric1 = json.resourceMetrics[0].scopeMetrics[0].metrics[counterIndex]; - const metric2 = json.resourceMetrics[0].scopeMetrics[0].metrics[observableIndex]; - const metric3 = json.resourceMetrics[0].scopeMetrics[0].metrics[histogramIndex]; + const metric1 = + json.resourceMetrics[0].scopeMetrics[0].metrics[counterIndex]; + const metric2 = + json.resourceMetrics[0].scopeMetrics[0].metrics[observableIndex]; + const metric3 = + json.resourceMetrics[0].scopeMetrics[0].metrics[histogramIndex]; assert.ok(typeof metric1 !== 'undefined', "counter doesn't exist"); ensureExportedCounterIsCorrect( @@ -255,7 +275,10 @@ describe('OTLPMetricExporter - node with proto over http', () => { metric1.sum?.dataPoints[0].timeUnixNano, metric1.sum?.dataPoints[0].startTimeUnixNano ); - assert.ok(typeof metric2 !== 'undefined', "observable gauge doesn't exist"); + assert.ok( + typeof metric2 !== 'undefined', + "observable gauge doesn't exist" + ); ensureExportedObservableGaugeIsCorrect( metric2, metric2.gauge?.dataPoints[0].timeUnixNano, @@ -282,7 +305,7 @@ describe('OTLPMetricExporter - node with proto over http', () => { }); const clock = sinon.useFakeTimers(); - collectorExporter.export(metrics, () => { }); + collectorExporter.export(metrics, () => {}); clock.tick(200); clock.restore(); }); diff --git a/experimental/packages/opentelemetry-exporter-metrics-otlp-proto/test/metricsHelper.ts 
b/experimental/packages/opentelemetry-exporter-metrics-otlp-proto/test/metricsHelper.ts index d86775d332..0012f0c118 100644 --- a/experimental/packages/opentelemetry-exporter-metrics-otlp-proto/test/metricsHelper.ts +++ b/experimental/packages/opentelemetry-exporter-metrics-otlp-proto/test/metricsHelper.ts @@ -27,9 +27,13 @@ import { ExplicitBucketHistogramAggregation, MeterProvider, MetricReader, - View + View, } from '@opentelemetry/sdk-metrics'; -import { IExportMetricsServiceRequest, IKeyValue, IMetric } from '@opentelemetry/otlp-transformer'; +import { + IExportMetricsServiceRequest, + IKeyValue, + IMetric, +} from '@opentelemetry/otlp-transformer'; import { Stream } from 'stream'; export class TestMetricReader extends MetricReader { @@ -66,13 +70,11 @@ export function setUp() { new View({ aggregation: new ExplicitBucketHistogramAggregation([0, 100]), instrumentName: 'int-histogram', - }) - ] + }), + ], }); reader = new TestMetricReader(); - meterProvider.addMetricReader( - reader - ); + meterProvider.addMetricReader(reader); meter = meterProvider.getMeter('default', '0.0.1'); } @@ -92,13 +94,10 @@ export function mockObservableGauge( callback: (observableResult: ObservableResult) => void ): ObservableGauge { const name = 'double-observable-gauge'; - const observableGauge = meter.createObservableGauge( - name, - { - description: 'sample observable gauge description', - valueType: ValueType.DOUBLE, - }, - ); + const observableGauge = meter.createObservableGauge(name, { + description: 'sample observable gauge description', + valueType: ValueType.DOUBLE, + }); observableGauge.addCallback(callback); return observableGauge; @@ -113,9 +112,7 @@ export function mockHistogram(): Histogram { }); } -export function ensureProtoAttributesAreCorrect( - attributes: IKeyValue[] -) { +export function ensureProtoAttributesAreCorrect(attributes: IKeyValue[]) { assert.deepStrictEqual( attributes, [ diff --git a/experimental/packages/opentelemetry-exporter-prometheus/src/PrometheusExporter.ts b/experimental/packages/opentelemetry-exporter-prometheus/src/PrometheusExporter.ts index 4f333eec55..63c00c4d6f 100644 --- a/experimental/packages/opentelemetry-exporter-prometheus/src/PrometheusExporter.ts +++ b/experimental/packages/opentelemetry-exporter-prometheus/src/PrometheusExporter.ts @@ -15,20 +15,13 @@ */ import { diag } from '@opentelemetry/api'; -import { - globalErrorHandler, -} from '@opentelemetry/core'; +import { globalErrorHandler } from '@opentelemetry/core'; import { Aggregation, AggregationTemporality, - MetricReader + MetricReader, } from '@opentelemetry/sdk-metrics'; -import { - createServer, - IncomingMessage, - Server, - ServerResponse -} from 'http'; +import { createServer, IncomingMessage, Server, ServerResponse } from 'http'; import { ExporterConfig } from './export/types'; import { PrometheusSerializer } from './PrometheusSerializer'; /** Node.js v8.x compat */ @@ -64,7 +57,8 @@ export class PrometheusExporter extends MetricReader { constructor(config: ExporterConfig = {}, callback?: () => void) { super({ aggregationSelector: _instrumentType => Aggregation.Default(), - aggregationTemporalitySelector: _instrumentType => AggregationTemporality.CUMULATIVE + aggregationTemporalitySelector: _instrumentType => + AggregationTemporality.CUMULATIVE, }); this._host = config.host || @@ -127,7 +121,7 @@ export class PrometheusExporter extends MetricReader { diag.debug('Prometheus exporter was stopped'); } else { if ( - ((err as unknown) as { code: string }).code !== + (err as unknown as { code: 
string }).code !== 'ERR_SERVER_NOT_RUNNING' ) { globalErrorHandler(err); @@ -182,7 +176,10 @@ export class PrometheusExporter extends MetricReader { request: IncomingMessage, response: ServerResponse ) => { - if (request.url != null && new URL(request.url, this._baseUrl).pathname === this._endpoint) { + if ( + request.url != null && + new URL(request.url, this._baseUrl).pathname === this._endpoint + ) { this._exportMetrics(response); } else { this._notFound(response); @@ -195,19 +192,21 @@ export class PrometheusExporter extends MetricReader { private _exportMetrics = (response: ServerResponse) => { response.statusCode = 200; response.setHeader('content-type', 'text/plain'); - this.collect() - .then( - collectionResult => { - const { resourceMetrics, errors } = collectionResult; - if (errors.length) { - diag.error('PrometheusExporter: metrics collection errors', ...errors); - } - response.end(this._serializer.serialize(resourceMetrics)); - }, - err => { - response.end(`# failed to export metrics: ${err}`); + this.collect().then( + collectionResult => { + const { resourceMetrics, errors } = collectionResult; + if (errors.length) { + diag.error( + 'PrometheusExporter: metrics collection errors', + ...errors + ); } - ); + response.end(this._serializer.serialize(resourceMetrics)); + }, + err => { + response.end(`# failed to export metrics: ${err}`); + } + ); }; /** diff --git a/experimental/packages/opentelemetry-exporter-prometheus/src/PrometheusSerializer.ts b/experimental/packages/opentelemetry-exporter-prometheus/src/PrometheusSerializer.ts index 0879f16e87..8df7e7a56b 100644 --- a/experimental/packages/opentelemetry-exporter-prometheus/src/PrometheusSerializer.ts +++ b/experimental/packages/opentelemetry-exporter-prometheus/src/PrometheusSerializer.ts @@ -114,9 +114,7 @@ function valueString(value: number) { } } -function toPrometheusType( - metricData: MetricData, -): PrometheusDataTypeLiteral { +function toPrometheusType(metricData: MetricData): PrometheusDataTypeLiteral { switch (metricData.dataPointType) { case DataPointType.SUM: if (metricData.isMonotonic) { @@ -212,45 +210,58 @@ export class PrometheusSerializer { } const dataPointType = metricData.dataPointType; - name = enforcePrometheusNamingConvention( - name, - metricData.descriptor.type - ); + name = enforcePrometheusNamingConvention(name, metricData.descriptor.type); const help = `# HELP ${name} ${escapeString( metricData.descriptor.description || 'description missing' )}`; - const unit = metricData.descriptor.unit ? `\n# UNIT ${name} ${escapeString( - metricData.descriptor.unit, - )}` : ''; - const type = `# TYPE ${name} ${toPrometheusType( - metricData - )}`; + const unit = metricData.descriptor.unit + ? 
`\n# UNIT ${name} ${escapeString(metricData.descriptor.unit)}` + : ''; + const type = `# TYPE ${name} ${toPrometheusType(metricData)}`; let results = ''; switch (dataPointType) { case DataPointType.SUM: case DataPointType.GAUGE: { results = metricData.dataPoints - .map(it => this._serializeSingularDataPoint(name, metricData.descriptor.type, it)) + .map(it => + this._serializeSingularDataPoint( + name, + metricData.descriptor.type, + it + ) + ) .join(''); break; } case DataPointType.HISTOGRAM: { results = metricData.dataPoints - .map(it => this._serializeHistogramDataPoint(name, metricData.descriptor.type, it)) + .map(it => + this._serializeHistogramDataPoint( + name, + metricData.descriptor.type, + it + ) + ) .join(''); break; } default: { - diag.error(`Unrecognizable DataPointType: ${dataPointType} for metric "${name}"`); + diag.error( + `Unrecognizable DataPointType: ${dataPointType} for metric "${name}"` + ); } } return `${help}${unit}\n${type}\n${results}`.trim(); } - private _serializeSingularDataPoint(name: string, type: InstrumentType, dataPoint: DataPoint): string { + private _serializeSingularDataPoint( + name: string, + type: InstrumentType, + dataPoint: DataPoint + ): string { let results = ''; name = enforcePrometheusNamingConvention(name, type); @@ -266,7 +277,11 @@ export class PrometheusSerializer { return results; } - private _serializeHistogramDataPoint(name: string, type: InstrumentType, dataPoint: DataPoint): string { + private _serializeHistogramDataPoint( + name: string, + type: InstrumentType, + dataPoint: DataPoint + ): string { let results = ''; name = enforcePrometheusNamingConvention(name, type); diff --git a/experimental/packages/opentelemetry-exporter-prometheus/test/PrometheusExporter.test.ts b/experimental/packages/opentelemetry-exporter-prometheus/test/PrometheusExporter.test.ts index 31137bd156..c7050140fe 100644 --- a/experimental/packages/opentelemetry-exporter-prometheus/test/PrometheusExporter.test.ts +++ b/experimental/packages/opentelemetry-exporter-prometheus/test/PrometheusExporter.test.ts @@ -14,11 +14,7 @@ * limitations under the License. 
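Before the exporter tests that follow, a brief usage sketch of the PrometheusExporter whose request handler and serializer were reflowed above; the port and endpoint are the defaults these tests scrape (http://localhost:9464/metrics). Illustrative only, not part of this patch:

import { MeterProvider } from '@opentelemetry/sdk-metrics';
import { PrometheusExporter } from '@opentelemetry/exporter-prometheus';

// Sketch only; not part of this patch.
const exporter = new PrometheusExporter({}, () => {
  // Callback fires once the scrape server is listening on http://localhost:9464/metrics.
});

const meterProvider = new MeterProvider();
meterProvider.addMetricReader(exporter); // PrometheusExporter is itself a MetricReader
meterProvider.getMeter('example').createCounter('requests_total').add(1);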
*/ -import { - Counter, - Meter, - ObservableResult -} from '@opentelemetry/api'; +import { Counter, Meter, ObservableResult } from '@opentelemetry/api'; import { MeterProvider } from '@opentelemetry/sdk-metrics'; import * as assert from 'assert'; import * as sinon from 'sinon'; @@ -30,7 +26,7 @@ import { sdkLanguage, sdkName, sdkVersion, - serviceName + serviceName, } from './util'; import { SinonStubbedInstance } from 'sinon'; @@ -39,7 +35,7 @@ const infoLine = `target_info{service_name="${serviceName}",telemetry_sdk_langua const serializedDefaultResourceLines = [ '# HELP target_info Target metadata', '# TYPE target_info gauge', - infoLine + infoLine, ]; describe('PrometheusExporter', () => { @@ -135,7 +131,7 @@ describe('PrometheusExporter', () => { it('should unref the server to allow graceful termination', () => { const mockServer = sinon.createStubInstance(http.Server); const createStub = sinon.stub(http, 'createServer'); - createStub.returns((mockServer as any) as http.Server); + createStub.returns(mockServer as any as http.Server); const exporter = new PrometheusExporter({}, async () => { await exporter.shutdown(); }); @@ -217,20 +213,18 @@ describe('PrometheusExporter', () => { it('should able to call getMetricsRequestHandler function to generate response with metrics', async () => { const exporter = new PrometheusExporter({ preventServerStart: true }); - const mockRequest: SinonStubbedInstance = sinon.createStubInstance( - http.IncomingMessage - ); - const mockResponse: SinonStubbedInstance = sinon.createStubInstance( - http.ServerResponse - ); + const mockRequest: SinonStubbedInstance = + sinon.createStubInstance(http.IncomingMessage); + const mockResponse: SinonStubbedInstance = + sinon.createStubInstance(http.ServerResponse); let resolve: () => void; const deferred = new Promise(res => { resolve = res; }); mockResponse.end.callsFake(() => resolve() as any); exporter.getMetricsRequestHandler( - (mockRequest as unknown) as http.IncomingMessage, - (mockResponse as unknown) as http.ServerResponse + mockRequest as unknown as http.IncomingMessage, + mockResponse as unknown as http.ServerResponse ); await deferred; sinon.assert.calledOnce(mockResponse.setHeader); @@ -289,7 +283,7 @@ describe('PrometheusExporter', () => { 'metric_observable_gauge', { description: 'a test description', - }, + } ); observableGauge.addCallback((observableResult: ObservableResult) => { observableResult.observe(getCpuUsage(), { @@ -358,7 +352,7 @@ describe('PrometheusExporter', () => { assert.deepStrictEqual(lines, [ ...serializedDefaultResourceLines, - '# no registered metrics' + '# no registered metrics', ]); }); @@ -423,7 +417,7 @@ describe('PrometheusExporter', () => { 'metric_observable_counter', { description: 'a test description', - }, + } ); observableCounter.addCallback((observableResult: ObservableResult) => { observableResult.observe(getValue(), { @@ -452,13 +446,15 @@ describe('PrometheusExporter', () => { 'metric_observable_up_down_counter', { description: 'a test description', - }, + } + ); + observableUpDownCounter.addCallback( + (observableResult: ObservableResult) => { + observableResult.observe(getValue(), { + key1: 'attributeValue1', + }); + } ); - observableUpDownCounter.addCallback((observableResult: ObservableResult) => { - observableResult.observe(getValue(), { - key1: 'attributeValue1', - }); - }); const body = await request('http://localhost:9464/metrics'); const lines = body.split('\n'); diff --git 
a/experimental/packages/opentelemetry-exporter-prometheus/test/PrometheusSerializer.test.ts b/experimental/packages/opentelemetry-exporter-prometheus/test/PrometheusSerializer.test.ts index a4792eef54..6c5def7bcf 100644 --- a/experimental/packages/opentelemetry-exporter-prometheus/test/PrometheusSerializer.test.ts +++ b/experimental/packages/opentelemetry-exporter-prometheus/test/PrometheusSerializer.test.ts @@ -15,10 +15,7 @@ */ import * as assert from 'assert'; -import { - MetricAttributes, - UpDownCounter -} from '@opentelemetry/api'; +import { MetricAttributes, UpDownCounter } from '@opentelemetry/api'; import { Aggregation, AggregationTemporality, @@ -40,7 +37,7 @@ import { sdkLanguage, sdkName, sdkVersion, - serviceName + serviceName, } from './util'; import { Resource } from '@opentelemetry/resources'; @@ -57,8 +54,9 @@ const serializedDefaultResource = class TestMetricReader extends MetricReader { constructor() { super({ - aggregationTemporalitySelector: _instrumentType => AggregationTemporality.CUMULATIVE, - aggregationSelector: _instrumentType => Aggregation.Default() + aggregationTemporalitySelector: _instrumentType => + AggregationTemporality.CUMULATIVE, + aggregationSelector: _instrumentType => Aggregation.Default(), }); } @@ -86,11 +84,14 @@ describe('PrometheusSerializer', () => { describe('Singular', () => { async function testSerializer(serializer: PrometheusSerializer) { const reader = new TestMetricReader(); - const meterProvider = new MeterProvider( - { - views: [new View({ aggregation: new SumAggregation(), instrumentName: '*' })] - } - ); + const meterProvider = new MeterProvider({ + views: [ + new View({ + aggregation: new SumAggregation(), + instrumentName: '*', + }), + ], + }); meterProvider.addMetricReader(reader); const meter = meterProvider.getMeter('test'); @@ -106,7 +107,11 @@ describe('PrometheusSerializer', () => { const pointData = metric.dataPoints as DataPoint[]; assert.strictEqual(pointData.length, 1); - const result = serializer['_serializeSingularDataPoint'](metric.descriptor.name, metric.descriptor.type, pointData[0]); + const result = serializer['_serializeSingularDataPoint']( + metric.descriptor.name, + metric.descriptor.type, + pointData[0] + ); return result; } @@ -122,10 +127,7 @@ describe('PrometheusSerializer', () => { it('should serialize metrics with singular data type without timestamp', async () => { const serializer = new PrometheusSerializer(undefined, false); const result = await testSerializer(serializer); - assert.strictEqual( - result, - 'test_total{foo1="bar1",foo2="bar2"} 1\n' - ); + assert.strictEqual(result, 'test_total{foo1="bar1",foo2="bar2"} 1\n'); }); }); @@ -133,10 +135,12 @@ describe('PrometheusSerializer', () => { async function testSerializer(serializer: PrometheusSerializer) { const reader = new TestMetricReader(); const meterProvider = new MeterProvider({ - views: [new View({ - aggregation: new ExplicitBucketHistogramAggregation([1, 10, 100]), - instrumentName: '*' - })] + views: [ + new View({ + aggregation: new ExplicitBucketHistogramAggregation([1, 10, 100]), + instrumentName: '*', + }), + ], }); meterProvider.addMetricReader(reader); const meter = meterProvider.getMeter('test'); @@ -153,7 +157,11 @@ describe('PrometheusSerializer', () => { const pointData = metric.dataPoints as DataPoint[]; assert.strictEqual(pointData.length, 1); - const result = serializer['_serializeHistogramDataPoint'](metric.descriptor.name, metric.descriptor.type, pointData[0]); + const result = serializer['_serializeHistogramDataPoint']( + 
metric.descriptor.name, + metric.descriptor.type, + pointData[0] + ); return result; } @@ -192,7 +200,12 @@ describe('PrometheusSerializer', () => { async function testSerializer(serializer: PrometheusSerializer) { const reader = new TestMetricReader(); const meterProvider = new MeterProvider({ - views: [new View({ aggregation: new SumAggregation(), instrumentName: '*' })] + views: [ + new View({ + aggregation: new SumAggregation(), + instrumentName: '*', + }), + ], }); meterProvider.addMetricReader(reader); const meter = meterProvider.getMeter('test'); @@ -243,8 +256,11 @@ describe('PrometheusSerializer', () => { const reader = new TestMetricReader(); const meterProvider = new MeterProvider({ views: [ - new View({ aggregation: new SumAggregation(), instrumentName: '*' }) - ] + new View({ + aggregation: new SumAggregation(), + instrumentName: '*', + }), + ], }); meterProvider.addMetricReader(reader); const meter = meterProvider.getMeter('test'); @@ -270,9 +286,9 @@ describe('PrometheusSerializer', () => { assert.strictEqual( result, '# HELP test_total foobar\n' + - '# TYPE test_total gauge\n' + - `test_total{val="1"} 1 ${mockedHrTimeMs}\n` + - `test_total{val="2"} 1 ${mockedHrTimeMs}\n` + '# TYPE test_total gauge\n' + + `test_total{val="1"} 1 ${mockedHrTimeMs}\n` + + `test_total{val="2"} 1 ${mockedHrTimeMs}\n` ); }); @@ -282,9 +298,9 @@ describe('PrometheusSerializer', () => { assert.strictEqual( result, '# HELP test_total foobar\n' + - '# TYPE test_total gauge\n' + - 'test_total{val="1"} 1\n' + - 'test_total{val="2"} 1\n' + '# TYPE test_total gauge\n' + + 'test_total{val="1"} 1\n' + + 'test_total{val="2"} 1\n' ); }); }); @@ -294,8 +310,11 @@ describe('PrometheusSerializer', () => { const reader = new TestMetricReader(); const meterProvider = new MeterProvider({ views: [ - new View({ aggregation: new LastValueAggregation(), instrumentName: '*' }) - ] + new View({ + aggregation: new LastValueAggregation(), + instrumentName: '*', + }), + ], }); meterProvider.addMetricReader(reader); const meter = meterProvider.getMeter('test'); @@ -321,9 +340,9 @@ describe('PrometheusSerializer', () => { assert.strictEqual( result, '# HELP test_total foobar\n' + - '# TYPE test_total gauge\n' + - `test_total{val="1"} 1 ${mockedHrTimeMs}\n` + - `test_total{val="2"} 1 ${mockedHrTimeMs}\n` + '# TYPE test_total gauge\n' + + `test_total{val="1"} 1 ${mockedHrTimeMs}\n` + + `test_total{val="2"} 1 ${mockedHrTimeMs}\n` ); }); @@ -333,9 +352,9 @@ describe('PrometheusSerializer', () => { assert.strictEqual( result, '# HELP test_total foobar\n' + - '# TYPE test_total gauge\n' + - 'test_total{val="1"} 1\n' + - 'test_total{val="2"} 1\n' + '# TYPE test_total gauge\n' + + 'test_total{val="1"} 1\n' + + 'test_total{val="2"} 1\n' ); }); }); @@ -344,10 +363,12 @@ describe('PrometheusSerializer', () => { async function testSerializer(serializer: PrometheusSerializer) { const reader = new TestMetricReader(); const meterProvider = new MeterProvider({ - views: [new View({ - aggregation: new ExplicitBucketHistogramAggregation([1, 10, 100]), - instrumentName: '*' - })] + views: [ + new View({ + aggregation: new ExplicitBucketHistogramAggregation([1, 10, 100]), + instrumentName: '*', + }), + ], }); meterProvider.addMetricReader(reader); const meter = meterProvider.getMeter('test'); @@ -377,7 +398,7 @@ describe('PrometheusSerializer', () => { assert.strictEqual( result, '# HELP test foobar\n' + - '# TYPE test histogram\n' + + '# TYPE test histogram\n' + `test_count{val="1"} 3 ${mockedHrTimeMs}\n` + `test_sum{val="1"} 175 
${mockedHrTimeMs}\n` + `test_bucket{val="1",le="1"} 0 ${mockedHrTimeMs}\n` + @@ -400,9 +421,9 @@ describe('PrometheusSerializer', () => { views: [ new View({ aggregation: new ExplicitBucketHistogramAggregation([1, 10, 100]), - instrumentName: '*' - }) - ] + instrumentName: '*', + }), + ], }); meterProvider.addMetricReader(reader); const meter = meterProvider.getMeter('test'); @@ -426,28 +447,33 @@ describe('PrometheusSerializer', () => { assert.strictEqual( result, '# HELP test foobar\n' + - '# TYPE test histogram\n' + - `test_count{val="1"} 3 ${mockedHrTimeMs}\n` + - `test_bucket{val="1",le="1"} 0 ${mockedHrTimeMs}\n` + - `test_bucket{val="1",le="10"} 1 ${mockedHrTimeMs}\n` + - `test_bucket{val="1",le="100"} 2 ${mockedHrTimeMs}\n` + - `test_bucket{val="1",le="+Inf"} 3 ${mockedHrTimeMs}\n` + - `test_count{val="2"} 1 ${mockedHrTimeMs}\n` + - `test_bucket{val="2",le="1"} 0 ${mockedHrTimeMs}\n` + - `test_bucket{val="2",le="10"} 1 ${mockedHrTimeMs}\n` + - `test_bucket{val="2",le="100"} 1 ${mockedHrTimeMs}\n` + - `test_bucket{val="2",le="+Inf"} 1 ${mockedHrTimeMs}\n` + '# TYPE test histogram\n' + + `test_count{val="1"} 3 ${mockedHrTimeMs}\n` + + `test_bucket{val="1",le="1"} 0 ${mockedHrTimeMs}\n` + + `test_bucket{val="1",le="10"} 1 ${mockedHrTimeMs}\n` + + `test_bucket{val="1",le="100"} 2 ${mockedHrTimeMs}\n` + + `test_bucket{val="1",le="+Inf"} 3 ${mockedHrTimeMs}\n` + + `test_count{val="2"} 1 ${mockedHrTimeMs}\n` + + `test_bucket{val="2",le="1"} 0 ${mockedHrTimeMs}\n` + + `test_bucket{val="2",le="10"} 1 ${mockedHrTimeMs}\n` + + `test_bucket{val="2",le="100"} 1 ${mockedHrTimeMs}\n` + + `test_bucket{val="2",le="+Inf"} 1 ${mockedHrTimeMs}\n` ); }); }); }); describe('validate against metric conventions', () => { - - async function getCounterResult(name: string, serializer: PrometheusSerializer, options: Partial<{ unit: string, exportAll: boolean }> = {}) { + async function getCounterResult( + name: string, + serializer: PrometheusSerializer, + options: Partial<{ unit: string; exportAll: boolean }> = {} + ) { const reader = new TestMetricReader(); const meterProvider = new MeterProvider({ - views: [new View({ aggregation: new SumAggregation(), instrumentName: '*' })] + views: [ + new View({ aggregation: new SumAggregation(), instrumentName: '*' }), + ], }); meterProvider.addMetricReader(reader); const meter = meterProvider.getMeter('test'); @@ -469,7 +495,11 @@ describe('PrometheusSerializer', () => { const result = serializer.serialize(resourceMetrics); return result; } else { - const result = serializer['_serializeSingularDataPoint'](metric.descriptor.name, metric.descriptor.type, pointData[0]); + const result = serializer['_serializeSingularDataPoint']( + metric.descriptor.name, + metric.descriptor.type, + pointData[0] + ); return result; } } @@ -478,27 +508,32 @@ describe('PrometheusSerializer', () => { const serializer = new PrometheusSerializer(); const unitOfMetric = 'seconds'; - const result = await getCounterResult('test', serializer, { unit: unitOfMetric, exportAll: true }); + const result = await getCounterResult('test', serializer, { + unit: unitOfMetric, + exportAll: true, + }); assert.strictEqual( result, serializedDefaultResource + - '# HELP test_total description missing\n' + - `# UNIT test_total ${unitOfMetric}\n` + - '# TYPE test_total counter\n' + - `test_total 1 ${mockedHrTimeMs}\n` + '# HELP test_total description missing\n' + + `# UNIT test_total ${unitOfMetric}\n` + + '# TYPE test_total counter\n' + + `test_total 1 ${mockedHrTimeMs}\n` ); }); it('should not export unit block 
when unit of metric is missing', async () => { const serializer = new PrometheusSerializer(); - const result = await getCounterResult('test', serializer, { exportAll: true }); + const result = await getCounterResult('test', serializer, { + exportAll: true, + }); assert.strictEqual( result, serializedDefaultResource + - '# HELP test_total description missing\n' + - '# TYPE test_total counter\n' + - `test_total 1 ${mockedHrTimeMs}\n` + '# HELP test_total description missing\n' + + '# TYPE test_total counter\n' + + `test_total 1 ${mockedHrTimeMs}\n` ); }); @@ -518,10 +553,16 @@ describe('PrometheusSerializer', () => { }); describe('serialize non-normalized values', () => { - async function testSerializer(serializer: PrometheusSerializer, name: string, fn: (counter: UpDownCounter) => void) { + async function testSerializer( + serializer: PrometheusSerializer, + name: string, + fn: (counter: UpDownCounter) => void + ) { const reader = new TestMetricReader(); const meterProvider = new MeterProvider({ - views: [new View({ aggregation: new SumAggregation(), instrumentName: '*' })] + views: [ + new View({ aggregation: new SumAggregation(), instrumentName: '*' }), + ], }); meterProvider.addMetricReader(reader); const meter = meterProvider.getMeter('test'); @@ -538,7 +579,11 @@ describe('PrometheusSerializer', () => { const pointData = metric.dataPoints as DataPoint[]; assert.strictEqual(pointData.length, 1); - const result = serializer['_serializeSingularDataPoint'](metric.descriptor.name, metric.descriptor.type, pointData[0]); + const result = serializer['_serializeSingularDataPoint']( + metric.descriptor.name, + metric.descriptor.type, + pointData[0] + ); return result; } @@ -556,7 +601,7 @@ describe('PrometheusSerializer', () => { const serializer = new PrometheusSerializer(); const result = await testSerializer(serializer, 'test_total', counter => { - counter.add(1, ({ + counter.add(1, { true: true, false: false, array: [1, undefined, null, 2], @@ -565,7 +610,7 @@ describe('PrometheusSerializer', () => { NaN: NaN, null: null, undefined: undefined, - } as unknown) as MetricAttributes); + } as unknown as MetricAttributes); }); assert.strictEqual( @@ -598,14 +643,14 @@ describe('PrometheusSerializer', () => { const serializer = new PrometheusSerializer(); const result = await testSerializer(serializer, 'test_total', counter => { - counter.add(1, ({ + counter.add(1, { backslash: '\u005c', // \ => \\ (\u005c\u005c) doubleQuote: '\u0022', // " => \" (\u005c\u0022) lineFeed: '\u000a', // ↵ => \n (\u005c\u006e) backslashN: '\u005c\u006e', // \n => \\n (\u005c\u005c\u006e) backslashDoubleQuote: '\u005c\u0022', // \" => \\\" (\u005c\u005c\u005c\u0022) backslashLineFeed: '\u005c\u000a', // \↵ => \\\n (\u005c\u005c\u005c\u006e) - } as unknown) as MetricAttributes); + } as unknown as MetricAttributes); }); assert.strictEqual( @@ -628,9 +673,9 @@ describe('PrometheusSerializer', () => { // if you try to use an attribute name like account-id prometheus will complain // with an error like: // error while linting: text format parsing error in line 282: expected '=' after label name, found '-' - counter.add(1, ({ + counter.add(1, { 'account-id': '123456', - } as unknown) as MetricAttributes); + } as unknown as MetricAttributes); }); assert.strictEqual( @@ -643,19 +688,21 @@ describe('PrometheusSerializer', () => { describe('_serializeResource', () => { it('should serialize resource', () => { const serializer = new PrometheusSerializer(undefined, true); - const result = serializer['_serializeResource'](new 
Resource({ - env: 'prod', - hostname: 'myhost', - datacenter: 'sdc', - region: 'europe', - owner: 'frontend' - })); + const result = serializer['_serializeResource']( + new Resource({ + env: 'prod', + hostname: 'myhost', + datacenter: 'sdc', + region: 'europe', + owner: 'frontend', + }) + ); assert.strictEqual( result, '# HELP target_info Target metadata\n' + - '# TYPE target_info gauge\n' + - 'target_info{env="prod",hostname="myhost",datacenter="sdc",region="europe",owner="frontend"} 1\n' + '# TYPE target_info gauge\n' + + 'target_info{env="prod",hostname="myhost",datacenter="sdc",region="europe",owner="frontend"} 1\n' ); }); }); diff --git a/experimental/packages/opentelemetry-exporter-prometheus/test/util.ts b/experimental/packages/opentelemetry-exporter-prometheus/test/util.ts index 8caa7795cd..be07dd3d8c 100644 --- a/experimental/packages/opentelemetry-exporter-prometheus/test/util.ts +++ b/experimental/packages/opentelemetry-exporter-prometheus/test/util.ts @@ -28,11 +28,19 @@ export function mockHrTime() { sinon.stub(perf_hooks.performance, 'now').returns(mockedHrTimeMs); } -export const serviceName = Resource.default().attributes[SemanticResourceAttributes.SERVICE_NAME]?.toString() - .replace(/\\/g, '\\\\').replace(/\n/g, '\\n'); -export const sdkLanguage = Resource.default().attributes[SemanticResourceAttributes.TELEMETRY_SDK_LANGUAGE]?.toString() - .replace(/\\/g, '\\\\').replace(/\n/g, '\\n'); -export const sdkName = Resource.default().attributes[SemanticResourceAttributes.TELEMETRY_SDK_NAME]?.toString() - .replace(/\\/g, '\\\\').replace(/\n/g, '\\n'); -export const sdkVersion = Resource.default().attributes[SemanticResourceAttributes.TELEMETRY_SDK_VERSION]?.toString() - .replace(/\\/g, '\\\\').replace(/\n/g, '\\n'); +export const serviceName = Resource.default() + .attributes[SemanticResourceAttributes.SERVICE_NAME]?.toString() + .replace(/\\/g, '\\\\') + .replace(/\n/g, '\\n'); +export const sdkLanguage = Resource.default() + .attributes[SemanticResourceAttributes.TELEMETRY_SDK_LANGUAGE]?.toString() + .replace(/\\/g, '\\\\') + .replace(/\n/g, '\\n'); +export const sdkName = Resource.default() + .attributes[SemanticResourceAttributes.TELEMETRY_SDK_NAME]?.toString() + .replace(/\\/g, '\\\\') + .replace(/\n/g, '\\n'); +export const sdkVersion = Resource.default() + .attributes[SemanticResourceAttributes.TELEMETRY_SDK_VERSION]?.toString() + .replace(/\\/g, '\\\\') + .replace(/\n/g, '\\n'); diff --git a/experimental/packages/opentelemetry-instrumentation-fetch/src/fetch.ts b/experimental/packages/opentelemetry-instrumentation-fetch/src/fetch.ts index f30c185bce..14862a8e81 100644 --- a/experimental/packages/opentelemetry-instrumentation-fetch/src/fetch.ts +++ b/experimental/packages/opentelemetry-instrumentation-fetch/src/fetch.ts @@ -70,7 +70,9 @@ export interface FetchInstrumentationConfig extends InstrumentationConfig { /** * This class represents a fetch plugin for auto instrumentation */ -export class FetchInstrumentation extends InstrumentationBase> { +export class FetchInstrumentation extends InstrumentationBase< + Promise +> { readonly component: string = 'fetch'; readonly version: string = VERSION; moduleName = this.component; @@ -78,11 +80,7 @@ export class FetchInstrumentation extends InstrumentationBase> private _tasksCount = 0; constructor(config?: FetchInstrumentationConfig) { - super( - '@opentelemetry/instrumentation-fetch', - VERSION, - config - ); + super('@opentelemetry/instrumentation-fetch', VERSION, config); } init(): void {} @@ -301,7 +299,9 @@ export class 
FetchInstrumentation extends InstrumentationBase> ...args: Parameters ): Promise { const self = this; - const url = web.parseUrl(args[0] instanceof Request ? args[0].url : args[0]).href; + const url = web.parseUrl( + args[0] instanceof Request ? args[0].url : args[0] + ).href; const options = args[0] instanceof Request ? args[0] : args[1] || {}; const createdSpan = plugin._createSpan(url, options); @@ -388,7 +388,10 @@ export class FetchInstrumentation extends InstrumentationBase> // TypeScript complains about arrow function captured a this typed as globalThis // ts(7041) return original - .apply(self, options instanceof Request ? [options] : [url, options]) + .apply( + self, + options instanceof Request ? [options] : [url, options] + ) .then( onSuccess.bind(self, createdSpan, resolve), onError.bind(self, createdSpan, reject) @@ -405,8 +408,8 @@ export class FetchInstrumentation extends InstrumentationBase> request: Request | RequestInit, result: Response | FetchError ) { - const applyCustomAttributesOnSpan = this._getConfig() - .applyCustomAttributesOnSpan; + const applyCustomAttributesOnSpan = + this._getConfig().applyCustomAttributesOnSpan; if (applyCustomAttributesOnSpan) { safeExecuteInTheMiddle( () => applyCustomAttributesOnSpan(span, request, result), @@ -437,10 +440,7 @@ export class FetchInstrumentation extends InstrumentationBase> const observer = new PerformanceObserver(list => { const perfObsEntries = list.getEntries() as PerformanceResourceTiming[]; perfObsEntries.forEach(entry => { - if ( - entry.initiatorType === 'fetch' && - entry.name === spanUrl - ) { + if (entry.initiatorType === 'fetch' && entry.name === spanUrl) { entries.push(entry); } }); diff --git a/experimental/packages/opentelemetry-instrumentation-fetch/test/fetch.test.ts b/experimental/packages/opentelemetry-instrumentation-fetch/test/fetch.test.ts index 439ca17cc8..40f3381dbb 100644 --- a/experimental/packages/opentelemetry-instrumentation-fetch/test/fetch.test.ts +++ b/experimental/packages/opentelemetry-instrumentation-fetch/test/fetch.test.ts @@ -184,7 +184,10 @@ describe('fetch', () => { response.status = 405; response.statusText = 'OK'; resolve(new window.Response('foo', response)); - } else if ((input instanceof Request && input.url === url) || input === url) { + } else if ( + (input instanceof Request && input.url === url) || + input === url + ) { response.status = 200; response.statusText = 'OK'; resolve(new window.Response(JSON.stringify(response), response)); @@ -241,39 +244,46 @@ describe('fetch', () => { // this process is scheduled at the same time the fetch promise is resolved // due to this we can't rely on getData resolution to know that the span has ended let resolveEndSpan: (value: unknown) => void; - const spanEnded = new Promise(r => resolveEndSpan = r); - const readSpy = sinon.spy(window.ReadableStreamDefaultReader.prototype, 'read'); - const endSpanStub: sinon.SinonStub = sinon.stub(FetchInstrumentation.prototype, '_endSpan' as any) + const spanEnded = new Promise(r => (resolveEndSpan = r)); + const readSpy = sinon.spy( + window.ReadableStreamDefaultReader.prototype, + 'read' + ); + const endSpanStub: sinon.SinonStub = sinon + .stub(FetchInstrumentation.prototype, '_endSpan' as any) .callsFake(async function (this: FetchInstrumentation, ...args: any[]) { resolveEndSpan({}); return endSpanStub.wrappedMethod.apply(this, args); }); rootSpan = webTracerWithZone.startSpan('root'); - await api.context.with(api.trace.setSpan(api.context.active(), rootSpan), async () => { - fakeNow = 0; - try { - 
const responsePromise = getData(fileUrl, method); - fakeNow = 300; - const response = await responsePromise; - - // if the url is not ignored, body.read should be called by now - // awaiting for the span to end - if (readSpy.callCount > 0) await spanEnded; - - // this is a bit tricky as the only way to get all request headers from - // fetch is to use json() - lastResponse = await response.json(); - const headers: { [key: string]: string } = {}; - Object.keys(lastResponse.headers).forEach(key => { - headers[key.toLowerCase()] = lastResponse.headers[key]; - }); - lastResponse.headers = headers; - } catch (e) { - lastResponse = undefined; + await api.context.with( + api.trace.setSpan(api.context.active(), rootSpan), + async () => { + fakeNow = 0; + try { + const responsePromise = getData(fileUrl, method); + fakeNow = 300; + const response = await responsePromise; + + // if the url is not ignored, body.read should be called by now + // awaiting for the span to end + if (readSpy.callCount > 0) await spanEnded; + + // this is a bit tricky as the only way to get all request headers from + // fetch is to use json() + lastResponse = await response.json(); + const headers: { [key: string]: string } = {}; + Object.keys(lastResponse.headers).forEach(key => { + headers[key.toLowerCase()] = lastResponse.headers[key]; + }); + lastResponse.headers = headers; + } catch (e) { + lastResponse = undefined; + } + await sinon.clock.runAllAsync(); } - await sinon.clock.runAllAsync(); - }); + ); }; beforeEach(() => { @@ -541,7 +551,7 @@ describe('fetch', () => { it('should keep custom headers with a request object and a headers object', () => { const r = new Request('url', { - headers: new Headers({'foo': 'bar'}) + headers: new Headers({ foo: 'bar' }), }); window.fetch(r).catch(() => {}); assert.ok(r.headers.get('foo') === 'bar'); @@ -550,7 +560,7 @@ describe('fetch', () => { it('should keep custom headers with url, untyped request object and typed headers object', () => { const url = 'url'; const init = { - headers: new Headers({'foo': 'bar'}) + headers: new Headers({ foo: 'bar' }), }; window.fetch(url, init).catch(() => {}); assert.ok(init.headers.get('foo') === 'bar'); @@ -559,7 +569,7 @@ describe('fetch', () => { it('should keep custom headers with url, untyped request object and untyped headers object', () => { const url = 'url'; const init = { - headers: {'foo': 'bar'} + headers: { foo: 'bar' }, }; window.fetch(url, init).catch(() => {}); assert.ok(init.headers['foo'] === 'bar'); @@ -567,11 +577,14 @@ describe('fetch', () => { it('should pass request object as first parameter to the original function (#2411)', () => { const r = new Request(url); - return window.fetch(r).then(() => { - assert.ok(true); - }, (response: Response) => { - assert.fail(response.statusText); - }); + return window.fetch(r).then( + () => { + assert.ok(true); + }, + (response: Response) => { + assert.fail(response.statusText); + } + ); }); it('should NOT clear the resources', () => { @@ -625,7 +638,7 @@ describe('fetch', () => { describe('applyCustomAttributesOnSpan option', () => { const prepare = async ( url: string, - applyCustomAttributesOnSpan: FetchCustomAttributeFunction, + applyCustomAttributesOnSpan: FetchCustomAttributeFunction ) => { const propagateTraceHeaderCorsUrls = [url]; @@ -640,12 +653,9 @@ describe('fetch', () => { }); it('applies attributes when the request is succesful', async () => { - await prepare( - url, - span => { - span.setAttribute(CUSTOM_ATTRIBUTE_KEY, 'custom value'); - }, - ); + await prepare(url, span 
=> { + span.setAttribute(CUSTOM_ATTRIBUTE_KEY, 'custom value'); + }); const span: tracing.ReadableSpan = exportSpy.args[1][0][0]; const attributes = span.attributes; @@ -653,12 +663,9 @@ describe('fetch', () => { }); it('applies custom attributes when the request fails', async () => { - await prepare( - badUrl, - span => { - span.setAttribute(CUSTOM_ATTRIBUTE_KEY, 'custom value'); - }, - ); + await prepare(badUrl, span => { + span.setAttribute(CUSTOM_ATTRIBUTE_KEY, 'custom value'); + }); const span: tracing.ReadableSpan = exportSpy.args[1][0][0]; const attributes = span.attributes; @@ -716,11 +723,14 @@ describe('fetch', () => { }); it('should pass request object as the first parameter to the original function (#2411)', () => { const r = new Request(url); - return window.fetch(r).then(() => { - assert.ok(true); - }, (response: Response) => { - assert.fail(response.statusText); - }); + return window.fetch(r).then( + () => { + assert.ok(true); + }, + (response: Response) => { + assert.fail(response.statusText); + } + ); }); }); diff --git a/experimental/packages/opentelemetry-instrumentation-grpc/src/enums/AttributeValues.ts b/experimental/packages/opentelemetry-instrumentation-grpc/src/enums/AttributeValues.ts index 0cdcb14311..a08898c166 100644 --- a/experimental/packages/opentelemetry-instrumentation-grpc/src/enums/AttributeValues.ts +++ b/experimental/packages/opentelemetry-instrumentation-grpc/src/enums/AttributeValues.ts @@ -15,9 +15,9 @@ */ interface AttributeValuesType { - RPC_SYSTEM: string + RPC_SYSTEM: string; } export const AttributeValues: Readonly = { - RPC_SYSTEM: 'grpc' + RPC_SYSTEM: 'grpc', }; diff --git a/experimental/packages/opentelemetry-instrumentation-grpc/src/grpc-js/clientUtils.ts b/experimental/packages/opentelemetry-instrumentation-grpc/src/grpc-js/clientUtils.ts index 27486118bd..050eee24b1 100644 --- a/experimental/packages/opentelemetry-instrumentation-grpc/src/grpc-js/clientUtils.ts +++ b/experimental/packages/opentelemetry-instrumentation-grpc/src/grpc-js/clientUtils.ts @@ -92,10 +92,7 @@ export function makeGrpcClientRemoteCall( if (err) { if (err.code) { span.setStatus(_grpcStatusCodeToSpanStatus(err.code)); - span.setAttribute( - SemanticAttributes.RPC_GRPC_STATUS_CODE, - err.code - ); + span.setAttribute(SemanticAttributes.RPC_GRPC_STATUS_CODE, err.code); } span.setAttributes({ [AttributeNames.GRPC_ERROR_NAME]: err.name, diff --git a/experimental/packages/opentelemetry-instrumentation-grpc/src/grpc-js/index.ts b/experimental/packages/opentelemetry-instrumentation-grpc/src/grpc-js/index.ts index 5c99fab6ae..9473a17994 100644 --- a/experimental/packages/opentelemetry-instrumentation-grpc/src/grpc-js/index.ts +++ b/experimental/packages/opentelemetry-instrumentation-grpc/src/grpc-js/index.ts @@ -49,7 +49,7 @@ import { getMetadata, } from './clientUtils'; import { EventEmitter } from 'events'; -import {_extractMethodAndService, metadataCapture} from '../utils'; +import { _extractMethodAndService, metadataCapture } from '../utils'; import { AttributeValues } from '../enums/AttributeValues'; import { SemanticAttributes } from '@opentelemetry/semantic-conventions'; @@ -59,7 +59,7 @@ export class GrpcJsInstrumentation extends InstrumentationBase { constructor( name: string, version: string, - config?: GrpcInstrumentationConfig, + config?: GrpcInstrumentationConfig ) { super(name, version, config); this._metadataCapture = this._createMetadataCapture(); @@ -190,7 +190,9 @@ export class GrpcJsInstrumentation extends InstrumentationBase { kind: SpanKind.SERVER, }; - 
instrumentation._diag.debug(`patch func: ${JSON.stringify(spanOptions)}`); + instrumentation._diag.debug( + `patch func: ${JSON.stringify(spanOptions)}` + ); context.with( propagation.extract(ROOT_CONTEXT, call.metadata, { @@ -203,7 +205,8 @@ export class GrpcJsInstrumentation extends InstrumentationBase { const span = instrumentation.tracer .startSpan(spanName, spanOptions) .setAttributes({ - [SemanticAttributes.RPC_SYSTEM]: AttributeValues.RPC_SYSTEM, + [SemanticAttributes.RPC_SYSTEM]: + AttributeValues.RPC_SYSTEM, [SemanticAttributes.RPC_METHOD]: method, [SemanticAttributes.RPC_SERVICE]: service, }); @@ -307,10 +310,19 @@ export class GrpcJsInstrumentation extends InstrumentationBase { [SemanticAttributes.RPC_SERVICE]: service, }); - instrumentation._metadataCapture.client.captureRequestMetadata(span, metadata); + instrumentation._metadataCapture.client.captureRequestMetadata( + span, + metadata + ); return context.with(trace.setSpan(context.active(), span), () => - makeGrpcClientRemoteCall(instrumentation._metadataCapture, original, args, metadata, this)(span) + makeGrpcClientRemoteCall( + instrumentation._metadataCapture, + original, + args, + metadata, + this + )(span) ); } Object.assign(clientMethodTrace, original); @@ -351,9 +363,15 @@ export class GrpcJsInstrumentation extends InstrumentationBase { return { client: { - captureRequestMetadata: metadataCapture('request', config.metadataToSpanAttributes?.client?.requestMetadata ?? []), - captureResponseMetadata: metadataCapture('response', config.metadataToSpanAttributes?.client?.responseMetadata ?? []) - } + captureRequestMetadata: metadataCapture( + 'request', + config.metadataToSpanAttributes?.client?.requestMetadata ?? [] + ), + captureResponseMetadata: metadataCapture( + 'response', + config.metadataToSpanAttributes?.client?.responseMetadata ?? 
[] + ), + }, }; } } diff --git a/experimental/packages/opentelemetry-instrumentation-grpc/src/grpc-js/serverUtils.ts b/experimental/packages/opentelemetry-instrumentation-grpc/src/grpc-js/serverUtils.ts index c07cc56f2f..ecfbc3d957 100644 --- a/experimental/packages/opentelemetry-instrumentation-grpc/src/grpc-js/serverUtils.ts +++ b/experimental/packages/opentelemetry-instrumentation-grpc/src/grpc-js/serverUtils.ts @@ -124,10 +124,7 @@ function clientStreamAndUnaryHandler( code: _grpcStatusCodeToOpenTelemetryStatusCode(err.code), message: err.message, }); - span.setAttribute( - SemanticAttributes.RPC_GRPC_STATUS_CODE, - err.code - ); + span.setAttribute(SemanticAttributes.RPC_GRPC_STATUS_CODE, err.code); } span.setAttributes({ [AttributeNames.GRPC_ERROR_NAME]: err.name, diff --git a/experimental/packages/opentelemetry-instrumentation-grpc/src/grpc-js/types.ts b/experimental/packages/opentelemetry-instrumentation-grpc/src/grpc-js/types.ts index 06ee4125d0..e0ae1545d2 100644 --- a/experimental/packages/opentelemetry-instrumentation-grpc/src/grpc-js/types.ts +++ b/experimental/packages/opentelemetry-instrumentation-grpc/src/grpc-js/types.ts @@ -55,7 +55,8 @@ export type GrpcClientFunc = ((...args: unknown[]) => GrpcEmitter) & { export type ServerRegisterFunction = typeof grpcJs.Server.prototype.register; -export type MakeClientConstructorFunction = typeof grpcJs.makeGenericClientConstructor; +export type MakeClientConstructorFunction = + typeof grpcJs.makeGenericClientConstructor; export type { HandleCall } from '@grpc/grpc-js/build/src/server-call'; export type { PackageDefinition } from '@grpc/grpc-js/build/src/make-client'; diff --git a/experimental/packages/opentelemetry-instrumentation-grpc/src/grpc/clientUtils.ts b/experimental/packages/opentelemetry-instrumentation-grpc/src/grpc/clientUtils.ts index b16681f99e..47de2ecf34 100644 --- a/experimental/packages/opentelemetry-instrumentation-grpc/src/grpc/clientUtils.ts +++ b/experimental/packages/opentelemetry-instrumentation-grpc/src/grpc/clientUtils.ts @@ -18,12 +18,7 @@ import type * as grpcTypes from 'grpc'; import type * as events from 'events'; import { SendUnaryDataCallback, GrpcClientFunc } from './types'; import { SemanticAttributes } from '@opentelemetry/semantic-conventions'; -import { - context, - Span, - SpanStatusCode, - propagation, -} from '@opentelemetry/api'; +import { context, Span, SpanStatusCode, propagation } from '@opentelemetry/api'; import { _grpcStatusCodeToSpanStatus, _grpcStatusCodeToOpenTelemetryStatusCode, @@ -56,10 +51,7 @@ export const makeGrpcClientRemoteCall = function ( if (err) { if (err.code) { span.setStatus(_grpcStatusCodeToSpanStatus(err.code)); - span.setAttribute( - SemanticAttributes.RPC_GRPC_STATUS_CODE, - err.code - ); + span.setAttribute(SemanticAttributes.RPC_GRPC_STATUS_CODE, err.code); } span.setAttributes({ [AttributeNames.GRPC_ERROR_NAME]: err.name, @@ -103,11 +95,12 @@ export const makeGrpcClientRemoteCall = function ( setSpanContext(metadata); const call = original.apply(self, args); - ((call as unknown) as events.EventEmitter).on( + (call as unknown as events.EventEmitter).on( 'metadata', responseMetadata => { metadataCapture.client.captureResponseMetadata(span, responseMetadata); - }); + } + ); // if server stream or bidi if (original.responseStream) { @@ -121,7 +114,7 @@ export const makeGrpcClientRemoteCall = function ( } }; context.bind(context.active(), call); - ((call as unknown) as events.EventEmitter).on( + (call as unknown as events.EventEmitter).on( 'error', (err: 
grpcTypes.ServiceError) => { span.setStatus({ @@ -132,14 +125,17 @@ export const makeGrpcClientRemoteCall = function ( [AttributeNames.GRPC_ERROR_NAME]: err.name, [AttributeNames.GRPC_ERROR_MESSAGE]: err.message, }); - if(err.code != null) { - span.setAttribute(SemanticAttributes.RPC_GRPC_STATUS_CODE, err.code); + if (err.code != null) { + span.setAttribute( + SemanticAttributes.RPC_GRPC_STATUS_CODE, + err.code + ); } endSpan(); } ); - ((call as unknown) as events.EventEmitter).on( + (call as unknown as events.EventEmitter).on( 'status', (status: grpcTypes.StatusObject) => { span.setStatus({ code: SpanStatusCode.UNSET }); diff --git a/experimental/packages/opentelemetry-instrumentation-grpc/src/grpc/index.ts b/experimental/packages/opentelemetry-instrumentation-grpc/src/grpc/index.ts index 883bd01a38..dee3c9c3e3 100644 --- a/experimental/packages/opentelemetry-instrumentation-grpc/src/grpc/index.ts +++ b/experimental/packages/opentelemetry-instrumentation-grpc/src/grpc/index.ts @@ -41,9 +41,13 @@ import { serverStreamAndBidiHandler, } from './serverUtils'; import { makeGrpcClientRemoteCall, getMetadata } from './clientUtils'; -import {_extractMethodAndService, _methodIsIgnored, metadataCapture} from '../utils'; +import { + _extractMethodAndService, + _methodIsIgnored, + metadataCapture, +} from '../utils'; import { SemanticAttributes } from '@opentelemetry/semantic-conventions'; -import {AttributeValues} from '../enums/AttributeValues'; +import { AttributeValues } from '../enums/AttributeValues'; /** * Holding reference to grpc module here to access constant of grpc modules @@ -53,7 +57,7 @@ let grpcClient: typeof grpcTypes; export class GrpcNativeInstrumentation extends InstrumentationBase< typeof grpcTypes - > { +> { private _metadataCapture: metadataCaptureType; constructor( @@ -196,7 +200,9 @@ export class GrpcNativeInstrumentation extends InstrumentationBase< kind: SpanKind.SERVER, }; - instrumentation._diag.debug(`patch func: ${JSON.stringify(spanOptions)}`); + instrumentation._diag.debug( + `patch func: ${JSON.stringify(spanOptions)}` + ); context.with( propagation.extract(context.active(), call.metadata, { @@ -209,7 +215,8 @@ export class GrpcNativeInstrumentation extends InstrumentationBase< const span = instrumentation.tracer .startSpan(spanName, spanOptions) .setAttributes({ - [SemanticAttributes.RPC_SYSTEM]: AttributeValues.RPC_SYSTEM, + [SemanticAttributes.RPC_SYSTEM]: + AttributeValues.RPC_SYSTEM, [SemanticAttributes.RPC_METHOD]: method, [SemanticAttributes.RPC_SERVICE]: service, }); @@ -305,16 +312,20 @@ export class GrpcNativeInstrumentation extends InstrumentationBase< const args = Array.prototype.slice.call(arguments); const metadata = getMetadata(grpcClient, original, args); const { service, method } = _extractMethodAndService(original.path); - const span = instrumentation.tracer.startSpan(name, { - kind: SpanKind.CLIENT, - }) + const span = instrumentation.tracer + .startSpan(name, { + kind: SpanKind.CLIENT, + }) .setAttributes({ [SemanticAttributes.RPC_SYSTEM]: AttributeValues.RPC_SYSTEM, [SemanticAttributes.RPC_METHOD]: method, [SemanticAttributes.RPC_SERVICE]: service, }); - instrumentation._metadataCapture.client.captureRequestMetadata(span, metadata); + instrumentation._metadataCapture.client.captureRequestMetadata( + span, + metadata + ); return context.with(trace.setSpan(context.active(), span), () => makeGrpcClientRemoteCall( @@ -336,9 +347,15 @@ export class GrpcNativeInstrumentation extends InstrumentationBase< return { client: { - captureRequestMetadata: 
metadataCapture('request', config.metadataToSpanAttributes?.client?.requestMetadata ?? []), - captureResponseMetadata: metadataCapture('response', config.metadataToSpanAttributes?.client?.responseMetadata ?? []) - } + captureRequestMetadata: metadataCapture( + 'request', + config.metadataToSpanAttributes?.client?.requestMetadata ?? [] + ), + captureResponseMetadata: metadataCapture( + 'response', + config.metadataToSpanAttributes?.client?.responseMetadata ?? [] + ), + }, }; } } diff --git a/experimental/packages/opentelemetry-instrumentation-grpc/src/grpc/serverUtils.ts b/experimental/packages/opentelemetry-instrumentation-grpc/src/grpc/serverUtils.ts index 6e0c62d746..d97a947443 100644 --- a/experimental/packages/opentelemetry-instrumentation-grpc/src/grpc/serverUtils.ts +++ b/experimental/packages/opentelemetry-instrumentation-grpc/src/grpc/serverUtils.ts @@ -48,10 +48,7 @@ export const clientStreamAndUnaryHandler = function ( code: _grpcStatusCodeToOpenTelemetryStatusCode(err.code), message: err.message, }); - span.setAttribute( - SemanticAttributes.RPC_GRPC_STATUS_CODE, - err.code - ); + span.setAttribute(SemanticAttributes.RPC_GRPC_STATUS_CODE, err.code); } span.setAttributes({ [AttributeNames.GRPC_ERROR_NAME]: err.name, @@ -114,7 +111,7 @@ export const serverStreamAndBidiHandler = function ( [AttributeNames.GRPC_ERROR_NAME]: err.name, [AttributeNames.GRPC_ERROR_MESSAGE]: err.message, }); - if(err.code != null) { + if (err.code != null) { span.setAttribute(SemanticAttributes.RPC_GRPC_STATUS_CODE, err.code); } endSpan(); diff --git a/experimental/packages/opentelemetry-instrumentation-grpc/src/instrumentation.ts b/experimental/packages/opentelemetry-instrumentation-grpc/src/instrumentation.ts index d6f782684b..c2a8946f97 100644 --- a/experimental/packages/opentelemetry-instrumentation-grpc/src/instrumentation.ts +++ b/experimental/packages/opentelemetry-instrumentation-grpc/src/instrumentation.ts @@ -31,9 +31,7 @@ export class GrpcInstrumentation { '@opentelemetry/instrumentation-grpc'; public readonly instrumentationVersion: string = VERSION; - constructor( - config?: GrpcInstrumentationConfig - ) { + constructor(config?: GrpcInstrumentationConfig) { this._grpcJsInstrumentation = new GrpcJsInstrumentation( this.instrumentationName, this.instrumentationVersion, diff --git a/experimental/packages/opentelemetry-instrumentation-grpc/src/types.ts b/experimental/packages/opentelemetry-instrumentation-grpc/src/types.ts index 7fd56a5754..67940a48e0 100644 --- a/experimental/packages/opentelemetry-instrumentation-grpc/src/types.ts +++ b/experimental/packages/opentelemetry-instrumentation-grpc/src/types.ts @@ -29,15 +29,21 @@ export interface GrpcInstrumentationConfig extends InstrumentationConfig { /** Map the following gRPC metadata to span attributes. 
*/ metadataToSpanAttributes?: { client?: { - responseMetadata?: string[], + responseMetadata?: string[]; requestMetadata?: string[]; - } - } + }; + }; } export type metadataCaptureType = { client: { - captureRequestMetadata: (span: Span, metadata: grpcJsTypes.Metadata | grpcTypes.Metadata) => void, - captureResponseMetadata: (span: Span, metadata: grpcJsTypes.Metadata | grpcTypes.Metadata) => void - } + captureRequestMetadata: ( + span: Span, + metadata: grpcJsTypes.Metadata | grpcTypes.Metadata + ) => void; + captureResponseMetadata: ( + span: Span, + metadata: grpcJsTypes.Metadata | grpcTypes.Metadata + ) => void; + }; }; diff --git a/experimental/packages/opentelemetry-instrumentation-grpc/src/utils.ts b/experimental/packages/opentelemetry-instrumentation-grpc/src/utils.ts index 989cfd76d6..473cc920a1 100644 --- a/experimental/packages/opentelemetry-instrumentation-grpc/src/utils.ts +++ b/experimental/packages/opentelemetry-instrumentation-grpc/src/utils.ts @@ -14,7 +14,7 @@ * limitations under the License. */ -import {SpanStatusCode, SpanStatus, Span} from '@opentelemetry/api'; +import { SpanStatusCode, SpanStatus, Span } from '@opentelemetry/api'; import type * as grpcTypes from 'grpc'; import type * as grpcJsTypes from '@grpc/grpc-js'; import { IgnoreMatcher } from './types'; @@ -100,29 +100,38 @@ export const _methodIsIgnored = ( * Return method and service values getting from grpc name/path * @param name the grpc name/path */ -export const _extractMethodAndService = (name: string): { service: string, method: string } => { +export const _extractMethodAndService = ( + name: string +): { service: string; method: string } => { const serviceMethod = name.replace(/^\//, '').split('/'); const service = serviceMethod.shift() || ''; const method = serviceMethod.join('/'); - return ({ + return { service, - method - }); + method, + }; }; - -export function metadataCapture(type: 'request' | 'response', metadataToAdd: string[]) { - const normalizedMetadataAttributes = new Map(metadataToAdd.map(value => [value.toLowerCase(), value.toLowerCase().replace(/-/g, '_')])); +export function metadataCapture( + type: 'request' | 'response', + metadataToAdd: string[] +) { + const normalizedMetadataAttributes = new Map( + metadataToAdd.map(value => [ + value.toLowerCase(), + value.toLowerCase().replace(/-/g, '_'), + ]) + ); return (span: Span, metadata: grpcJsTypes.Metadata | grpcTypes.Metadata) => { - for (const [capturedMetadata, normalizedMetadata] of normalizedMetadataAttributes) { - const metadataValues = - metadata - .get(capturedMetadata) - .flatMap(value => - typeof value === 'string' ? value.toString() : [] - ); + for (const [ + capturedMetadata, + normalizedMetadata, + ] of normalizedMetadataAttributes) { + const metadataValues = metadata + .get(capturedMetadata) + .flatMap(value => (typeof value === 'string' ? 
value.toString() : [])); if (metadataValues === undefined || metadataValues === []) { continue; @@ -134,4 +143,3 @@ export function metadataCapture(type: 'request' | 'response', metadataToAdd: str } }; } - diff --git a/experimental/packages/opentelemetry-instrumentation-grpc/test/helper.ts b/experimental/packages/opentelemetry-instrumentation-grpc/test/helper.ts index 2c6e469606..5b16525a5d 100644 --- a/experimental/packages/opentelemetry-instrumentation-grpc/test/helper.ts +++ b/experimental/packages/opentelemetry-instrumentation-grpc/test/helper.ts @@ -17,14 +17,17 @@ import { Attributes, context, - propagation, SpanKind, trace, + propagation, + SpanKind, + trace, } from '@opentelemetry/api'; import { W3CTraceContextPropagator } from '@opentelemetry/core'; import { NodeTracerProvider } from '@opentelemetry/sdk-trace-node'; import { AsyncHooksContextManager } from '@opentelemetry/context-async-hooks'; import { ContextManager } from '@opentelemetry/api'; import { - InMemorySpanExporter, ReadableSpan, + InMemorySpanExporter, + ReadableSpan, SimpleSpanProcessor, } from '@opentelemetry/sdk-trace-base'; import * as assert from 'assert'; @@ -89,22 +92,25 @@ interface TestGrpcCall { } // Compare two arrays using an equal function f -const arrayIsEqual = (f: any) => ([x, ...xs]: any) => ([y, ...ys]: any): any => - x === undefined && y === undefined - ? true - : Boolean(f(x)(y)) && arrayIsEqual(f)(xs)(ys); +const arrayIsEqual = + (f: any) => + ([x, ...xs]: any) => + ([y, ...ys]: any): any => + x === undefined && y === undefined + ? true + : Boolean(f(x)(y)) && arrayIsEqual(f)(xs)(ys); // Return true if two requests has the same num value const requestEqual = (x: TestRequestResponse) => (y: TestRequestResponse) => x.num !== undefined && x.num === y.num; // Check if its equal requests or array of requests -const checkEqual = (x: TestRequestResponse | TestRequestResponse[]) => ( - y: TestRequestResponse | TestRequestResponse[] -) => - x instanceof Array && y instanceof Array - ? arrayIsEqual(requestEqual)(x as any)(y as any) - : !(x instanceof Array) && !(y instanceof Array) +const checkEqual = + (x: TestRequestResponse | TestRequestResponse[]) => + (y: TestRequestResponse | TestRequestResponse[]) => + x instanceof Array && y instanceof Array + ? arrayIsEqual(requestEqual)(x as any)(y as any) + : !(x instanceof Array) && !(y instanceof Array) ? requestEqual(x)(y) : false; @@ -306,7 +312,10 @@ export const runTests = ( // in those cases, erro.code = request.num // This method returns the request - unaryMethodWithMetadata(call: ServerUnaryCall, callback: RequestCallback) { + unaryMethodWithMetadata( + call: ServerUnaryCall, + callback: RequestCallback + ) { const serverMetadata: any = new grpc.Metadata(); serverMetadata.add('server_metadata_key', 'server_metadata_value'); @@ -314,11 +323,11 @@ export const runTests = ( call.request.num <= MAX_ERROR_STATUS ? callback( - getError( - 'Unary Method with Metadata Error', - call.request.num - ) as grpcJs.ServiceError - ) + getError( + 'Unary Method with Metadata Error', + call.request.num + ) as grpcJs.ServiceError + ) : callback(null, { num: call.request.num }); }, @@ -326,11 +335,11 @@ export const runTests = ( unaryMethod(call: ServerUnaryCall, callback: RequestCallback) { call.request.num <= MAX_ERROR_STATUS ? 
callback( - getError( - 'Unary Method Error', - call.request.num - ) as grpcJs.ServiceError - ) + getError( + 'Unary Method Error', + call.request.num + ) as grpcJs.ServiceError + ) : callback(null, { num: call.request.num }); }, @@ -342,7 +351,7 @@ export const runTests = ( 'Unary Method Error', call.request.num ) as grpcJs.ServiceError - ) + ) : callback(null, { num: call.request.num }); }, @@ -499,26 +508,17 @@ export const runTests = ( clientSpan: ReadableSpan, methodName: string, attributesValidation?: { - serverAttributes?: Attributes, - clientAttributes?: Attributes - }) => { + serverAttributes?: Attributes; + clientAttributes?: Attributes; + } + ) => { const validations = { name: `grpc.pkg_test.GrpcTester/${methodName}`, status: grpc.status.OK, }; - assertSpan( - moduleName, - serverSpan, - SpanKind.SERVER, - validations - ); - assertSpan( - moduleName, - clientSpan, - SpanKind.CLIENT, - validations - ); + assertSpan(moduleName, serverSpan, SpanKind.SERVER, validations); + assertSpan(moduleName, clientSpan, SpanKind.CLIENT, validations); assertPropagation(serverSpan, clientSpan); @@ -546,9 +546,10 @@ export const runTests = ( provider: NodeTracerProvider, checkSpans = true, attributesValidation?: { - serverAttributes?: Attributes, - clientAttributes?: Attributes - }) => { + serverAttributes?: Attributes; + clientAttributes?: Attributes; + } + ) => { it(`should ${ checkSpans ? 'do' : 'not' }: create a rootSpan for client and a childSpan for server - ${ @@ -560,7 +561,8 @@ export const runTests = ( .then((result: TestRequestResponse | TestRequestResponse[]) => { assert.ok( checkEqual(result)(method.result), - 'gRPC call returns correct values'); + 'gRPC call returns correct values' + ); const spans = memoryExporter.getFinishedSpans(); if (checkSpans) { @@ -570,7 +572,12 @@ export const runTests = ( const serverSpan = spans[0]; const clientSpan = spans[1]; - validateSpans(serverSpan, clientSpan, method.methodName, attributesValidation); + validateSpans( + serverSpan, + clientSpan, + method.methodName, + attributesValidation + ); } else { assert.strictEqual(spans.length, 0); } @@ -583,9 +590,9 @@ export const runTests = ( provider: NodeTracerProvider, checkSpans = true, attributesValidation?: { - serverAttributes?: Attributes, - clientAttributes?: Attributes - } + serverAttributes?: Attributes; + clientAttributes?: Attributes; + } ) => { it(`should raise an error for client childSpan/server rootSpan - ${method.description} - status = OK`, () => { const expectEmpty = memoryExporter.getFinishedSpans(); @@ -593,7 +600,7 @@ export const runTests = ( const span = provider .getTracer('default') - .startSpan('TestSpan', {kind: SpanKind.PRODUCER}); + .startSpan('TestSpan', { kind: SpanKind.PRODUCER }); return context.with(trace.setSpan(context.active(), span), async () => { const rootSpan = trace.getSpan(context.active()); if (!rootSpan) { @@ -612,7 +619,12 @@ export const runTests = ( const serverSpan = spans[0]; const clientSpan = spans[1]; - validateSpans(serverSpan, clientSpan, method.methodName, attributesValidation); + validateSpans( + serverSpan, + clientSpan, + method.methodName, + attributesValidation + ); assert.strictEqual( rootSpan.spanContext().traceId, @@ -636,10 +648,16 @@ export const runTests = ( provider: NodeTracerProvider, checkSpans = true, attributesValidation: { - serverAttributes?: Attributes, - clientAttributes?: Attributes - }) => { - ClientServerValidationTest(method, provider, checkSpans, attributesValidation); + serverAttributes?: Attributes; + clientAttributes?: 
Attributes; + } + ) => { + ClientServerValidationTest( + method, + provider, + checkSpans, + attributesValidation + ); ErrorValidationTest(method, provider, checkSpans, attributesValidation); }; @@ -652,10 +670,10 @@ export const runTests = ( ErrorValidationTest(method, provider, checkSpans); }; - const insertError = ( - request: TestRequestResponse | TestRequestResponse[] - ) => (code: number) => - request instanceof Array ? [{ num: code }, ...request] : { num: code }; + const insertError = + (request: TestRequestResponse | TestRequestResponse[]) => + (code: number) => + request instanceof Array ? [{ num: code }, ...request] : { num: code }; const runErrorTest = ( method: typeof methodList[0], @@ -737,9 +755,7 @@ export const runTests = ( }); }; - const runClientMethodTest = ( - method: typeof methodList[0] - ) => { + const runClientMethodTest = (method: typeof methodList[0]) => { it(`should assign original properties for grpc remote method ${method.methodName}`, async () => { const patchedClientMethod = (client as any)[method.methodName]; const properties = Object.keys(patchedClientMethod); @@ -941,7 +957,7 @@ export const runTests = ( method: grpcClient.unaryMethodWithMetadata, request: requestList[0], result: requestList[0], - metadata: clientMetadata + metadata: clientMetadata, }; beforeEach(() => { @@ -954,9 +970,9 @@ export const runTests = ( metadataToSpanAttributes: { client: { requestMetadata: ['client_metadata_key'], - responseMetadata: ['server_metadata_key'] - } - } + responseMetadata: ['server_metadata_key'], + }, + }, }); plugin.setTracerProvider(provider); @@ -981,11 +997,17 @@ export const runTests = ( const attributeValidation = { clientAttributes: { 'rpc.request.metadata.client_metadata_key': 'client_metadata_value', - 'rpc.response.metadata.server_metadata_key': 'server_metadata_value' - } + 'rpc.response.metadata.server_metadata_key': + 'server_metadata_value', + }, }; - runTestWithAttributeValidation(customMetadataMethod, provider, true, attributeValidation); + runTestWithAttributeValidation( + customMetadataMethod, + provider, + true, + attributeValidation + ); }); }); }); diff --git a/experimental/packages/opentelemetry-instrumentation-grpc/test/utils/assertionUtils.ts b/experimental/packages/opentelemetry-instrumentation-grpc/test/utils/assertionUtils.ts index 2264d17b81..64ed00df76 100644 --- a/experimental/packages/opentelemetry-instrumentation-grpc/test/utils/assertionUtils.ts +++ b/experimental/packages/opentelemetry-instrumentation-grpc/test/utils/assertionUtils.ts @@ -62,7 +62,10 @@ export const assertSpan = ( span.status.code, grpcStatusCodeToOpenTelemetryStatusCode(validations.status) ); - assert.strictEqual(span.attributes[SemanticAttributes.RPC_GRPC_STATUS_CODE], validations.status); + assert.strictEqual( + span.attributes[SemanticAttributes.RPC_GRPC_STATUS_CODE], + validations.status + ); }; // Check if sourceSpan was propagated to targetSpan diff --git a/experimental/packages/opentelemetry-instrumentation-grpc/test/utils/extractMethodAndServiceUtils.ts b/experimental/packages/opentelemetry-instrumentation-grpc/test/utils/extractMethodAndServiceUtils.ts index 2d4faf744e..b63058d875 100644 --- a/experimental/packages/opentelemetry-instrumentation-grpc/test/utils/extractMethodAndServiceUtils.ts +++ b/experimental/packages/opentelemetry-instrumentation-grpc/test/utils/extractMethodAndServiceUtils.ts @@ -17,21 +17,34 @@ import { _extractMethodAndService } from '../../src/utils'; import * as assert from 'assert'; - const cases = [ - { value: 
'readBooks/BookStorage.Book', result: { method: 'BookStorage.Book', service: 'readBooks' } }, - { value: 'readBooks//BookStorage.Book', result: { method: '/BookStorage.Book', service: 'readBooks' } }, - { value: 'readBooks/BookStorage/.Book', result: { method: 'BookStorage/.Book', service: 'readBooks' } }, - { value: '/readBooks/BookStorage/.Book/Book', result: { method: 'BookStorage/.Book/Book', service: 'readBooks' } }, + { + value: 'readBooks/BookStorage.Book', + result: { method: 'BookStorage.Book', service: 'readBooks' }, + }, + { + value: 'readBooks//BookStorage.Book', + result: { method: '/BookStorage.Book', service: 'readBooks' }, + }, + { + value: 'readBooks/BookStorage/.Book', + result: { method: 'BookStorage/.Book', service: 'readBooks' }, + }, + { + value: '/readBooks/BookStorage/.Book/Book', + result: { method: 'BookStorage/.Book/Book', service: 'readBooks' }, + }, ]; describe('ExtractMethodAndService Util', () => { - cases.forEach(({ value, result }) => { it(`Should resolve use case correctly for: ${value}`, () => { const { method, service } = _extractMethodAndService(value); - assert.deepStrictEqual({ method, service }, { method: result.method, service: result.service }); + assert.deepStrictEqual( + { method, service }, + { method: result.method, service: result.service } + ); }); }); }); diff --git a/experimental/packages/opentelemetry-instrumentation-http/src/http.ts b/experimental/packages/opentelemetry-instrumentation-http/src/http.ts index 0f19ffd867..e2b649368e 100644 --- a/experimental/packages/opentelemetry-instrumentation-http/src/http.ts +++ b/experimental/packages/opentelemetry-instrumentation-http/src/http.ts @@ -27,9 +27,14 @@ import { trace, Histogram, MetricAttributes, - ValueType + ValueType, } from '@opentelemetry/api'; -import { hrTime, hrTimeDuration, hrTimeToMilliseconds, suppressTracing } from '@opentelemetry/core'; +import { + hrTime, + hrTimeDuration, + hrTimeToMilliseconds, + suppressTracing, +} from '@opentelemetry/core'; import type * as http from 'http'; import type * as https from 'https'; import { Socket } from 'net'; @@ -67,25 +72,27 @@ export class HttpInstrumentation extends InstrumentationBase { private _httpClientDurationHistogram!: Histogram; constructor(config?: HttpInstrumentationConfig) { - super( - '@opentelemetry/instrumentation-http', - VERSION, - config - ); + super('@opentelemetry/instrumentation-http', VERSION, config); this._headerCapture = this._createHeaderCapture(); } protected override _updateMetricInstruments() { - this._httpServerDurationHistogram = this.meter.createHistogram('http.server.duration', { - description: 'measures the duration of the inbound HTTP requests', - unit: 'ms', - valueType: ValueType.DOUBLE - }); - this._httpClientDurationHistogram = this.meter.createHistogram('http.client.duration', { - description: 'measures the duration of the outbound HTTP requests', - unit: 'ms', - valueType: ValueType.DOUBLE - }); + this._httpServerDurationHistogram = this.meter.createHistogram( + 'http.server.duration', + { + description: 'measures the duration of the inbound HTTP requests', + unit: 'ms', + valueType: ValueType.DOUBLE, + } + ); + this._httpClientDurationHistogram = this.meter.createHistogram( + 'http.client.duration', + { + description: 'measures the duration of the outbound HTTP requests', + unit: 'ms', + valueType: ValueType.DOUBLE, + } + ); } private _getConfig(): HttpInstrumentationConfig { @@ -97,7 +104,10 @@ export class HttpInstrumentation extends InstrumentationBase { this._headerCapture = 
this._createHeaderCapture(); } - init(): [InstrumentationNodeModuleDefinition, InstrumentationNodeModuleDefinition] { + init(): [ + InstrumentationNodeModuleDefinition, + InstrumentationNodeModuleDefinition + ] { return [this._getHttpsInstrumentation(), this._getHttpInstrumentation()]; } @@ -191,7 +201,9 @@ export class HttpInstrumentation extends InstrumentationBase { * Creates spans for incoming requests, restoring spans' context if applied. */ protected _getPatchIncomingRequestFunction(component: 'http' | 'https') { - return (original: (event: string, ...args: unknown[]) => boolean): (this: unknown, event: string, ...args: unknown[]) => boolean => { + return ( + original: (event: string, ...args: unknown[]) => boolean + ): ((this: unknown, event: string, ...args: unknown[]) => boolean) => { return this._incomingRequestFunction(component, original); }; } @@ -313,18 +325,25 @@ export class HttpInstrumentation extends InstrumentationBase { request.prependListener( 'response', (response: http.IncomingMessage & { aborted?: boolean }) => { - const responseAttributes = utils.getOutgoingRequestAttributesOnResponse( - response, - ); + const responseAttributes = + utils.getOutgoingRequestAttributesOnResponse(response); span.setAttributes(responseAttributes); - metricAttributes = Object.assign(metricAttributes, utils.getOutgoingRequestMetricAttributesOnResponse(responseAttributes)); + metricAttributes = Object.assign( + metricAttributes, + utils.getOutgoingRequestMetricAttributesOnResponse(responseAttributes) + ); if (this._getConfig().responseHook) { this._callResponseHook(span, response); } - this._headerCapture.client.captureRequestHeaders(span, header => request.getHeader(header)); - this._headerCapture.client.captureResponseHeaders(span, header => response.headers[header]); + this._headerCapture.client.captureRequestHeaders(span, header => + request.getHeader(header) + ); + this._headerCapture.client.captureResponseHeaders( + span, + header => response.headers[header] + ); context.bind(context.active(), response); this._diag.debug('outgoingRequest on response()'); @@ -335,7 +354,12 @@ export class HttpInstrumentation extends InstrumentationBase { if (response.aborted && !response.complete) { status = { code: SpanStatusCode.ERROR }; } else { - status = { code: utils.parseResponseStatus(SpanKind.CLIENT, response.statusCode) }; + status = { + code: utils.parseResponseStatus( + SpanKind.CLIENT, + response.statusCode + ), + }; } span.setStatus(status); @@ -348,19 +372,32 @@ export class HttpInstrumentation extends InstrumentationBase { request, response ), - () => { }, + () => {}, true ); } - this._closeHttpSpan(span, SpanKind.CLIENT, startTime, metricAttributes); + this._closeHttpSpan( + span, + SpanKind.CLIENT, + startTime, + metricAttributes + ); }); response.on(errorMonitor, (error: Err) => { this._diag.debug('outgoingRequest on error()', error); utils.setSpanWithError(span, error); - const code = utils.parseResponseStatus(SpanKind.CLIENT, response.statusCode); + const code = utils.parseResponseStatus( + SpanKind.CLIENT, + response.statusCode + ); span.setStatus({ code, message: error.message }); - this._closeHttpSpan(span, SpanKind.CLIENT, startTime, metricAttributes); + this._closeHttpSpan( + span, + SpanKind.CLIENT, + startTime, + metricAttributes + ); }); } ); @@ -402,19 +439,26 @@ export class HttpInstrumentation extends InstrumentationBase { : '/'; const method = request.method || 'GET'; - instrumentation._diag.debug(`${component} instrumentation incomingRequest`); + 
instrumentation._diag.debug( + `${component} instrumentation incomingRequest` + ); if ( utils.isIgnored( pathname, instrumentation._getConfig().ignoreIncomingPaths, - (e: unknown) => instrumentation._diag.error('caught ignoreIncomingPaths error: ', e) + (e: unknown) => + instrumentation._diag.error('caught ignoreIncomingPaths error: ', e) ) || safeExecuteInTheMiddle( - () => instrumentation._getConfig().ignoreIncomingRequestHook?.(request), + () => + instrumentation._getConfig().ignoreIncomingRequestHook?.(request), (e: unknown) => { if (e != null) { - instrumentation._diag.error('caught ignoreIncomingRequestHook error: ', e); + instrumentation._diag.error( + 'caught ignoreIncomingRequestHook error: ', + e + ); } }, true @@ -444,7 +488,8 @@ export class HttpInstrumentation extends InstrumentationBase { }; const startTime = hrTime(); - let metricAttributes: MetricAttributes = utils.getIncomingRequestMetricAttributes(spanAttributes); + let metricAttributes: MetricAttributes = + utils.getIncomingRequestMetricAttributes(spanAttributes); const ctx = propagation.extract(ROOT_CONTEXT, headers); const span = instrumentation._startHttpSpan( @@ -470,7 +515,10 @@ export class HttpInstrumentation extends InstrumentationBase { instrumentation._callResponseHook(span, response); } - instrumentation._headerCapture.server.captureRequestHeaders(span, header => request.headers[header]); + instrumentation._headerCapture.server.captureRequestHeaders( + span, + header => request.headers[header] + ); // Wraps end (inspired by: // https://github.com/GoogleCloudPlatform/cloud-trace-nodejs/blob/master/src/instrumentations/instrumentation-connect.ts#L75) @@ -486,7 +534,12 @@ export class HttpInstrumentation extends InstrumentationBase { error => { if (error) { utils.setSpanWithError(span, error); - instrumentation._closeHttpSpan(span, SpanKind.SERVER, startTime, metricAttributes); + instrumentation._closeHttpSpan( + span, + SpanKind.SERVER, + startTime, + metricAttributes + ); throw error; } } @@ -496,13 +549,22 @@ export class HttpInstrumentation extends InstrumentationBase { request, response ); - metricAttributes = Object.assign(metricAttributes, utils.getIncomingRequestMetricAttributesOnResponse(attributes)); + metricAttributes = Object.assign( + metricAttributes, + utils.getIncomingRequestMetricAttributesOnResponse(attributes) + ); - instrumentation._headerCapture.server.captureResponseHeaders(span, header => response.getHeader(header)); + instrumentation._headerCapture.server.captureResponseHeaders( + span, + header => response.getHeader(header) + ); - span - .setAttributes(attributes) - .setStatus({ code: utils.parseResponseStatus(SpanKind.SERVER, response.statusCode) }); + span.setAttributes(attributes).setStatus({ + code: utils.parseResponseStatus( + SpanKind.SERVER, + response.statusCode + ), + }); if (instrumentation._getConfig().applyCustomAttributesOnSpan) { safeExecuteInTheMiddle( @@ -512,12 +574,17 @@ export class HttpInstrumentation extends InstrumentationBase { request, response ), - () => { }, + () => {}, true ); } - instrumentation._closeHttpSpan(span, SpanKind.SERVER, startTime, metricAttributes); + instrumentation._closeHttpSpan( + span, + SpanKind.SERVER, + startTime, + metricAttributes + ); return returned; }; @@ -526,7 +593,12 @@ export class HttpInstrumentation extends InstrumentationBase { error => { if (error) { utils.setSpanWithError(span, error); - instrumentation._closeHttpSpan(span, SpanKind.SERVER, startTime, metricAttributes); + instrumentation._closeHttpSpan( + span, + 
SpanKind.SERVER, + startTime, + metricAttributes + ); throw error; } } @@ -551,7 +623,7 @@ export class HttpInstrumentation extends InstrumentationBase { } const extraOptions = typeof args[0] === 'object' && - (typeof options === 'string' || options instanceof url.URL) + (typeof options === 'string' || options instanceof url.URL) ? (args.shift() as http.RequestOptions) : undefined; const { origin, pathname, method, optionsParsed } = utils.getRequestInfo( @@ -575,13 +647,20 @@ export class HttpInstrumentation extends InstrumentationBase { utils.isIgnored( origin + pathname, instrumentation._getConfig().ignoreOutgoingUrls, - (e: unknown) => instrumentation._diag.error('caught ignoreOutgoingUrls error: ', e) + (e: unknown) => + instrumentation._diag.error('caught ignoreOutgoingUrls error: ', e) ) || safeExecuteInTheMiddle( - () => instrumentation._getConfig().ignoreOutgoingRequestHook?.(optionsParsed), + () => + instrumentation + ._getConfig() + .ignoreOutgoingRequestHook?.(optionsParsed), (e: unknown) => { if (e != null) { - instrumentation._diag.error('caught ignoreOutgoingRequestHook error: ', e); + instrumentation._diag.error( + 'caught ignoreOutgoingRequestHook error: ', + e + ); } }, true @@ -604,7 +683,8 @@ export class HttpInstrumentation extends InstrumentationBase { }); const startTime = hrTime(); - const metricAttributes: MetricAttributes = utils.getOutgoingRequestMetricAttributes(attributes); + const metricAttributes: MetricAttributes = + utils.getOutgoingRequestMetricAttributes(attributes); const spanOptions: SpanOptions = { kind: SpanKind.CLIENT, @@ -635,13 +715,20 @@ export class HttpInstrumentation extends InstrumentationBase { error => { if (error) { utils.setSpanWithError(span, error); - instrumentation._closeHttpSpan(span, SpanKind.CLIENT, startTime, metricAttributes); + instrumentation._closeHttpSpan( + span, + SpanKind.CLIENT, + startTime, + metricAttributes + ); throw error; } } ); - instrumentation._diag.debug(`${component} instrumentation outgoingRequest`); + instrumentation._diag.debug( + `${component} instrumentation outgoingRequest` + ); context.bind(parentContext, request); return instrumentation._traceClientRequest( request, @@ -682,7 +769,12 @@ export class HttpInstrumentation extends InstrumentationBase { return span; } - private _closeHttpSpan(span: Span, spanKind: SpanKind, startTime: HrTime, metricAttributes: MetricAttributes) { + private _closeHttpSpan( + span: Span, + spanKind: SpanKind, + startTime: HrTime, + metricAttributes: MetricAttributes + ) { if (!this._spanNotEnded.has(span)) { return; } @@ -705,7 +797,7 @@ export class HttpInstrumentation extends InstrumentationBase { ) { safeExecuteInTheMiddle( () => this._getConfig().responseHook!(span, response), - () => { }, + () => {}, true ); } @@ -716,19 +808,19 @@ export class HttpInstrumentation extends InstrumentationBase { ) { safeExecuteInTheMiddle( () => this._getConfig().requestHook!(span, request), - () => { }, + () => {}, true ); } private _callStartSpanHook( request: http.IncomingMessage | http.RequestOptions, - hookFunc: Function | undefined, + hookFunc: Function | undefined ) { if (typeof hookFunc === 'function') { return safeExecuteInTheMiddle( () => hookFunc(request), - () => { }, + () => {}, true ); } @@ -739,13 +831,25 @@ export class HttpInstrumentation extends InstrumentationBase { return { client: { - captureRequestHeaders: utils.headerCapture('request', config.headersToSpanAttributes?.client?.requestHeaders ?? 
[]), - captureResponseHeaders: utils.headerCapture('response', config.headersToSpanAttributes?.client?.responseHeaders ?? []) + captureRequestHeaders: utils.headerCapture( + 'request', + config.headersToSpanAttributes?.client?.requestHeaders ?? [] + ), + captureResponseHeaders: utils.headerCapture( + 'response', + config.headersToSpanAttributes?.client?.responseHeaders ?? [] + ), }, server: { - captureRequestHeaders: utils.headerCapture('request', config.headersToSpanAttributes?.server?.requestHeaders ?? []), - captureResponseHeaders: utils.headerCapture('response', config.headersToSpanAttributes?.server?.responseHeaders ?? []), - } + captureRequestHeaders: utils.headerCapture( + 'request', + config.headersToSpanAttributes?.server?.requestHeaders ?? [] + ), + captureResponseHeaders: utils.headerCapture( + 'response', + config.headersToSpanAttributes?.server?.responseHeaders ?? [] + ), + }, }; } } diff --git a/experimental/packages/opentelemetry-instrumentation-http/src/types.ts b/experimental/packages/opentelemetry-instrumentation-http/src/types.ts index c0f2adab34..2d3377c53f 100644 --- a/experimental/packages/opentelemetry-instrumentation-http/src/types.ts +++ b/experimental/packages/opentelemetry-instrumentation-http/src/types.ts @@ -13,10 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -import { - Span, - SpanAttributes, -} from '@opentelemetry/api'; +import { Span, SpanAttributes } from '@opentelemetry/api'; import type * as http from 'http'; import type * as https from 'https'; import { @@ -64,11 +61,11 @@ export interface HttpCustomAttributeFunction { } export interface IgnoreIncomingRequestFunction { - (request: IncomingMessage ): boolean; + (request: IncomingMessage): boolean; } export interface IgnoreOutgoingRequestFunction { - (request: RequestOptions ): boolean; + (request: RequestOptions): boolean; } export interface HttpRequestCustomAttributeFunction { @@ -80,11 +77,11 @@ export interface HttpResponseCustomAttributeFunction { } export interface StartIncomingSpanCustomAttributeFunction { - (request: IncomingMessage ): SpanAttributes; + (request: IncomingMessage): SpanAttributes; } export interface StartOutgoingSpanCustomAttributeFunction { - (request: RequestOptions ): SpanAttributes; + (request: RequestOptions): SpanAttributes; } /** @@ -123,9 +120,9 @@ export interface HttpInstrumentationConfig extends InstrumentationConfig { requireParentforIncomingSpans?: boolean; /** Map the following HTTP headers to span attributes. */ headersToSpanAttributes?: { - client?: { requestHeaders?: string[]; responseHeaders?: string[]; }, - server?: { requestHeaders?: string[]; responseHeaders?: string[]; }, - } + client?: { requestHeaders?: string[]; responseHeaders?: string[] }; + server?: { requestHeaders?: string[]; responseHeaders?: string[] }; + }; } export interface Err extends Error { diff --git a/experimental/packages/opentelemetry-instrumentation-http/src/utils.ts b/experimental/packages/opentelemetry-instrumentation-http/src/utils.ts index 4094586493..4f91ad4cfd 100644 --- a/experimental/packages/opentelemetry-instrumentation-http/src/utils.ts +++ b/experimental/packages/opentelemetry-instrumentation-http/src/utils.ts @@ -69,7 +69,10 @@ export const getAbsoluteUrl = ( /** * Parse status code from HTTP response. 
[More details](https://github.com/open-telemetry/opentelemetry-specification/blob/master/specification/data-http.md#status) */ -export const parseResponseStatus = (kind: SpanKind, statusCode?: number): SpanStatusCode => { +export const parseResponseStatus = ( + kind: SpanKind, + statusCode?: number +): SpanStatusCode => { const upperBound = kind === SpanKind.CLIENT ? 400 : 500; // 1xx, 2xx, 3xx are OK on client and server // 4xx is OK on server @@ -139,10 +142,7 @@ export const isIgnored = ( * @param {Span} span the span that need to be set * @param {Error} error error that will be set to span */ -export const setSpanWithError = ( - span: Span, - error: Err -): void => { +export const setSpanWithError = (span: Span, error: Err): void => { const message = error.message; span.setAttributes({ @@ -169,9 +169,8 @@ export const setRequestContentLengthAttribute = ( if (isCompressed(request.headers)) { attributes[SemanticAttributes.HTTP_REQUEST_CONTENT_LENGTH] = length; } else { - attributes[ - SemanticAttributes.HTTP_REQUEST_CONTENT_LENGTH_UNCOMPRESSED - ] = length; + attributes[SemanticAttributes.HTTP_REQUEST_CONTENT_LENGTH_UNCOMPRESSED] = + length; } }; @@ -190,9 +189,8 @@ export const setResponseContentLengthAttribute = ( if (isCompressed(response.headers)) { attributes[SemanticAttributes.HTTP_RESPONSE_CONTENT_LENGTH] = length; } else { - attributes[ - SemanticAttributes.HTTP_RESPONSE_CONTENT_LENGTH_UNCOMPRESSED - ] = length; + attributes[SemanticAttributes.HTTP_RESPONSE_CONTENT_LENGTH_UNCOMPRESSED] = + length; } }; @@ -225,7 +223,12 @@ export const isCompressed = ( export const getRequestInfo = ( options: url.URL | RequestOptions | string, extraOptions?: RequestOptions -): { origin: string; pathname: string; method: string; optionsParsed: RequestOptions; } => { +): { + origin: string; + pathname: string; + method: string; + optionsParsed: RequestOptions; +} => { let pathname = '/'; let origin = ''; let optionsParsed: RequestOptions; @@ -265,22 +268,29 @@ export const getRequestInfo = ( if (!pathname && optionsParsed.path) { pathname = url.parse(optionsParsed.path).pathname || '/'; } - const hostname = optionsParsed.host || (optionsParsed.port != null ? `${optionsParsed.hostname}${optionsParsed.port}` : optionsParsed.hostname); + const hostname = + optionsParsed.host || + (optionsParsed.port != null + ? `${optionsParsed.hostname}${optionsParsed.port}` + : optionsParsed.hostname); origin = `${optionsParsed.protocol || 'http:'}//${hostname}`; } const headers = optionsParsed.headers ?? {}; - optionsParsed.headers = Object.keys(headers).reduce((normalizedHeader, key) => { - normalizedHeader[key.toLowerCase()] = headers[key]; - return normalizedHeader; - }, {} as OutgoingHttpHeaders); + optionsParsed.headers = Object.keys(headers).reduce( + (normalizedHeader, key) => { + normalizedHeader[key.toLowerCase()] = headers[key]; + return normalizedHeader; + }, + {} as OutgoingHttpHeaders + ); // some packages return method in lowercase.. // ensure upperCase for consistency const method = optionsParsed.method ? 
optionsParsed.method.toUpperCase() : 'GET'; - return { origin, pathname, method, optionsParsed, }; + return { origin, pathname, method, optionsParsed }; }; /** @@ -297,13 +307,17 @@ export const isValidOptionsType = (options: unknown): boolean => { }; export const extractHostnameAndPort = ( - requestOptions: Pick -): { hostname: string, port: number | string } => { + requestOptions: Pick< + ParsedRequestOptions, + 'hostname' | 'host' | 'port' | 'protocol' + > +): { hostname: string; port: number | string } => { if (requestOptions.hostname && requestOptions.port) { return { hostname: requestOptions.hostname, port: requestOptions.port }; } const matches = requestOptions.host?.match(/^([^:/ ]+)(:\d{1,5})?/) || null; - const hostname = requestOptions.hostname || (matches === null ? 'localhost' : matches[1]); + const hostname = + requestOptions.hostname || (matches === null ? 'localhost' : matches[1]); let port = requestOptions.port; if (!port) { if (matches && matches[2]) { @@ -323,7 +337,12 @@ export const extractHostnameAndPort = ( */ export const getOutgoingRequestAttributes = ( requestOptions: ParsedRequestOptions, - options: { component: string; hostname: string; port: string | number, hookAttributes?: SpanAttributes } + options: { + component: string; + hostname: string; + port: string | number; + hookAttributes?: SpanAttributes; + } ): SpanAttributes => { const hostname = options.hostname; const port = options.port; @@ -340,7 +359,8 @@ export const getOutgoingRequestAttributes = ( [SemanticAttributes.HTTP_METHOD]: method, [SemanticAttributes.HTTP_TARGET]: requestOptions.path || '/', [SemanticAttributes.NET_PEER_NAME]: hostname, - [SemanticAttributes.HTTP_HOST]: requestOptions.headers?.host ?? `${hostname}:${port}`, + [SemanticAttributes.HTTP_HOST]: + requestOptions.headers?.host ?? `${hostname}:${port}`, }; if (userAgent !== undefined) { @@ -357,8 +377,10 @@ export const getOutgoingRequestMetricAttributes = ( spanAttributes: SpanAttributes ): MetricAttributes => { const metricAttributes: MetricAttributes = {}; - metricAttributes[SemanticAttributes.HTTP_METHOD] = spanAttributes[SemanticAttributes.HTTP_METHOD]; - metricAttributes[SemanticAttributes.NET_PEER_NAME] = spanAttributes[SemanticAttributes.NET_PEER_NAME]; + metricAttributes[SemanticAttributes.HTTP_METHOD] = + spanAttributes[SemanticAttributes.HTTP_METHOD]; + metricAttributes[SemanticAttributes.NET_PEER_NAME] = + spanAttributes[SemanticAttributes.NET_PEER_NAME]; //TODO: http.url attribute, it should susbtitute any parameters to avoid high cardinality. 
return metricAttributes; }; @@ -386,7 +408,7 @@ export const getAttributesFromHttpKind = (kind?: string): SpanAttributes => { * @param {{ hostname: string }} options used to pass data needed to create attributes */ export const getOutgoingRequestAttributesOnResponse = ( - response: IncomingMessage, + response: IncomingMessage ): SpanAttributes => { const { statusCode, statusMessage, httpVersion, socket } = response; const { remoteAddress, remotePort } = socket; @@ -415,9 +437,12 @@ export const getOutgoingRequestMetricAttributesOnResponse = ( spanAttributes: SpanAttributes ): MetricAttributes => { const metricAttributes: MetricAttributes = {}; - metricAttributes[SemanticAttributes.NET_PEER_PORT] = spanAttributes[SemanticAttributes.NET_PEER_PORT]; - metricAttributes[SemanticAttributes.HTTP_STATUS_CODE] = spanAttributes[SemanticAttributes.HTTP_STATUS_CODE]; - metricAttributes[SemanticAttributes.HTTP_FLAVOR] = spanAttributes[SemanticAttributes.HTTP_FLAVOR]; + metricAttributes[SemanticAttributes.NET_PEER_PORT] = + spanAttributes[SemanticAttributes.NET_PEER_PORT]; + metricAttributes[SemanticAttributes.HTTP_STATUS_CODE] = + spanAttributes[SemanticAttributes.HTTP_STATUS_CODE]; + metricAttributes[SemanticAttributes.HTTP_FLAVOR] = + spanAttributes[SemanticAttributes.HTTP_FLAVOR]; return metricAttributes; }; @@ -428,7 +453,11 @@ export const getOutgoingRequestMetricAttributesOnResponse = ( */ export const getIncomingRequestAttributes = ( request: IncomingMessage, - options: { component: string; serverName?: string; hookAttributes?: SpanAttributes } + options: { + component: string; + serverName?: string; + hookAttributes?: SpanAttributes; + } ): SpanAttributes => { const headers = request.headers; const userAgent = headers['user-agent']; @@ -484,10 +513,14 @@ export const getIncomingRequestMetricAttributes = ( spanAttributes: SpanAttributes ): MetricAttributes => { const metricAttributes: MetricAttributes = {}; - metricAttributes[SemanticAttributes.HTTP_SCHEME] = spanAttributes[SemanticAttributes.HTTP_SCHEME]; - metricAttributes[SemanticAttributes.HTTP_METHOD] = spanAttributes[SemanticAttributes.HTTP_METHOD]; - metricAttributes[SemanticAttributes.NET_HOST_NAME] = spanAttributes[SemanticAttributes.NET_HOST_NAME]; - metricAttributes[SemanticAttributes.HTTP_FLAVOR] = spanAttributes[SemanticAttributes.HTTP_FLAVOR]; + metricAttributes[SemanticAttributes.HTTP_SCHEME] = + spanAttributes[SemanticAttributes.HTTP_SCHEME]; + metricAttributes[SemanticAttributes.HTTP_METHOD] = + spanAttributes[SemanticAttributes.HTTP_METHOD]; + metricAttributes[SemanticAttributes.NET_HOST_NAME] = + spanAttributes[SemanticAttributes.NET_HOST_NAME]; + metricAttributes[SemanticAttributes.HTTP_FLAVOR] = + spanAttributes[SemanticAttributes.HTTP_FLAVOR]; //TODO: http.target attribute, it should susbtitute any parameters to avoid high cardinality. 
return metricAttributes; }; @@ -530,15 +563,25 @@ export const getIncomingRequestMetricAttributesOnResponse = ( spanAttributes: SpanAttributes ): MetricAttributes => { const metricAttributes: MetricAttributes = {}; - metricAttributes[SemanticAttributes.HTTP_STATUS_CODE] = spanAttributes[SemanticAttributes.HTTP_STATUS_CODE]; - metricAttributes[SemanticAttributes.NET_HOST_PORT] = spanAttributes[SemanticAttributes.NET_HOST_PORT]; + metricAttributes[SemanticAttributes.HTTP_STATUS_CODE] = + spanAttributes[SemanticAttributes.HTTP_STATUS_CODE]; + metricAttributes[SemanticAttributes.NET_HOST_PORT] = + spanAttributes[SemanticAttributes.NET_HOST_PORT]; return metricAttributes; }; export function headerCapture(type: 'request' | 'response', headers: string[]) { - const normalizedHeaders = new Map(headers.map(header => [header.toLowerCase(), header.toLowerCase().replace(/-/g, '_')])); - - return (span: Span, getHeader: (key: string) => undefined | string | string[] | number) => { + const normalizedHeaders = new Map( + headers.map(header => [ + header.toLowerCase(), + header.toLowerCase().replace(/-/g, '_'), + ]) + ); + + return ( + span: Span, + getHeader: (key: string) => undefined | string | string[] | number + ) => { for (const [capturedHeader, normalizedHeader] of normalizedHeaders) { const value = getHeader(capturedHeader); diff --git a/experimental/packages/opentelemetry-instrumentation-http/test/functionals/http-disable.test.ts b/experimental/packages/opentelemetry-instrumentation-http/test/functionals/http-disable.test.ts index 77b7e03b97..c445acb4a7 100644 --- a/experimental/packages/opentelemetry-instrumentation-http/test/functionals/http-disable.test.ts +++ b/experimental/packages/opentelemetry-instrumentation-http/test/functionals/http-disable.test.ts @@ -26,8 +26,11 @@ instrumentation.enable(); instrumentation.disable(); import * as http from 'http'; -import { trace, TracerProvider, INVALID_SPAN_CONTEXT } from '@opentelemetry/api'; - +import { + trace, + TracerProvider, + INVALID_SPAN_CONTEXT, +} from '@opentelemetry/api'; describe('HttpInstrumentation', () => { let server: http.Server; @@ -40,9 +43,11 @@ describe('HttpInstrumentation', () => { before(() => { provider = { getTracer: () => { - startSpanStub = sinon.stub().returns(trace.wrapSpanContext(INVALID_SPAN_CONTEXT)); + startSpanStub = sinon + .stub() + .returns(trace.wrapSpanContext(INVALID_SPAN_CONTEXT)); return { startSpan: startSpanStub } as any; - } + }, }; nock.cleanAll(); nock.enableNetConnect(); diff --git a/experimental/packages/opentelemetry-instrumentation-http/test/functionals/http-enable.test.ts b/experimental/packages/opentelemetry-instrumentation-http/test/functionals/http-enable.test.ts index ecf5191427..670021b88e 100644 --- a/experimental/packages/opentelemetry-instrumentation-http/test/functionals/http-enable.test.ts +++ b/experimental/packages/opentelemetry-instrumentation-http/test/functionals/http-enable.test.ts @@ -41,7 +41,12 @@ import { DummyPropagation } from '../utils/DummyPropagation'; import { httpRequest } from '../utils/httpRequest'; import { ContextManager } from '@opentelemetry/api'; import { AsyncHooksContextManager } from '@opentelemetry/context-async-hooks'; -import type { ClientRequest, IncomingMessage, ServerResponse, RequestOptions } from 'http'; +import type { + ClientRequest, + IncomingMessage, + ServerResponse, + RequestOptions, +} from 'http'; import { isWrapped } from '@opentelemetry/instrumentation'; import { getRPCMetadata, RPCType } from '@opentelemetry/core'; @@ -100,13 +105,13 @@ 
export const responseHookFunction = ( export const startIncomingSpanHookFunction = ( request: IncomingMessage ): SpanAttributes => { - return {guid: request.headers?.guid}; + return { guid: request.headers?.guid }; }; export const startOutgoingSpanHookFunction = ( request: RequestOptions ): SpanAttributes => { - return {guid: request.headers?.guid}; + return { guid: request.headers?.guid }; }; describe('HttpInstrumentation', () => { @@ -176,8 +181,8 @@ describe('HttpInstrumentation', () => { `${protocol}://${hostname}:${serverPort}${pathname}`, { headers: { - 'user-agent': 'tester' - } + 'user-agent': 'tester', + }, } ); const spans = memoryExporter.getFinishedSpans(); @@ -219,7 +224,9 @@ describe('HttpInstrumentation', () => { (url: string) => url.endsWith('/ignored/function'), ], ignoreIncomingRequestHook: request => { - return request.headers['user-agent']?.match('ignored-string') != null; + return ( + request.headers['user-agent']?.match('ignored-string') != null + ); }, ignoreOutgoingUrls: [ `${protocol}://${hostname}:${serverPort}/ignored/string`, @@ -228,7 +235,10 @@ describe('HttpInstrumentation', () => { ], ignoreOutgoingRequestHook: request => { if (request.headers?.['user-agent'] != null) { - return `${request.headers['user-agent']}`.match('ignored-string') != null; + return ( + `${request.headers['user-agent']}`.match('ignored-string') != + null + ); } return false; }, @@ -312,17 +322,7 @@ describe('HttpInstrumentation', () => { }); const httpErrorCodes = [ - 400, - 401, - 403, - 404, - 429, - 501, - 503, - 504, - 500, - 505, - 597, + 400, 401, 403, 404, 429, 501, 503, 504, 500, 505, 597, ]; for (let i = 0; i < httpErrorCodes.length; i++) { @@ -410,36 +410,39 @@ describe('HttpInstrumentation', () => { ); const name = 'TestRootSpan'; const span = provider.getTracer('default').startSpan(name); - return context.with(trace.setSpan(context.active(), span), async () => { - const result = await httpRequest.get( - `${protocol}://${hostname}${testPath}` - ); - span.end(); - const spans = memoryExporter.getFinishedSpans(); - const [reqSpan, localSpan] = spans; - const validations = { - hostname, - httpStatusCode: result.statusCode!, - httpMethod: 'GET', - pathname: testPath, - resHeaders: result.resHeaders, - reqHeaders: result.reqHeaders, - component: 'http', - }; - - assert.ok(localSpan.name.indexOf('TestRootSpan') >= 0); - assert.strictEqual(spans.length, 2); - assert.strictEqual(reqSpan.name, 'HTTP GET'); - assert.strictEqual( - localSpan.spanContext().traceId, - reqSpan.spanContext().traceId - ); - assertSpan(reqSpan, SpanKind.CLIENT, validations); - assert.notStrictEqual( - localSpan.spanContext().spanId, - reqSpan.spanContext().spanId - ); - }); + return context.with( + trace.setSpan(context.active(), span), + async () => { + const result = await httpRequest.get( + `${protocol}://${hostname}${testPath}` + ); + span.end(); + const spans = memoryExporter.getFinishedSpans(); + const [reqSpan, localSpan] = spans; + const validations = { + hostname, + httpStatusCode: result.statusCode!, + httpMethod: 'GET', + pathname: testPath, + resHeaders: result.resHeaders, + reqHeaders: result.reqHeaders, + component: 'http', + }; + + assert.ok(localSpan.name.indexOf('TestRootSpan') >= 0); + assert.strictEqual(spans.length, 2); + assert.strictEqual(reqSpan.name, 'HTTP GET'); + assert.strictEqual( + localSpan.spanContext().traceId, + reqSpan.spanContext().traceId + ); + assertSpan(reqSpan, SpanKind.CLIENT, validations); + assert.notStrictEqual( + localSpan.spanContext().spanId, + 
reqSpan.spanContext().spanId + ); + } + ); }); } @@ -482,36 +485,27 @@ describe('HttpInstrumentation', () => { const testValue = 'ignored-string'; await Promise.all([ - httpRequest.get( - `${protocol}://${hostname}:${serverPort}`, - { - headers: { - 'user-agent': testValue - } - } - ), - httpRequest.get( - `${protocol}://${hostname}:${serverPort}`, - { - headers: { - 'uSeR-aGeNt': testValue - } - } - ), + httpRequest.get(`${protocol}://${hostname}:${serverPort}`, { + headers: { + 'user-agent': testValue, + }, + }), + httpRequest.get(`${protocol}://${hostname}:${serverPort}`, { + headers: { + 'uSeR-aGeNt': testValue, + }, + }), ]); const spans = memoryExporter.getFinishedSpans(); assert.strictEqual(spans.length, 0); }); it('should trace not ignored requests with headers (client and server side)', async () => { - await httpRequest.get( - `${protocol}://${hostname}:${serverPort}`, - { - headers: { - 'user-agent': 'test-bot', - } - } - ); + await httpRequest.get(`${protocol}://${hostname}:${serverPort}`, { + headers: { + 'user-agent': 'test-bot', + }, + }); const spans = memoryExporter.getFinishedSpans(); assert.strictEqual(spans.length, 2); }); @@ -554,29 +548,32 @@ describe('HttpInstrumentation', () => { } it('should have 1 ended span when request throw on bad "options" object', () => { - assert.throws(() => http.request({ headers: { cookie: undefined} }), (err: unknown) => { - const spans = memoryExporter.getFinishedSpans(); - assert.strictEqual(spans.length, 1); + assert.throws( + () => http.request({ headers: { cookie: undefined } }), + (err: unknown) => { + const spans = memoryExporter.getFinishedSpans(); + assert.strictEqual(spans.length, 1); - assert.ok(err instanceof Error); + assert.ok(err instanceof Error); - const validations = { - httpStatusCode: undefined, - httpMethod: 'GET', - resHeaders: {}, - hostname: 'localhost', - pathname: '/', - forceStatus: { - code: SpanStatusCode.ERROR, - message: err.message, - }, - component: 'http', - noNetPeer: true, - error: err, - }; - assertSpan(spans[0], SpanKind.CLIENT, validations); - return true; - }); + const validations = { + httpStatusCode: undefined, + httpMethod: 'GET', + resHeaders: {}, + hostname: 'localhost', + pathname: '/', + forceStatus: { + code: SpanStatusCode.ERROR, + message: err.message, + }, + component: 'http', + noNetPeer: true, + error: err, + }; + assertSpan(spans[0], SpanKind.CLIENT, validations); + return true; + } + ); }); it('should have 1 ended span when response.end throw an exception', async () => { @@ -733,7 +730,9 @@ describe('HttpInstrumentation', () => { const req = http.request(`${protocol}://${hostname}:${serverPort}/`); req.on('close', () => { - const spans = memoryExporter.getFinishedSpans().filter(it => it.kind === SpanKind.CLIENT); + const spans = memoryExporter + .getFinishedSpans() + .filter(it => it.kind === SpanKind.CLIENT); assert.strictEqual(spans.length, 1); const [span] = spans; assert.ok(Object.keys(span.attributes).length > 6); @@ -767,7 +766,7 @@ describe('HttpInstrumentation', () => { it('custom attributes should show up on client and server spans', async () => { await httpRequest.get( `${protocol}://${hostname}:${serverPort}${pathname}`, - {headers: {guid: 'user_guid'}} + { headers: { guid: 'user_guid' } } ); const spans = memoryExporter.getFinishedSpans(); const [incomingSpan, outgoingSpan] = spans; @@ -780,10 +779,7 @@ describe('HttpInstrumentation', () => { incomingSpan.attributes['custom response hook attribute'], 'response' ); - assert.strictEqual( - incomingSpan.attributes['guid'], - 
'user_guid' - ); + assert.strictEqual(incomingSpan.attributes['guid'], 'user_guid'); assert.strictEqual( incomingSpan.attributes['span kind'], SpanKind.CLIENT @@ -797,10 +793,7 @@ describe('HttpInstrumentation', () => { outgoingSpan.attributes['custom response hook attribute'], 'response' ); - assert.strictEqual( - outgoingSpan.attributes['guid'], - 'user_guid' - ); + assert.strictEqual(outgoingSpan.attributes['guid'], 'user_guid'); assert.strictEqual( outgoingSpan.attributes['span kind'], SpanKind.CLIENT @@ -981,9 +974,15 @@ describe('HttpInstrumentation', () => { before(() => { instrumentation.setConfig({ headersToSpanAttributes: { - client: { requestHeaders: ['X-Client-Header1'], responseHeaders: ['X-Server-Header1'] }, - server: { requestHeaders: ['X-Client-Header2'], responseHeaders: ['X-Server-Header2'] }, - } + client: { + requestHeaders: ['X-Client-Header1'], + responseHeaders: ['X-Server-Header1'], + }, + server: { + requestHeaders: ['X-Client-Header2'], + responseHeaders: ['X-Server-Header2'], + }, + }, }); instrumentation.enable(); server = http.createServer((request, response) => { @@ -1007,7 +1006,7 @@ describe('HttpInstrumentation', () => { headers: { 'X-client-header1': 'client123', 'X-CLIENT-HEADER2': '123client', - } + }, } ); const spans = memoryExporter.getFinishedSpans(); diff --git a/experimental/packages/opentelemetry-instrumentation-http/test/functionals/http-metrics.test.ts b/experimental/packages/opentelemetry-instrumentation-http/test/functionals/http-metrics.test.ts index 3d80bb950d..df51a54fcf 100644 --- a/experimental/packages/opentelemetry-instrumentation-http/test/functionals/http-metrics.test.ts +++ b/experimental/packages/opentelemetry-instrumentation-http/test/functionals/http-metrics.test.ts @@ -39,14 +39,15 @@ const hostname = 'localhost'; const pathname = '/test'; const tracerProvider = new NodeTracerProvider(); const meterProvider = new MeterProvider(); -const metricsMemoryExporter = new InMemoryMetricExporter(AggregationTemporality.DELTA); +const metricsMemoryExporter = new InMemoryMetricExporter( + AggregationTemporality.DELTA +); const metricReader = new TestMetricReader(metricsMemoryExporter); meterProvider.addMetricReader(metricReader); instrumentation.setTracerProvider(tracerProvider); instrumentation.setMeterProvider(meterProvider); - describe('metrics', () => { beforeEach(() => { metricsMemoryExporter.reset(); @@ -68,7 +69,9 @@ describe('metrics', () => { it('should add server/client duration metrics', async () => { const requestCount = 3; for (let i = 0; i < requestCount; i++) { - await httpRequest.get(`${protocol}://${hostname}:${serverPort}${pathname}`); + await httpRequest.get( + `${protocol}://${hostname}:${serverPort}${pathname}` + ); } await metricReader.collectAndExport(); const resourceMetrics = metricsMemoryExporter.getMetrics(); @@ -77,28 +80,73 @@ describe('metrics', () => { const metrics = scopeMetrics[0].metrics; assert.strictEqual(metrics.length, 2, 'metrics count'); assert.strictEqual(metrics[0].dataPointType, DataPointType.HISTOGRAM); - assert.strictEqual(metrics[0].descriptor.description, 'measures the duration of the inbound HTTP requests'); + assert.strictEqual( + metrics[0].descriptor.description, + 'measures the duration of the inbound HTTP requests' + ); assert.strictEqual(metrics[0].descriptor.name, 'http.server.duration'); assert.strictEqual(metrics[0].descriptor.unit, 'ms'); assert.strictEqual(metrics[0].dataPoints.length, 1); - assert.strictEqual((metrics[0].dataPoints[0].value as any).count, requestCount); - 
assert.strictEqual(metrics[0].dataPoints[0].attributes[SemanticAttributes.HTTP_SCHEME], 'http'); - assert.strictEqual(metrics[0].dataPoints[0].attributes[SemanticAttributes.HTTP_METHOD], 'GET'); - assert.strictEqual(metrics[0].dataPoints[0].attributes[SemanticAttributes.HTTP_FLAVOR], '1.1'); - assert.strictEqual(metrics[0].dataPoints[0].attributes[SemanticAttributes.NET_HOST_NAME], 'localhost'); - assert.strictEqual(metrics[0].dataPoints[0].attributes[SemanticAttributes.HTTP_STATUS_CODE], 200); - assert.strictEqual(metrics[0].dataPoints[0].attributes[SemanticAttributes.NET_HOST_PORT], 22346); + assert.strictEqual( + (metrics[0].dataPoints[0].value as any).count, + requestCount + ); + assert.strictEqual( + metrics[0].dataPoints[0].attributes[SemanticAttributes.HTTP_SCHEME], + 'http' + ); + assert.strictEqual( + metrics[0].dataPoints[0].attributes[SemanticAttributes.HTTP_METHOD], + 'GET' + ); + assert.strictEqual( + metrics[0].dataPoints[0].attributes[SemanticAttributes.HTTP_FLAVOR], + '1.1' + ); + assert.strictEqual( + metrics[0].dataPoints[0].attributes[SemanticAttributes.NET_HOST_NAME], + 'localhost' + ); + assert.strictEqual( + metrics[0].dataPoints[0].attributes[SemanticAttributes.HTTP_STATUS_CODE], + 200 + ); + assert.strictEqual( + metrics[0].dataPoints[0].attributes[SemanticAttributes.NET_HOST_PORT], + 22346 + ); assert.strictEqual(metrics[1].dataPointType, DataPointType.HISTOGRAM); - assert.strictEqual(metrics[1].descriptor.description, 'measures the duration of the outbound HTTP requests'); + assert.strictEqual( + metrics[1].descriptor.description, + 'measures the duration of the outbound HTTP requests' + ); assert.strictEqual(metrics[1].descriptor.name, 'http.client.duration'); assert.strictEqual(metrics[1].descriptor.unit, 'ms'); assert.strictEqual(metrics[1].dataPoints.length, 1); - assert.strictEqual((metrics[1].dataPoints[0].value as any).count, requestCount); - assert.strictEqual(metrics[1].dataPoints[0].attributes[SemanticAttributes.HTTP_METHOD], 'GET'); - assert.strictEqual(metrics[1].dataPoints[0].attributes[SemanticAttributes.NET_PEER_NAME], 'localhost'); - assert.strictEqual(metrics[1].dataPoints[0].attributes[SemanticAttributes.NET_PEER_PORT], 22346); - assert.strictEqual(metrics[1].dataPoints[0].attributes[SemanticAttributes.HTTP_STATUS_CODE], 200); - assert.strictEqual(metrics[1].dataPoints[0].attributes[SemanticAttributes.HTTP_FLAVOR], '1.1'); + assert.strictEqual( + (metrics[1].dataPoints[0].value as any).count, + requestCount + ); + assert.strictEqual( + metrics[1].dataPoints[0].attributes[SemanticAttributes.HTTP_METHOD], + 'GET' + ); + assert.strictEqual( + metrics[1].dataPoints[0].attributes[SemanticAttributes.NET_PEER_NAME], + 'localhost' + ); + assert.strictEqual( + metrics[1].dataPoints[0].attributes[SemanticAttributes.NET_PEER_PORT], + 22346 + ); + assert.strictEqual( + metrics[1].dataPoints[0].attributes[SemanticAttributes.HTTP_STATUS_CODE], + 200 + ); + assert.strictEqual( + metrics[1].dataPoints[0].attributes[SemanticAttributes.HTTP_FLAVOR], + '1.1' + ); }); }); diff --git a/experimental/packages/opentelemetry-instrumentation-http/test/functionals/https-disable.test.ts b/experimental/packages/opentelemetry-instrumentation-http/test/functionals/https-disable.test.ts index f87fa54dd0..b04259366b 100644 --- a/experimental/packages/opentelemetry-instrumentation-http/test/functionals/https-disable.test.ts +++ b/experimental/packages/opentelemetry-instrumentation-http/test/functionals/https-disable.test.ts @@ -28,7 +28,11 @@ instrumentation.disable(); import * 
as https from 'https'; import { httpsRequest } from '../utils/httpsRequest'; -import { INVALID_SPAN_CONTEXT, trace, TracerProvider } from '@opentelemetry/api'; +import { + INVALID_SPAN_CONTEXT, + trace, + TracerProvider, +} from '@opentelemetry/api'; describe('HttpsInstrumentation', () => { let server: https.Server; @@ -41,9 +45,11 @@ describe('HttpsInstrumentation', () => { before(() => { provider = { getTracer: () => { - startSpanStub = sinon.stub().returns(trace.wrapSpanContext(INVALID_SPAN_CONTEXT)); + startSpanStub = sinon + .stub() + .returns(trace.wrapSpanContext(INVALID_SPAN_CONTEXT)); return { startSpan: startSpanStub } as any; - } + }, }; nock.cleanAll(); nock.enableNetConnect(); diff --git a/experimental/packages/opentelemetry-instrumentation-http/test/functionals/https-enable.test.ts b/experimental/packages/opentelemetry-instrumentation-http/test/functionals/https-enable.test.ts index 5cfe4ac953..8ca18c5e98 100644 --- a/experimental/packages/opentelemetry-instrumentation-http/test/functionals/https-enable.test.ts +++ b/experimental/packages/opentelemetry-instrumentation-http/test/functionals/https-enable.test.ts @@ -151,8 +151,8 @@ describe('HttpsInstrumentation', () => { `${protocol}://${hostname}:${serverPort}${pathname}`, { headers: { - 'user-agent': 'tester' - } + 'user-agent': 'tester', + }, } ); const spans = memoryExporter.getFinishedSpans(); @@ -193,7 +193,9 @@ describe('HttpsInstrumentation', () => { (url: string) => url.endsWith('/ignored/function'), ], ignoreIncomingRequestHook: request => { - return request.headers['user-agent']?.match('ignored-string') != null; + return ( + request.headers['user-agent']?.match('ignored-string') != null + ); }, ignoreOutgoingUrls: [ `${protocol}://${hostname}:${serverPort}/ignored/string`, @@ -202,7 +204,10 @@ describe('HttpsInstrumentation', () => { ], ignoreOutgoingRequestHook: request => { if (request.headers?.['user-agent'] != null) { - return `${request.headers['user-agent']}`.match('ignored-string') != null; + return ( + `${request.headers['user-agent']}`.match('ignored-string') != + null + ); } return false; }, @@ -375,36 +380,39 @@ describe('HttpsInstrumentation', () => { ); const name = 'TestRootSpan'; const span = tracer.startSpan(name); - return context.with(trace.setSpan(context.active(), span), async () => { - const result = await httpsRequest.get( - `${protocol}://${hostname}${testPath}` - ); - span.end(); - const spans = memoryExporter.getFinishedSpans(); - const [reqSpan, localSpan] = spans; - const validations = { - hostname, - httpStatusCode: result.statusCode!, - httpMethod: 'GET', - pathname: testPath, - resHeaders: result.resHeaders, - reqHeaders: result.reqHeaders, - component: 'https', - }; - - assert.ok(localSpan.name.indexOf('TestRootSpan') >= 0); - assert.strictEqual(spans.length, 2); - assert.strictEqual(reqSpan.name, 'HTTPS GET'); - assert.strictEqual( - localSpan.spanContext().traceId, - reqSpan.spanContext().traceId - ); - assertSpan(reqSpan, SpanKind.CLIENT, validations); - assert.notStrictEqual( - localSpan.spanContext().spanId, - reqSpan.spanContext().spanId - ); - }); + return context.with( + trace.setSpan(context.active(), span), + async () => { + const result = await httpsRequest.get( + `${protocol}://${hostname}${testPath}` + ); + span.end(); + const spans = memoryExporter.getFinishedSpans(); + const [reqSpan, localSpan] = spans; + const validations = { + hostname, + httpStatusCode: result.statusCode!, + httpMethod: 'GET', + pathname: testPath, + resHeaders: result.resHeaders, + reqHeaders: 
result.reqHeaders, + component: 'https', + }; + + assert.ok(localSpan.name.indexOf('TestRootSpan') >= 0); + assert.strictEqual(spans.length, 2); + assert.strictEqual(reqSpan.name, 'HTTPS GET'); + assert.strictEqual( + localSpan.spanContext().traceId, + reqSpan.spanContext().traceId + ); + assertSpan(reqSpan, SpanKind.CLIENT, validations); + assert.notStrictEqual( + localSpan.spanContext().spanId, + reqSpan.spanContext().spanId + ); + } + ); }); } @@ -447,22 +455,16 @@ describe('HttpsInstrumentation', () => { const testValue = 'ignored-string'; await Promise.all([ - httpsRequest.get( - `${protocol}://${hostname}:${serverPort}`, - { - headers: { - 'user-agent': testValue - } - } - ), - httpsRequest.get( - `${protocol}://${hostname}:${serverPort}`, - { - headers: { - 'uSeR-aGeNt': testValue - } - } - ) + httpsRequest.get(`${protocol}://${hostname}:${serverPort}`, { + headers: { + 'user-agent': testValue, + }, + }), + httpsRequest.get(`${protocol}://${hostname}:${serverPort}`, { + headers: { + 'uSeR-aGeNt': testValue, + }, + }), ]); const spans = memoryExporter.getFinishedSpans(); assert.strictEqual(spans.length, 0); diff --git a/experimental/packages/opentelemetry-instrumentation-http/test/functionals/https-package.test.ts b/experimental/packages/opentelemetry-instrumentation-http/test/functionals/https-package.test.ts index 3fd164c243..28e9fb9804 100644 --- a/experimental/packages/opentelemetry-instrumentation-http/test/functionals/https-package.test.ts +++ b/experimental/packages/opentelemetry-instrumentation-http/test/functionals/https-package.test.ts @@ -101,10 +101,10 @@ describe('Packages', () => { const urlparsed = url.parse( name === 'got' && process.versions.node.startsWith('12') ? // there is an issue with got 9.6 version and node 12 when redirecting so url above will not work - // https://github.com/nock/nock/pull/1551 - // https://github.com/sindresorhus/got/commit/bf1aa5492ae2bc78cbbec6b7d764906fb156e6c2#diff-707a4781d57c42085155dcb27edb9ccbR258 - // TODO: check if this is still the case when new version - 'https://www.google.com' + // https://github.com/nock/nock/pull/1551 + // https://github.com/sindresorhus/got/commit/bf1aa5492ae2bc78cbbec6b7d764906fb156e6c2#diff-707a4781d57c42085155dcb27edb9ccbR258 + // TODO: check if this is still the case when new version + 'https://www.google.com' : 'https://www.google.com/search?q=axios&oq=axios&aqs=chrome.0.69i59l2j0l3j69i60.811j0j7&sourceid=chrome&ie=UTF-8' ); const result = await httpPackage.get(urlparsed.href!); diff --git a/experimental/packages/opentelemetry-instrumentation-http/test/functionals/utils.test.ts b/experimental/packages/opentelemetry-instrumentation-http/test/functionals/utils.test.ts index 689d97f70e..125585b75a 100644 --- a/experimental/packages/opentelemetry-instrumentation-http/test/functionals/utils.test.ts +++ b/experimental/packages/opentelemetry-instrumentation-http/test/functionals/utils.test.ts @@ -28,12 +28,12 @@ import { IncomingMessage, ServerResponse } from 'http'; import { Socket } from 'net'; import * as sinon from 'sinon'; import * as url from 'url'; -import {IgnoreMatcher, ParsedRequestOptions} from '../../src/types'; +import { IgnoreMatcher, ParsedRequestOptions } from '../../src/types'; import * as utils from '../../src/utils'; import { AttributeNames } from '../../src/enums/AttributeNames'; import { RPCType, setRPCMetadata } from '@opentelemetry/core'; import { AsyncHooksContextManager } from '@opentelemetry/context-async-hooks'; -import {extractHostnameAndPort} from '../../src/utils'; +import { 
extractHostnameAndPort } from '../../src/utils'; describe('Utility', () => { describe('parseResponseStatus()', () => { @@ -119,7 +119,7 @@ describe('Utility', () => { it('should throw if type is unknown', () => { try { - utils.satisfiesPattern('/TeSt/1', (true as unknown) as IgnoreMatcher); + utils.satisfiesPattern('/TeSt/1', true as unknown as IgnoreMatcher); assert.fail(); } catch (error) { assert.strictEqual(error instanceof TypeError, true); @@ -293,7 +293,7 @@ describe('Utility', () => { setRPCMetadata(context.active(), { type: RPCType.HTTP, route: '/user/:id', - span: (null as unknown) as Span, + span: null as unknown as Span, }), () => { const attributes = utils.getIncomingRequestAttributesOnResponse( @@ -466,13 +466,15 @@ describe('Utility', () => { it('should not set http.route in http span attributes', () => { const request = { url: 'http://hostname/user/:id', - method: 'GET' + method: 'GET', } as IncomingMessage; request.headers = { 'user-agent': 'chrome', - 'x-forwarded-for': ', , ' + 'x-forwarded-for': ', , ', }; - const attributes = utils.getIncomingRequestAttributes(request, { component: 'http'}); + const attributes = utils.getIncomingRequestAttributes(request, { + component: 'http', + }); assert.strictEqual(attributes[SemanticAttributes.HTTP_ROUTE], undefined); }); }); @@ -493,13 +495,23 @@ describe('Utility', () => { it('should set attributes for request and response keys', () => { utils.headerCapture('request', ['Origin'])(span, () => 'localhost'); utils.headerCapture('response', ['Cookie'])(span, () => 'token=123'); - assert.deepStrictEqual(span.attributes['http.request.header.origin'], ['localhost']); - assert.deepStrictEqual(span.attributes['http.response.header.cookie'], ['token=123']); + assert.deepStrictEqual(span.attributes['http.request.header.origin'], [ + 'localhost', + ]); + assert.deepStrictEqual(span.attributes['http.response.header.cookie'], [ + 'token=123', + ]); }); it('should set attributes for multiple values', () => { - utils.headerCapture('request', ['Origin'])(span, () => ['localhost', 'www.example.com']); - assert.deepStrictEqual(span.attributes['http.request.header.origin'], ['localhost', 'www.example.com']); + utils.headerCapture('request', ['Origin'])(span, () => [ + 'localhost', + 'www.example.com', + ]); + assert.deepStrictEqual(span.attributes['http.request.header.origin'], [ + 'localhost', + 'www.example.com', + ]); }); it('sets attributes for multiple headers', () => { @@ -515,13 +527,18 @@ describe('Utility', () => { return undefined; }); - assert.deepStrictEqual(span.attributes['http.request.header.origin'], ['localhost']); + assert.deepStrictEqual(span.attributes['http.request.header.origin'], [ + 'localhost', + ]); assert.deepStrictEqual(span.attributes['http.request.header.foo'], [42]); }); it('should normalize header names', () => { utils.headerCapture('request', ['X-Forwarded-For'])(span, () => 'foo'); - assert.deepStrictEqual(span.attributes['http.request.header.x_forwarded_for'], ['foo']); + assert.deepStrictEqual( + span.attributes['http.request.header.x_forwarded_for'], + ['foo'] + ); }); it('ignores non-existent headers', () => { @@ -533,60 +550,77 @@ describe('Utility', () => { return undefined; }); - assert.deepStrictEqual(span.attributes['http.request.header.origin'], ['localhost']); - assert.deepStrictEqual(span.attributes['http.request.header.accept'], undefined); + assert.deepStrictEqual(span.attributes['http.request.header.origin'], [ + 'localhost', + ]); + assert.deepStrictEqual( + 
span.attributes['http.request.header.accept'], + undefined + ); }); }); describe('extractHostnameAndPort', () => { it('should return the hostname and port defined in the parsedOptions', () => { - type tmpParsedOption = Pick; + type tmpParsedOption = Pick< + ParsedRequestOptions, + 'hostname' | 'host' | 'port' | 'protocol' + >; const parsedOption: tmpParsedOption = { hostname: 'www.google.com', port: '80', host: 'www.google.com', - protocol: 'http:' + protocol: 'http:', }; - const {hostname, port} = extractHostnameAndPort(parsedOption); + const { hostname, port } = extractHostnameAndPort(parsedOption); assert.strictEqual(hostname, parsedOption.hostname); assert.strictEqual(port, parsedOption.port); }); it('should return the hostname and port based on host field defined in the parsedOptions when hostname and port are missing', () => { - type tmpParsedOption = Pick; + type tmpParsedOption = Pick< + ParsedRequestOptions, + 'hostname' | 'host' | 'port' | 'protocol' + >; const parsedOption: tmpParsedOption = { hostname: null, port: null, host: 'www.google.com:8181', - protocol: 'http:' + protocol: 'http:', }; - const {hostname, port} = extractHostnameAndPort(parsedOption); + const { hostname, port } = extractHostnameAndPort(parsedOption); assert.strictEqual(hostname, 'www.google.com'); assert.strictEqual(port, '8181'); }); it('should infer the port number based on protocol https when can not extract it from host field', () => { - type tmpParsedOption = Pick; + type tmpParsedOption = Pick< + ParsedRequestOptions, + 'hostname' | 'host' | 'port' | 'protocol' + >; const parsedOption: tmpParsedOption = { hostname: null, port: null, host: 'www.google.com', - protocol: 'https:' + protocol: 'https:', }; - const {hostname, port} = extractHostnameAndPort(parsedOption); + const { hostname, port } = extractHostnameAndPort(parsedOption); assert.strictEqual(hostname, 'www.google.com'); assert.strictEqual(port, '443'); }); it('should infer the port number based on protocol http when can not extract it from host field', () => { - type tmpParsedOption = Pick; + type tmpParsedOption = Pick< + ParsedRequestOptions, + 'hostname' | 'host' | 'port' | 'protocol' + >; const parsedOption: tmpParsedOption = { hostname: null, port: null, host: 'www.google.com', - protocol: 'http:' + protocol: 'http:', }; - const {hostname, port} = extractHostnameAndPort(parsedOption); + const { hostname, port } = extractHostnameAndPort(parsedOption); assert.strictEqual(hostname, 'www.google.com'); assert.strictEqual(port, '80'); }); diff --git a/experimental/packages/opentelemetry-instrumentation-http/test/integrations/http-enable.test.ts b/experimental/packages/opentelemetry-instrumentation-http/test/integrations/http-enable.test.ts index 307dfcfea8..b6a8d18d87 100644 --- a/experimental/packages/opentelemetry-instrumentation-http/test/integrations/http-enable.test.ts +++ b/experimental/packages/opentelemetry-instrumentation-http/test/integrations/http-enable.test.ts @@ -318,7 +318,7 @@ describe('HttpInstrumentation Integration tests', () => { `${protocol}://localhost:${mockServerPort}/`, options, (resp: http.IncomingMessage) => { - const res = (resp as unknown) as http.IncomingMessage & { + const res = resp as unknown as http.IncomingMessage & { req: http.IncomingMessage; }; @@ -371,7 +371,8 @@ describe('HttpInstrumentation Integration tests', () => { try { await httpRequest.get( - `${protocol}://localhost:${mockServerPort}/timeout`, {timeout: 1} + `${protocol}://localhost:${mockServerPort}/timeout`, + { timeout: 1 } ); } catch (err) { 
assert.ok(err.message.startsWith('timeout')); @@ -381,7 +382,10 @@ describe('HttpInstrumentation Integration tests', () => { const span = spans.find(s => s.kind === SpanKind.CLIENT); assert.ok(span); assert.strictEqual(span.name, 'HTTP GET'); - assert.strictEqual(span.attributes[SemanticAttributes.HTTP_HOST], `localhost:${mockServerPort}`); + assert.strictEqual( + span.attributes[SemanticAttributes.HTTP_HOST], + `localhost:${mockServerPort}` + ); }); }); }); diff --git a/experimental/packages/opentelemetry-instrumentation-http/test/integrations/https-enable.test.ts b/experimental/packages/opentelemetry-instrumentation-http/test/integrations/https-enable.test.ts index f1ae83c030..08d636b164 100644 --- a/experimental/packages/opentelemetry-instrumentation-http/test/integrations/https-enable.test.ts +++ b/experimental/packages/opentelemetry-instrumentation-http/test/integrations/https-enable.test.ts @@ -322,7 +322,7 @@ describe('HttpsInstrumentation Integration tests', () => { `${protocol}://localhost:${mockServerPort}/`, options, (resp: http.IncomingMessage) => { - const res = (resp as unknown) as http.IncomingMessage & { + const res = resp as unknown as http.IncomingMessage & { req: http.IncomingMessage; }; diff --git a/experimental/packages/opentelemetry-instrumentation-http/test/utils/DummyPropagation.ts b/experimental/packages/opentelemetry-instrumentation-http/test/utils/DummyPropagation.ts index 4dbda67c16..d5206813fa 100644 --- a/experimental/packages/opentelemetry-instrumentation-http/test/utils/DummyPropagation.ts +++ b/experimental/packages/opentelemetry-instrumentation-http/test/utils/DummyPropagation.ts @@ -15,7 +15,8 @@ */ import { Context, - TextMapPropagator, trace, + TextMapPropagator, + trace, TraceFlags, } from '@opentelemetry/api'; import type * as http from 'http'; diff --git a/experimental/packages/opentelemetry-instrumentation-http/test/utils/TestMetricReader.ts b/experimental/packages/opentelemetry-instrumentation-http/test/utils/TestMetricReader.ts index 53ea687d64..12a59b1175 100644 --- a/experimental/packages/opentelemetry-instrumentation-http/test/utils/TestMetricReader.ts +++ b/experimental/packages/opentelemetry-instrumentation-http/test/utils/TestMetricReader.ts @@ -14,15 +14,13 @@ * limitations under the License. */ -import { - MetricReader, - PushMetricExporter -} from '@opentelemetry/sdk-metrics'; +import { MetricReader, PushMetricExporter } from '@opentelemetry/sdk-metrics'; export class TestMetricReader extends MetricReader { constructor(private _exporter: PushMetricExporter) { super({ - aggregationTemporalitySelector: _exporter.selectAggregationTemporality?.bind(_exporter), + aggregationTemporalitySelector: + _exporter.selectAggregationTemporality?.bind(_exporter), }); } diff --git a/experimental/packages/opentelemetry-instrumentation-http/test/utils/assertSpan.ts b/experimental/packages/opentelemetry-instrumentation-http/test/utils/assertSpan.ts index 3b9b1a402d..2576b599d6 100644 --- a/experimental/packages/opentelemetry-instrumentation-http/test/utils/assertSpan.ts +++ b/experimental/packages/opentelemetry-instrumentation-http/test/utils/assertSpan.ts @@ -13,7 +13,12 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -import { isValidSpanId, SpanKind, SpanStatus, Exception } from '@opentelemetry/api'; +import { + isValidSpanId, + SpanKind, + SpanStatus, + Exception, +} from '@opentelemetry/api'; import { hrTimeToNanoseconds } from '@opentelemetry/core'; import { ReadableSpan } from '@opentelemetry/sdk-trace-base'; import { SemanticAttributes } from '@opentelemetry/semantic-conventions'; @@ -73,18 +78,20 @@ export const assertSpan = ( const eventAttributes = span.events[0].attributes; assert.ok(eventAttributes != null); - assert.deepStrictEqual( - Object.keys(eventAttributes), - ['exception.type', 'exception.message', 'exception.stacktrace'] - ); + assert.deepStrictEqual(Object.keys(eventAttributes), [ + 'exception.type', + 'exception.message', + 'exception.stacktrace', + ]); } else { assert.strictEqual(span.events.length, 0); } assert.deepStrictEqual( span.status, - validations.forceStatus || - { code: utils.parseResponseStatus(span.kind, validations.httpStatusCode) } + validations.forceStatus || { + code: utils.parseResponseStatus(span.kind, validations.httpStatusCode), + } ); assert.ok(span.endTime, 'must be finished'); diff --git a/experimental/packages/opentelemetry-instrumentation-http/test/utils/httpRequest.ts b/experimental/packages/opentelemetry-instrumentation-http/test/utils/httpRequest.ts index b2c2d828cb..57ca54574a 100644 --- a/experimental/packages/opentelemetry-instrumentation-http/test/utils/httpRequest.ts +++ b/experimental/packages/opentelemetry-instrumentation-http/test/utils/httpRequest.ts @@ -32,7 +32,7 @@ function get(input: any, options?: any): GetResult { let req: http.ClientRequest; function onGetResponseCb(resp: http.IncomingMessage): void { - const res = (resp as unknown) as http.IncomingMessage & { + const res = resp as unknown as http.IncomingMessage & { req: http.IncomingMessage; }; let data = ''; diff --git a/experimental/packages/opentelemetry-instrumentation-http/test/utils/httpsRequest.ts b/experimental/packages/opentelemetry-instrumentation-http/test/utils/httpsRequest.ts index f75cf1e566..f00174e38b 100644 --- a/experimental/packages/opentelemetry-instrumentation-http/test/utils/httpsRequest.ts +++ b/experimental/packages/opentelemetry-instrumentation-http/test/utils/httpsRequest.ts @@ -36,7 +36,7 @@ function get(input: any, options?: any): GetResult { let req: http.ClientRequest; function onGetResponseCb(resp: http.IncomingMessage): void { - const res = (resp as unknown) as http.IncomingMessage & { + const res = resp as unknown as http.IncomingMessage & { req: http.IncomingMessage; }; let data = ''; diff --git a/experimental/packages/opentelemetry-instrumentation-xml-http-request/src/xhr.ts b/experimental/packages/opentelemetry-instrumentation-xml-http-request/src/xhr.ts index 59d03b9c85..621dc73f89 100644 --- a/experimental/packages/opentelemetry-instrumentation-xml-http-request/src/xhr.ts +++ b/experimental/packages/opentelemetry-instrumentation-xml-http-request/src/xhr.ts @@ -89,11 +89,7 @@ export class XMLHttpRequestInstrumentation extends InstrumentationBase(); constructor(config?: XMLHttpRequestInstrumentationConfig) { - super( - '@opentelemetry/instrumentation-xml-http-request', - VERSION, - config - ); + super('@opentelemetry/instrumentation-xml-http-request', VERSION, config); } init() {} @@ -181,8 +177,8 @@ export class XMLHttpRequestInstrumentation extends InstrumentationBase applyCustomAttributesOnSpan(span, xhr), @@ -245,7 +241,7 @@ export class XMLHttpRequestInstrumentation extends InstrumentationBase(); this._usedResources = new WeakSet(); } @@ 
-274,7 +270,7 @@ export class XMLHttpRequestInstrumentation extends InstrumentationBase { ); spyEntries = sinon.stub( - (performance as unknown) as Performance, + performance as unknown as Performance, 'getEntriesByType' ); spyEntries.withArgs('resource').returns(resources); @@ -253,7 +250,7 @@ describe('xhr', () => { dummySpanExporter = new DummySpanExporter(); exportSpy = sinon.stub(dummySpanExporter, 'export'); clearResourceTimingsSpy = sinon.stub( - (performance as unknown) as Performance, + performance as unknown as Performance, 'clearResourceTimings' ); webTracerProviderWithZone.addSpanProcessor( @@ -261,27 +258,30 @@ describe('xhr', () => { ); rootSpan = webTracerWithZone.startSpan('root'); - api.context.with(api.trace.setSpan(api.context.active(), rootSpan), () => { - void getData( - new XMLHttpRequest(), - fileUrl, - () => { - fakeNow = 100; - }, - testAsync - ).then(() => { - fakeNow = 0; - sinon.clock.tick(1000); - done(); - }); - assert.strictEqual(requests.length, 1, 'request not called'); + api.context.with( + api.trace.setSpan(api.context.active(), rootSpan), + () => { + void getData( + new XMLHttpRequest(), + fileUrl, + () => { + fakeNow = 100; + }, + testAsync + ).then(() => { + fakeNow = 0; + sinon.clock.tick(1000); + done(); + }); + assert.strictEqual(requests.length, 1, 'request not called'); - requests[0].respond( - 200, - { 'Content-Type': 'application/json' }, - '{"foo":"bar"}' - ); - }); + requests[0].respond( + 200, + { 'Content-Type': 'application/json' }, + '{"foo":"bar"}' + ); + } + ); }; beforeEach(done => { @@ -563,7 +563,7 @@ describe('xhr', () => { describe( 'AND origin does NOT match window.location but match with' + - ' propagateTraceHeaderCorsUrls', + ' propagateTraceHeaderCorsUrls', () => { beforeEach(done => { clearData(); @@ -596,7 +596,7 @@ describe('xhr', () => { ); describe( 'AND origin does NOT match window.location And does NOT match' + - ' with propagateTraceHeaderCorsUrls', + ' with propagateTraceHeaderCorsUrls', () => { let spyDebug: sinon.SinonSpy; beforeEach(done => { @@ -780,7 +780,11 @@ describe('xhr', () => { `Wrong number of spans: ${exportSpy.args.length}` ); - assert.strictEqual(events.length, 12, `number of events is wrong: ${events.length}`); + assert.strictEqual( + events.length, + 12, + `number of events is wrong: ${events.length}` + ); assert.strictEqual( events[8].name, PTN.REQUEST_START, @@ -799,7 +803,6 @@ describe('xhr', () => { ); }); }); - }); describe('when request is NOT successful', () => { @@ -834,7 +837,7 @@ describe('xhr', () => { ); spyEntries = sinon.stub( - (performance as unknown) as Performance, + performance as unknown as Performance, 'getEntriesByType' ); spyEntries.withArgs('resource').returns(resources); @@ -865,75 +868,85 @@ describe('xhr', () => { }); function timedOutRequest(done: any) { - api.context.with(api.trace.setSpan(api.context.active(), rootSpan), () => { - void getData( - new XMLHttpRequest(), - url, - () => { - sinon.clock.tick(XHR_TIMEOUT); - }, - testAsync - ).then(() => { - fakeNow = 0; - sinon.clock.tick(1000); - done(); - }); - }); - } - - function abortedRequest(done: any) { - api.context.with(api.trace.setSpan(api.context.active(), rootSpan), () => { - void getData(new XMLHttpRequest(), url, () => { - }, testAsync).then( - () => { + api.context.with( + api.trace.setSpan(api.context.active(), rootSpan), + () => { + void getData( + new XMLHttpRequest(), + url, + () => { + sinon.clock.tick(XHR_TIMEOUT); + }, + testAsync + ).then(() => { fakeNow = 0; sinon.clock.tick(1000); done(); - } - ); - - 
assert.strictEqual(requests.length, 1, 'request not called'); - requests[0].abort(); - }); + }); + } + ); } - function erroredRequest(done: any) { - api.context.with(api.trace.setSpan(api.context.active(), rootSpan), () => { - void getData( - new XMLHttpRequest(), - url, - () => { - fakeNow = 100; - }, - testAsync - ).then(() => { - fakeNow = 0; - sinon.clock.tick(1000); - done(); - }); - assert.strictEqual(requests.length, 1, 'request not called'); - requests[0].respond( - 400, - { 'Content-Type': 'text/plain' }, - 'Bad Request' - ); - }); + function abortedRequest(done: any) { + api.context.with( + api.trace.setSpan(api.context.active(), rootSpan), + () => { + void getData(new XMLHttpRequest(), url, () => {}, testAsync).then( + () => { + fakeNow = 0; + sinon.clock.tick(1000); + done(); + } + ); + + assert.strictEqual(requests.length, 1, 'request not called'); + requests[0].abort(); + } + ); } - function networkErrorRequest(done: any) { - api.context.with(api.trace.setSpan(api.context.active(), rootSpan), () => { - void getData(new XMLHttpRequest(), url, () => { - }, testAsync).then( - () => { + function erroredRequest(done: any) { + api.context.with( + api.trace.setSpan(api.context.active(), rootSpan), + () => { + void getData( + new XMLHttpRequest(), + url, + () => { + fakeNow = 100; + }, + testAsync + ).then(() => { fakeNow = 0; sinon.clock.tick(1000); done(); - } - ); + }); + assert.strictEqual(requests.length, 1, 'request not called'); + requests[0].respond( + 400, + { 'Content-Type': 'text/plain' }, + 'Bad Request' + ); + } + ); + } - assert.strictEqual(requests.length, 1, 'request not called'); - requests[0].error(); - }); + function networkErrorRequest(done: any) { + api.context.with( + api.trace.setSpan(api.context.active(), rootSpan), + () => { + void getData(new XMLHttpRequest(), url, () => {}, testAsync).then( + () => { + fakeNow = 0; + sinon.clock.tick(1000); + done(); + } + ); + + assert.strictEqual(requests.length, 1, 'request not called'); + requests[0].error(); + } + ); } describe('when request loads and receives an error code', () => { diff --git a/experimental/packages/opentelemetry-instrumentation/src/autoLoaderUtils.ts b/experimental/packages/opentelemetry-instrumentation/src/autoLoaderUtils.ts index a6d63de472..3dc1927214 100644 --- a/experimental/packages/opentelemetry-instrumentation/src/autoLoaderUtils.ts +++ b/experimental/packages/opentelemetry-instrumentation/src/autoLoaderUtils.ts @@ -76,6 +76,8 @@ export function enableInstrumentations( * Disable instrumentations * @param instrumentations */ -export function disableInstrumentations(instrumentations: Instrumentation[]): void { +export function disableInstrumentations( + instrumentations: Instrumentation[] +): void { instrumentations.forEach(instrumentation => instrumentation.disable()); } diff --git a/experimental/packages/opentelemetry-instrumentation/src/instrumentation.ts b/experimental/packages/opentelemetry-instrumentation/src/instrumentation.ts index d562ed9056..4b729fd439 100644 --- a/experimental/packages/opentelemetry-instrumentation/src/instrumentation.ts +++ b/experimental/packages/opentelemetry-instrumentation/src/instrumentation.ts @@ -32,7 +32,8 @@ import * as types from './types'; * Base abstract internal class for instrumenting node and web plugins */ export abstract class InstrumentationAbstract -implements types.Instrumentation { + implements types.Instrumentation +{ protected _config: types.InstrumentationConfig; private _tracer: Tracer; diff --git 
a/experimental/packages/opentelemetry-instrumentation/src/platform/browser/instrumentation.ts b/experimental/packages/opentelemetry-instrumentation/src/platform/browser/instrumentation.ts index 3f19df8ac5..0458357a73 100644 --- a/experimental/packages/opentelemetry-instrumentation/src/platform/browser/instrumentation.ts +++ b/experimental/packages/opentelemetry-instrumentation/src/platform/browser/instrumentation.ts @@ -22,7 +22,8 @@ import * as types from '../../types'; */ export abstract class InstrumentationBase extends InstrumentationAbstract - implements types.Instrumentation { + implements types.Instrumentation +{ constructor( instrumentationName: string, instrumentationVersion: string, diff --git a/experimental/packages/opentelemetry-instrumentation/src/platform/node/ModuleNameTrie.ts b/experimental/packages/opentelemetry-instrumentation/src/platform/node/ModuleNameTrie.ts index 3230fea99c..19b3e0912a 100644 --- a/experimental/packages/opentelemetry-instrumentation/src/platform/node/ModuleNameTrie.ts +++ b/experimental/packages/opentelemetry-instrumentation/src/platform/node/ModuleNameTrie.ts @@ -22,7 +22,7 @@ export const ModuleNameSeparator = '/'; * Node in a `ModuleNameTrie` */ class ModuleNameTrieNode { - hooks: Array<{ hook: Hooked, insertedId: number }> = []; + hooks: Array<{ hook: Hooked; insertedId: number }> = []; children: Map = new Map(); } @@ -59,7 +59,10 @@ export class ModuleNameTrie { * @param {boolean} maintainInsertionOrder Whether to return the results in insertion order * @returns {Hooked[]} Matching hooks */ - search(moduleName: string, { maintainInsertionOrder }: { maintainInsertionOrder?: boolean } = {}): Hooked[] { + search( + moduleName: string, + { maintainInsertionOrder }: { maintainInsertionOrder?: boolean } = {} + ): Hooked[] { let trieNode = this._trie; const results: ModuleNameTrieNode['hooks'] = []; diff --git a/experimental/packages/opentelemetry-instrumentation/src/platform/node/RequireInTheMiddleSingleton.ts b/experimental/packages/opentelemetry-instrumentation/src/platform/node/RequireInTheMiddleSingleton.ts index 812db52b68..76089a142f 100644 --- a/experimental/packages/opentelemetry-instrumentation/src/platform/node/RequireInTheMiddleSingleton.ts +++ b/experimental/packages/opentelemetry-instrumentation/src/platform/node/RequireInTheMiddleSingleton.ts @@ -19,8 +19,8 @@ import * as path from 'path'; import { ModuleNameTrie, ModuleNameSeparator } from './ModuleNameTrie'; export type Hooked = { - moduleName: string - onRequire: RequireInTheMiddle.OnRequireFn + moduleName: string; + onRequire: RequireInTheMiddle.OnRequireFn; }; /** @@ -29,7 +29,14 @@ export type Hooked = { * * @type {boolean} */ -const isMocha = ['afterEach','after','beforeEach','before','describe','it'].every(fn => { +const isMocha = [ + 'afterEach', + 'after', + 'beforeEach', + 'before', + 'describe', + 'it', +].every(fn => { // @ts-expect-error TS7053: Element implicitly has an 'any' type return typeof global[fn] === 'function'; }); @@ -60,7 +67,9 @@ export class RequireInTheMiddleSingleton { // For internal files on Windows, `name` will use backslash as the path separator const normalizedModuleName = normalizePathSeparators(name); - const matches = this._moduleNameTrie.search(normalizedModuleName, { maintainInsertionOrder: true }); + const matches = this._moduleNameTrie.search(normalizedModuleName, { + maintainInsertionOrder: true, + }); for (const { onRequire } of matches) { exports = onRequire(exports, name, basedir); @@ -78,7 +87,10 @@ export class RequireInTheMiddleSingleton 
{ * @param {RequireInTheMiddle.OnRequireFn} onRequire Hook function * @returns {Hooked} Registered hook */ - register(moduleName: string, onRequire: RequireInTheMiddle.OnRequireFn): Hooked { + register( + moduleName: string, + onRequire: RequireInTheMiddle.OnRequireFn + ): Hooked { const hooked = { moduleName, onRequire }; this._moduleNameTrie.insert(hooked); return hooked; @@ -94,7 +106,8 @@ export class RequireInTheMiddleSingleton { // This prevents test suites from sharing a singleton if (isMocha) return new RequireInTheMiddleSingleton(); - return this._instance = this._instance ?? new RequireInTheMiddleSingleton(); + return (this._instance = + this._instance ?? new RequireInTheMiddleSingleton()); } } diff --git a/experimental/packages/opentelemetry-instrumentation/src/platform/node/instrumentation.ts b/experimental/packages/opentelemetry-instrumentation/src/platform/node/instrumentation.ts index d80985431c..ba10d49576 100644 --- a/experimental/packages/opentelemetry-instrumentation/src/platform/node/instrumentation.ts +++ b/experimental/packages/opentelemetry-instrumentation/src/platform/node/instrumentation.ts @@ -18,7 +18,10 @@ import * as types from '../../types'; import * as path from 'path'; import { satisfies } from 'semver'; import { InstrumentationAbstract } from '../../instrumentation'; -import { RequireInTheMiddleSingleton, Hooked } from './RequireInTheMiddleSingleton'; +import { + RequireInTheMiddleSingleton, + Hooked, +} from './RequireInTheMiddleSingleton'; import { InstrumentationModuleDefinition } from './types'; import { diag } from '@opentelemetry/api'; @@ -27,10 +30,12 @@ import { diag } from '@opentelemetry/api'; */ export abstract class InstrumentationBase extends InstrumentationAbstract - implements types.Instrumentation { + implements types.Instrumentation +{ private _modules: InstrumentationModuleDefinition[]; private _hooks: Hooked[] = []; - private _requireInTheMiddleSingleton: RequireInTheMiddleSingleton = RequireInTheMiddleSingleton.getInstance(); + private _requireInTheMiddleSingleton: RequireInTheMiddleSingleton = + RequireInTheMiddleSingleton.getInstance(); private _enabled = false; constructor( @@ -51,8 +56,8 @@ export abstract class InstrumentationBase if (this._modules.length === 0) { diag.debug( 'No modules instrumentation has been defined for ' + - `'${this.instrumentationName}@${this.instrumentationVersion}'` + - ', nothing will be patched' + `'${this.instrumentationName}@${this.instrumentationVersion}'` + + ', nothing will be patched' ); } @@ -68,7 +73,9 @@ export abstract class InstrumentationBase const resolvedModule = require.resolve(name); if (require.cache[resolvedModule]) { // Module is already cached, which means the instrumentation hook might not work - this._diag.warn(`Module ${name} has been loaded before ${this.instrumentationName} so it might not work, please initialize it before requiring ${name}`); + this._diag.warn( + `Module ${name} has been loaded before ${this.instrumentationName} so it might not work, please initialize it before requiring ${name}` + ); } } catch { // Module isn't available, we can simply skip @@ -124,17 +131,16 @@ export abstract class InstrumentationBase const files = module.files ?? 
[]; const supportedFileInstrumentations = files .filter(f => f.name === name) - .filter(f => isSupported(f.supportedVersions, version, module.includePrerelease)); - return supportedFileInstrumentations.reduce( - (patchedExports, file) => { - file.moduleExports = patchedExports; - if (this._enabled) { - return file.patch(patchedExports, module.moduleVersion); - } - return patchedExports; - }, - exports, - ); + .filter(f => + isSupported(f.supportedVersions, version, module.includePrerelease) + ); + return supportedFileInstrumentations.reduce((patchedExports, file) => { + file.moduleExports = patchedExports; + if (this._enabled) { + return file.patch(patchedExports, module.moduleVersion); + } + return patchedExports; + }, exports); } public enable(): void { @@ -165,7 +171,7 @@ export abstract class InstrumentationBase module.name, (exports, name, baseDir) => { return this._onRequire( - (module as unknown) as InstrumentationModuleDefinition< + module as unknown as InstrumentationModuleDefinition< typeof exports >, exports, @@ -201,7 +207,11 @@ export abstract class InstrumentationBase } } -function isSupported(supportedVersions: string[], version?: string, includePrerelease?: boolean): boolean { +function isSupported( + supportedVersions: string[], + version?: string, + includePrerelease?: boolean +): boolean { if (typeof version === 'undefined') { // If we don't have the version, accept the wildcard case only return supportedVersions.includes('*'); diff --git a/experimental/packages/opentelemetry-instrumentation/src/platform/node/instrumentationNodeModuleDefinition.ts b/experimental/packages/opentelemetry-instrumentation/src/platform/node/instrumentationNodeModuleDefinition.ts index 8d4885c7b9..e45a943a7f 100644 --- a/experimental/packages/opentelemetry-instrumentation/src/platform/node/instrumentationNodeModuleDefinition.ts +++ b/experimental/packages/opentelemetry-instrumentation/src/platform/node/instrumentationNodeModuleDefinition.ts @@ -20,7 +20,8 @@ import { } from './types'; export class InstrumentationNodeModuleDefinition -implements InstrumentationModuleDefinition { + implements InstrumentationModuleDefinition +{ files: InstrumentationModuleFile[]; constructor( public name: string, diff --git a/experimental/packages/opentelemetry-instrumentation/src/platform/node/instrumentationNodeModuleFile.ts b/experimental/packages/opentelemetry-instrumentation/src/platform/node/instrumentationNodeModuleFile.ts index 80b617c5d7..574f17257e 100644 --- a/experimental/packages/opentelemetry-instrumentation/src/platform/node/instrumentationNodeModuleFile.ts +++ b/experimental/packages/opentelemetry-instrumentation/src/platform/node/instrumentationNodeModuleFile.ts @@ -18,7 +18,8 @@ import { InstrumentationModuleFile } from './types'; import { normalize } from 'path'; export class InstrumentationNodeModuleFile -implements InstrumentationModuleFile { + implements InstrumentationModuleFile +{ public name: string; constructor( name: string, diff --git a/experimental/packages/opentelemetry-instrumentation/test/common/autoLoader.test.ts b/experimental/packages/opentelemetry-instrumentation/test/common/autoLoader.test.ts index c7bebf204c..aa8c5582a3 100644 --- a/experimental/packages/opentelemetry-instrumentation/test/common/autoLoader.test.ts +++ b/experimental/packages/opentelemetry-instrumentation/test/common/autoLoader.test.ts @@ -90,11 +90,9 @@ describe('autoLoader', () => { unload(); unload = undefined; } - instrumentation = new FooInstrumentation( - 'foo', - '1', - { enabled: false } - ); + 
instrumentation = new FooInstrumentation('foo', '1', { + enabled: false, + }); enableSpy = sinon.spy(instrumentation, 'enable'); setTracerProviderSpy = sinon.stub(instrumentation, 'setTracerProvider'); setMeterProviderSpy = sinon.stub(instrumentation, 'setMeterProvider'); diff --git a/experimental/packages/opentelemetry-instrumentation/test/node/InstrumentationBase.test.ts b/experimental/packages/opentelemetry-instrumentation/test/node/InstrumentationBase.test.ts index 062088886b..7671c4c975 100644 --- a/experimental/packages/opentelemetry-instrumentation/test/node/InstrumentationBase.test.ts +++ b/experimental/packages/opentelemetry-instrumentation/test/node/InstrumentationBase.test.ts @@ -16,7 +16,10 @@ import * as assert from 'assert'; import * as sinon from 'sinon'; -import { InstrumentationBase, InstrumentationModuleDefinition } from '../../src'; +import { + InstrumentationBase, + InstrumentationModuleDefinition, +} from '../../src'; const MODULE_NAME = 'test-module'; const MODULE_FILE_NAME = 'test-module-file'; @@ -145,8 +148,15 @@ describe('InstrumentationBase', () => { ); assert.strictEqual(instrumentationModule.moduleVersion, undefined); - assert.strictEqual(instrumentationModule.moduleExports, moduleExports); - sinon.assert.calledOnceWithExactly(modulePatchSpy, moduleExports, undefined); + assert.strictEqual( + instrumentationModule.moduleExports, + moduleExports + ); + sinon.assert.calledOnceWithExactly( + modulePatchSpy, + moduleExports, + undefined + ); }); }); }); @@ -166,11 +176,13 @@ describe('InstrumentationBase', () => { supportedVersions, name: MODULE_NAME, patch: modulePatchSpy as unknown, - files: [{ - name: MODULE_FILE_NAME, - supportedVersions, - patch: filePatchSpy as unknown - }] + files: [ + { + name: MODULE_FILE_NAME, + supportedVersions, + patch: filePatchSpy as unknown, + }, + ], } as InstrumentationModuleDefinition; // @ts-expect-error access internal property for testing @@ -196,11 +208,13 @@ describe('InstrumentationBase', () => { supportedVersions, name: MODULE_NAME, patch: modulePatchSpy as unknown, - files: [{ - name: MODULE_FILE_NAME, - supportedVersions, - patch: filePatchSpy as unknown - }] + files: [ + { + name: MODULE_FILE_NAME, + supportedVersions, + patch: filePatchSpy as unknown, + }, + ], } as InstrumentationModuleDefinition; // @ts-expect-error access internal property for testing @@ -212,9 +226,16 @@ describe('InstrumentationBase', () => { ); assert.strictEqual(instrumentationModule.moduleVersion, undefined); - assert.strictEqual(instrumentationModule.files[0].moduleExports, moduleExports); + assert.strictEqual( + instrumentationModule.files[0].moduleExports, + moduleExports + ); sinon.assert.notCalled(modulePatchSpy); - sinon.assert.calledOnceWithExactly(filePatchSpy, moduleExports, undefined); + sinon.assert.calledOnceWithExactly( + filePatchSpy, + moduleExports, + undefined + ); }); }); @@ -226,15 +247,18 @@ describe('InstrumentationBase', () => { supportedVersions, name: MODULE_NAME, patch: modulePatchSpy as unknown, - files: [{ - name: MODULE_FILE_NAME, - supportedVersions, - patch: filePatchSpy as unknown - }, { - name: MODULE_FILE_NAME, - supportedVersions, - patch: filePatchSpy as unknown - }] + files: [ + { + name: MODULE_FILE_NAME, + supportedVersions, + patch: filePatchSpy as unknown, + }, + { + name: MODULE_FILE_NAME, + supportedVersions, + patch: filePatchSpy as unknown, + }, + ], } as InstrumentationModuleDefinition; // @ts-expect-error access internal property for testing @@ -246,8 +270,14 @@ describe('InstrumentationBase', () => 
{ ); assert.strictEqual(instrumentationModule.moduleVersion, undefined); - assert.strictEqual(instrumentationModule.files[0].moduleExports, moduleExports); - assert.strictEqual(instrumentationModule.files[1].moduleExports, moduleExports); + assert.strictEqual( + instrumentationModule.files[0].moduleExports, + moduleExports + ); + assert.strictEqual( + instrumentationModule.files[1].moduleExports, + moduleExports + ); sinon.assert.notCalled(modulePatchSpy); sinon.assert.calledTwice(filePatchSpy); }); diff --git a/experimental/packages/opentelemetry-instrumentation/test/node/ModuleNameTrie.test.ts b/experimental/packages/opentelemetry-instrumentation/test/node/ModuleNameTrie.test.ts index c3d72c89d7..7418722e26 100644 --- a/experimental/packages/opentelemetry-instrumentation/test/node/ModuleNameTrie.test.ts +++ b/experimental/packages/opentelemetry-instrumentation/test/node/ModuleNameTrie.test.ts @@ -26,7 +26,7 @@ describe('ModuleNameTrie', () => { { moduleName: 'a/b', onRequire: () => {} }, { moduleName: 'a', onRequire: () => {} }, { moduleName: 'a/c', onRequire: () => {} }, - { moduleName: 'd', onRequire: () => {} } + { moduleName: 'd', onRequire: () => {} }, ] as Hooked[]; inserts.forEach(trie.insert.bind(trie)); @@ -39,10 +39,7 @@ describe('ModuleNameTrie', () => { }); it('should return a list of exact matches (more than one result)', () => { - assert.deepEqual(trie.search('a'), [ - inserts[0], - inserts[2] - ]); + assert.deepEqual(trie.search('a'), [inserts[0], inserts[2]]); }); describe('maintainInsertionOrder = false', () => { @@ -50,7 +47,7 @@ describe('ModuleNameTrie', () => { assert.deepEqual(trie.search('a/b'), [ inserts[0], inserts[2], - inserts[1] + inserts[1], ]); }); }); @@ -60,7 +57,7 @@ describe('ModuleNameTrie', () => { assert.deepEqual(trie.search('a/b', { maintainInsertionOrder: true }), [ inserts[0], inserts[1], - inserts[2] + inserts[2], ]); }); }); diff --git a/experimental/packages/opentelemetry-instrumentation/test/node/RequireInTheMiddleSingleton.test.ts b/experimental/packages/opentelemetry-instrumentation/test/node/RequireInTheMiddleSingleton.test.ts index 724dced720..8fddad4598 100644 --- a/experimental/packages/opentelemetry-instrumentation/test/node/RequireInTheMiddleSingleton.test.ts +++ b/experimental/packages/opentelemetry-instrumentation/test/node/RequireInTheMiddleSingleton.test.ts @@ -23,14 +23,15 @@ import { RequireInTheMiddleSingleton } from '../../src/platform/node/RequireInTh const requireInTheMiddleSingleton = RequireInTheMiddleSingleton.getInstance(); type AugmentedExports = { - __ritmOnRequires?: string[] + __ritmOnRequires?: string[]; }; -const makeOnRequiresStub = (label: string): sinon.SinonStub => sinon.stub().callsFake(((exports: AugmentedExports) => { - exports.__ritmOnRequires ??= []; - exports.__ritmOnRequires.push(label); - return exports; -}) as RequireInTheMiddle.OnRequireFn); +const makeOnRequiresStub = (label: string): sinon.SinonStub => + sinon.stub().callsFake(((exports: AugmentedExports) => { + exports.__ritmOnRequires ??= []; + exports.__ritmOnRequires.push(label); + return exports; + }) as RequireInTheMiddle.OnRequireFn); describe('RequireInTheMiddleSingleton', () => { describe('register', () => { @@ -43,11 +44,20 @@ describe('RequireInTheMiddleSingleton', () => { before(() => { requireInTheMiddleSingleton.register('fs', onRequireFsStub); - requireInTheMiddleSingleton.register('fs/promises', onRequireFsPromisesStub); + requireInTheMiddleSingleton.register( + 'fs/promises', + onRequireFsPromisesStub + ); 
requireInTheMiddleSingleton.register('codecov', onRequireCodecovStub); - requireInTheMiddleSingleton.register('codecov/lib/codecov.js', onRequireCodecovLibStub); + requireInTheMiddleSingleton.register( + 'codecov/lib/codecov.js', + onRequireCodecovLibStub + ); requireInTheMiddleSingleton.register('cpx', onRequireCpxStub); - requireInTheMiddleSingleton.register('cpx/lib/copy-sync.js', onRequireCpxLibStub); + requireInTheMiddleSingleton.register( + 'cpx/lib/copy-sync.js', + onRequireCpxLibStub + ); }); beforeEach(() => { @@ -62,7 +72,10 @@ describe('RequireInTheMiddleSingleton', () => { it('should return a hooked object', () => { const moduleName = 'm'; const onRequire = makeOnRequiresStub('m'); - const hooked = requireInTheMiddleSingleton.register(moduleName, onRequire); + const hooked = requireInTheMiddleSingleton.register( + moduleName, + onRequire + ); assert.deepStrictEqual(hooked, { moduleName, onRequire }); }); @@ -71,7 +84,12 @@ describe('RequireInTheMiddleSingleton', () => { it('should call `onRequire`', () => { const exports = require('fs'); assert.deepStrictEqual(exports.__ritmOnRequires, ['fs']); - sinon.assert.calledOnceWithExactly(onRequireFsStub, exports, 'fs', undefined); + sinon.assert.calledOnceWithExactly( + onRequireFsStub, + exports, + 'fs', + undefined + ); sinon.assert.notCalled(onRequireFsPromisesStub); }); }); @@ -88,9 +106,22 @@ describe('RequireInTheMiddleSingleton', () => { describe('AND module name matches', () => { it('should call `onRequire`', () => { const exports = require('fs/promises'); - assert.deepStrictEqual(exports.__ritmOnRequires, ['fs', 'fs-promises']); - sinon.assert.calledOnceWithExactly(onRequireFsPromisesStub, exports, 'fs/promises', undefined); - sinon.assert.calledOnceWithMatch(onRequireFsStub, { __ritmOnRequires: ['fs', 'fs-promises'] }, 'fs/promises', undefined); + assert.deepStrictEqual(exports.__ritmOnRequires, [ + 'fs', + 'fs-promises', + ]); + sinon.assert.calledOnceWithExactly( + onRequireFsPromisesStub, + exports, + 'fs/promises', + undefined + ); + sinon.assert.calledOnceWithMatch( + onRequireFsStub, + { __ritmOnRequires: ['fs', 'fs-promises'] }, + 'fs/promises', + undefined + ); }); }); }); @@ -102,23 +133,56 @@ describe('RequireInTheMiddleSingleton', () => { it('should call `onRequire`', () => { const exports = require('codecov'); assert.deepStrictEqual(exports.__ritmOnRequires, ['codecov']); - sinon.assert.calledWithExactly(onRequireCodecovStub, exports, 'codecov', baseDir); - sinon.assert.calledWithMatch(onRequireCodecovStub, { __ritmOnRequires: ['codecov', 'codecov-lib'] }, modulePath, baseDir); - sinon.assert.calledWithMatch(onRequireCodecovLibStub, { __ritmOnRequires: ['codecov', 'codecov-lib'] }, modulePath, baseDir); + sinon.assert.calledWithExactly( + onRequireCodecovStub, + exports, + 'codecov', + baseDir + ); + sinon.assert.calledWithMatch( + onRequireCodecovStub, + { __ritmOnRequires: ['codecov', 'codecov-lib'] }, + modulePath, + baseDir + ); + sinon.assert.calledWithMatch( + onRequireCodecovLibStub, + { __ritmOnRequires: ['codecov', 'codecov-lib'] }, + modulePath, + baseDir + ); }).timeout(30000); }); }); describe('non-core module with sub-path', () => { describe('AND module name matches', () => { - const baseDir = path.resolve(path.dirname(require.resolve('cpx')), '..'); + const baseDir = path.resolve( + path.dirname(require.resolve('cpx')), + '..' 
+ ); const modulePath = path.join('cpx', 'lib', 'copy-sync.js'); it('should call `onRequire`', () => { const exports = require('cpx/lib/copy-sync'); assert.deepStrictEqual(exports.__ritmOnRequires, ['cpx', 'cpx-lib']); - sinon.assert.calledWithMatch(onRequireCpxStub, { __ritmOnRequires: ['cpx', 'cpx-lib'] }, modulePath, baseDir); - sinon.assert.calledWithExactly(onRequireCpxStub, exports, modulePath, baseDir); - sinon.assert.calledWithExactly(onRequireCpxLibStub, exports, modulePath, baseDir); + sinon.assert.calledWithMatch( + onRequireCpxStub, + { __ritmOnRequires: ['cpx', 'cpx-lib'] }, + modulePath, + baseDir + ); + sinon.assert.calledWithExactly( + onRequireCpxStub, + exports, + modulePath, + baseDir + ); + sinon.assert.calledWithExactly( + onRequireCpxLibStub, + exports, + modulePath, + baseDir + ); }); }); }); diff --git a/experimental/packages/opentelemetry-sdk-node/src/TracerProviderWithEnvExporter.ts b/experimental/packages/opentelemetry-sdk-node/src/TracerProviderWithEnvExporter.ts index f4abb593ab..20082a9b6e 100644 --- a/experimental/packages/opentelemetry-sdk-node/src/TracerProviderWithEnvExporter.ts +++ b/experimental/packages/opentelemetry-sdk-node/src/TracerProviderWithEnvExporter.ts @@ -16,11 +16,21 @@ import { diag } from '@opentelemetry/api'; import { getEnv, getEnvWithoutDefaults } from '@opentelemetry/core'; -import { ConsoleSpanExporter, SpanExporter, BatchSpanProcessor, SimpleSpanProcessor, SDKRegistrationConfig, SpanProcessor } from '@opentelemetry/sdk-trace-base'; -import { NodeTracerConfig, NodeTracerProvider } from '@opentelemetry/sdk-trace-node'; +import { + ConsoleSpanExporter, + SpanExporter, + BatchSpanProcessor, + SimpleSpanProcessor, + SDKRegistrationConfig, + SpanProcessor, +} from '@opentelemetry/sdk-trace-base'; +import { + NodeTracerConfig, + NodeTracerProvider, +} from '@opentelemetry/sdk-trace-node'; import { OTLPTraceExporter as OTLPProtoTraceExporter } from '@opentelemetry/exporter-trace-otlp-proto'; -import { OTLPTraceExporter as OTLPHttpTraceExporter} from '@opentelemetry/exporter-trace-otlp-http'; -import { OTLPTraceExporter as OTLPGrpcTraceExporter} from '@opentelemetry/exporter-trace-otlp-grpc'; +import { OTLPTraceExporter as OTLPHttpTraceExporter } from '@opentelemetry/exporter-trace-otlp-http'; +import { OTLPTraceExporter as OTLPGrpcTraceExporter } from '@opentelemetry/exporter-trace-otlp-grpc'; import { ZipkinExporter } from '@opentelemetry/exporter-zipkin'; import { JaegerExporter } from '@opentelemetry/exporter-jaeger'; @@ -34,45 +44,58 @@ export class TracerProviderWithEnvExporters extends NodeTracerProvider { switch (protocol) { case 'grpc': - return new OTLPGrpcTraceExporter; + return new OTLPGrpcTraceExporter(); case 'http/json': - return new OTLPHttpTraceExporter; + return new OTLPHttpTraceExporter(); case 'http/protobuf': - return new OTLPProtoTraceExporter; + return new OTLPProtoTraceExporter(); default: - diag.warn(`Unsupported OTLP traces protocol: ${protocol}. Using http/protobuf.`); - return new OTLPProtoTraceExporter; + diag.warn( + `Unsupported OTLP traces protocol: ${protocol}. Using http/protobuf.` + ); + return new OTLPProtoTraceExporter(); } } static getOtlpProtocol(): string { const parsedEnvValues = getEnvWithoutDefaults(); - return parsedEnvValues.OTEL_EXPORTER_OTLP_TRACES_PROTOCOL ?? - parsedEnvValues.OTEL_EXPORTER_OTLP_PROTOCOL ?? - getEnv().OTEL_EXPORTER_OTLP_TRACES_PROTOCOL ?? - getEnv().OTEL_EXPORTER_OTLP_PROTOCOL; + return ( + parsedEnvValues.OTEL_EXPORTER_OTLP_TRACES_PROTOCOL ?? 
+ parsedEnvValues.OTEL_EXPORTER_OTLP_PROTOCOL ?? + getEnv().OTEL_EXPORTER_OTLP_TRACES_PROTOCOL ?? + getEnv().OTEL_EXPORTER_OTLP_PROTOCOL + ); } protected static override _registeredExporters = new Map< string, () => SpanExporter - >([ - ['otlp', () => this.configureOtlp()], - ['zipkin', () => new ZipkinExporter], - ['jaeger', () => new JaegerExporter], - ['console', () => new ConsoleSpanExporter] - ]); + >([ + ['otlp', () => this.configureOtlp()], + ['zipkin', () => new ZipkinExporter()], + ['jaeger', () => new JaegerExporter()], + ['console', () => new ConsoleSpanExporter()], + ]); public constructor(config: NodeTracerConfig = {}) { super(config); - let traceExportersList = this.filterBlanksAndNulls(Array.from(new Set(getEnv().OTEL_TRACES_EXPORTER.split(',')))); + let traceExportersList = this.filterBlanksAndNulls( + Array.from(new Set(getEnv().OTEL_TRACES_EXPORTER.split(','))) + ); if (traceExportersList.length === 0 || traceExportersList[0] === 'none') { - diag.warn('OTEL_TRACES_EXPORTER contains "none" or is empty. SDK will not be initialized.'); + diag.warn( + 'OTEL_TRACES_EXPORTER contains "none" or is empty. SDK will not be initialized.' + ); } else { - if (traceExportersList.length > 1 && traceExportersList.includes('none')) { - diag.warn('OTEL_TRACES_EXPORTER contains "none" along with other exporters. Using default otlp exporter.'); + if ( + traceExportersList.length > 1 && + traceExportersList.includes('none') + ) { + diag.warn( + 'OTEL_TRACES_EXPORTER contains "none" along with other exporters. Using default otlp exporter.' + ); traceExportersList = ['otlp']; } @@ -81,17 +104,23 @@ export class TracerProviderWithEnvExporters extends NodeTracerProvider { if (exporter) { this._configuredExporters.push(exporter); } else { - diag.warn(`Unrecognized OTEL_TRACES_EXPORTER value: ${exporterName}.`); + diag.warn( + `Unrecognized OTEL_TRACES_EXPORTER value: ${exporterName}.` + ); } }); if (this._configuredExporters.length > 0) { - this._spanProcessors = this.configureSpanProcessors(this._configuredExporters); + this._spanProcessors = this.configureSpanProcessors( + this._configuredExporters + ); this._spanProcessors.forEach(processor => { this.addSpanProcessor(processor); }); } else { - diag.warn('Unable to set up trace exporter(s) due to invalid exporter and/or protocol values.'); + diag.warn( + 'Unable to set up trace exporter(s) due to invalid exporter and/or protocol values.' 
+ ); } } } @@ -107,7 +136,9 @@ export class TracerProviderWithEnvExporters extends NodeTracerProvider { } } - private configureSpanProcessors(exporters: SpanExporter[]): (BatchSpanProcessor | SimpleSpanProcessor)[] { + private configureSpanProcessors( + exporters: SpanExporter[] + ): (BatchSpanProcessor | SimpleSpanProcessor)[] { return exporters.map(exporter => { if (exporter instanceof ConsoleSpanExporter) { return new SimpleSpanProcessor(exporter); @@ -118,7 +149,6 @@ export class TracerProviderWithEnvExporters extends NodeTracerProvider { } private filterBlanksAndNulls(list: string[]): string[] { - return list.map(item => item.trim()) - .filter(s => s !== 'null' && s !== ''); + return list.map(item => item.trim()).filter(s => s !== 'null' && s !== ''); } } diff --git a/experimental/packages/opentelemetry-sdk-node/src/sdk.ts b/experimental/packages/opentelemetry-sdk-node/src/sdk.ts index 38f85b87f2..ca8ae65573 100644 --- a/experimental/packages/opentelemetry-sdk-node/src/sdk.ts +++ b/experimental/packages/opentelemetry-sdk-node/src/sdk.ts @@ -17,7 +17,7 @@ import { ContextManager, TextMapPropagator, metrics } from '@opentelemetry/api'; import { InstrumentationOption, - registerInstrumentations + registerInstrumentations, } from '@opentelemetry/instrumentation'; import { Detector, @@ -25,14 +25,17 @@ import { envDetector, processDetector, Resource, - ResourceDetectionConfig + ResourceDetectionConfig, } from '@opentelemetry/resources'; import { MeterProvider, MetricReader, View } from '@opentelemetry/sdk-metrics'; import { BatchSpanProcessor, SpanProcessor, } from '@opentelemetry/sdk-trace-base'; -import { NodeTracerConfig, NodeTracerProvider } from '@opentelemetry/sdk-trace-node'; +import { + NodeTracerConfig, + NodeTracerProvider, +} from '@opentelemetry/sdk-trace-node'; import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions'; import { NodeSDKConfiguration } from './types'; import { TracerProviderWithEnvExporters } from './TracerProviderWithEnvExporter'; @@ -43,11 +46,11 @@ export type MeterProviderConfig = { /** * Reference to the MetricReader instance by the NodeSDK */ - reader?: MetricReader + reader?: MetricReader; /** * List of {@link View}s that should be passed to the MeterProvider */ - views?: View[] + views?: View[]; }; export class NodeSDK { private _tracerProviderConfig?: { @@ -73,7 +76,10 @@ export class NodeSDK { */ public constructor(configuration: Partial = {}) { this._resource = configuration.resource ?? new Resource({}); - this._resourceDetectors = configuration.resourceDetectors ?? [envDetector, processDetector]; + this._resourceDetectors = configuration.resourceDetectors ?? [ + envDetector, + processDetector, + ]; this._serviceName = configuration.serviceName; @@ -156,7 +162,9 @@ export class NodeSDK { // make sure we do not override existing reader with another reader. if (this._meterProviderConfig.reader != null && config.reader != null) { - throw new Error('MetricReader passed but MetricReader has already been configured.'); + throw new Error( + 'MetricReader passed but MetricReader has already been configured.' + ); } // set reader, but make sure we do not override existing reader with null/undefined. @@ -187,16 +195,20 @@ export class NodeSDK { await this.detectResources(); } - this._resource = this._serviceName === undefined - ? this._resource - : this._resource.merge(new Resource( - { [SemanticResourceAttributes.SERVICE_NAME]: this._serviceName } - )); + this._resource = + this._serviceName === undefined + ? 
this._resource + : this._resource.merge( + new Resource({ + [SemanticResourceAttributes.SERVICE_NAME]: this._serviceName, + }) + ); - const Provider = - this._tracerProviderConfig ? NodeTracerProvider : TracerProviderWithEnvExporters; + const Provider = this._tracerProviderConfig + ? NodeTracerProvider + : TracerProviderWithEnvExporters; - const tracerProvider = new Provider ({ + const tracerProvider = new Provider({ ...this._tracerProviderConfig?.tracerConfig, resource: this._resource, }); @@ -244,8 +256,7 @@ export class NodeSDK { return ( Promise.all(promises) // return void instead of the array from Promise.all - .then(() => { - }) + .then(() => {}) ); } } diff --git a/experimental/packages/opentelemetry-sdk-node/src/types.ts b/experimental/packages/opentelemetry-sdk-node/src/types.ts index 722201f03f..4c816516dc 100644 --- a/experimental/packages/opentelemetry-sdk-node/src/types.ts +++ b/experimental/packages/opentelemetry-sdk-node/src/types.ts @@ -32,7 +32,7 @@ export interface NodeSDKConfiguration { defaultAttributes: SpanAttributes; textMapPropagator: TextMapPropagator; metricReader: MetricReader; - views: View[] + views: View[]; instrumentations: InstrumentationOption[]; resource: Resource; resourceDetectors: Detector[]; diff --git a/experimental/packages/opentelemetry-sdk-node/test/TracerProviderWithEnvExporter.test.ts b/experimental/packages/opentelemetry-sdk-node/test/TracerProviderWithEnvExporter.test.ts index 2ba4d0394a..c99d378617 100644 --- a/experimental/packages/opentelemetry-sdk-node/test/TracerProviderWithEnvExporter.test.ts +++ b/experimental/packages/opentelemetry-sdk-node/test/TracerProviderWithEnvExporter.test.ts @@ -14,9 +14,7 @@ * limitations under the License. */ -import { - diag, -} from '@opentelemetry/api'; +import { diag } from '@opentelemetry/api'; import { ConsoleSpanExporter, SimpleSpanProcessor, @@ -25,9 +23,12 @@ import { import * as assert from 'assert'; import * as Sinon from 'sinon'; import { env } from 'process'; -import { OTLPTraceExporter as OTLPProtoTraceExporter, OTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-proto'; -import { OTLPTraceExporter as OTLPHttpTraceExporter} from '@opentelemetry/exporter-trace-otlp-http'; -import { OTLPTraceExporter as OTLPGrpcTraceExporter} from '@opentelemetry/exporter-trace-otlp-grpc'; +import { + OTLPTraceExporter as OTLPProtoTraceExporter, + OTLPTraceExporter, +} from '@opentelemetry/exporter-trace-otlp-proto'; +import { OTLPTraceExporter as OTLPHttpTraceExporter } from '@opentelemetry/exporter-trace-otlp-http'; +import { OTLPTraceExporter as OTLPGrpcTraceExporter } from '@opentelemetry/exporter-trace-otlp-grpc'; import { ZipkinExporter } from '@opentelemetry/exporter-zipkin'; import { JaegerExporter } from '@opentelemetry/exporter-jaeger'; import { TracerProviderWithEnvExporters } from '../src/TracerProviderWithEnvExporter'; @@ -37,7 +38,10 @@ describe('set up trace exporter with env exporters', () => { let stubLoggerError: Sinon.SinonStub; beforeEach(() => { - spyGetOtlpProtocol = Sinon.spy(TracerProviderWithEnvExporters, 'getOtlpProtocol'); + spyGetOtlpProtocol = Sinon.spy( + TracerProviderWithEnvExporters, + 'getOtlpProtocol' + ); stubLoggerError = Sinon.stub(diag, 'warn'); }); afterEach(() => { @@ -118,7 +122,10 @@ describe('set up trace exporter with env exporters', () => { env.OTEL_TRACES_EXPORTER = 'none'; new TracerProviderWithEnvExporters(); - assert.strictEqual(stubLoggerError.args[0][0], 'OTEL_TRACES_EXPORTER contains "none" or is empty. 
SDK will not be initialized.'); + assert.strictEqual( + stubLoggerError.args[0][0], + 'OTEL_TRACES_EXPORTER contains "none" or is empty. SDK will not be initialized.' + ); delete env.OTEL_TRACES_EXPORTER; }); it('use default exporter when none value is provided with other exports', async () => { @@ -140,7 +147,8 @@ describe('set up trace exporter with env exporters', () => { new TracerProviderWithEnvExporters(); assert.strictEqual( - stubLoggerError.args[0][0], 'OTEL_TRACES_EXPORTER contains "none" along with other exporters. Using default otlp exporter.' + stubLoggerError.args[0][0], + 'OTEL_TRACES_EXPORTER contains "none" along with other exporters. Using default otlp exporter.' ); delete env.OTEL_TRACES_EXPORTER; }); @@ -149,11 +157,13 @@ describe('set up trace exporter with env exporters', () => { new TracerProviderWithEnvExporters(); assert.strictEqual( - stubLoggerError.args[0][0], 'Unrecognized OTEL_TRACES_EXPORTER value: invalid.' + stubLoggerError.args[0][0], + 'Unrecognized OTEL_TRACES_EXPORTER value: invalid.' ); assert.strictEqual( - stubLoggerError.args[1][0], 'Unable to set up trace exporter(s) due to invalid exporter and/or protocol values.' + stubLoggerError.args[1][0], + 'Unable to set up trace exporter(s) due to invalid exporter and/or protocol values.' ); delete env.OTEL_TRACES_EXPORTER; @@ -163,7 +173,8 @@ describe('set up trace exporter with env exporters', () => { new TracerProviderWithEnvExporters(); assert.strictEqual( - stubLoggerError.args[0][0], 'Unsupported OTLP traces protocol: invalid. Using http/protobuf.' + stubLoggerError.args[0][0], + 'Unsupported OTLP traces protocol: invalid. Using http/protobuf.' ); delete env.OTEL_EXPORTER_OTLP_PROTOCOL; }); diff --git a/experimental/packages/opentelemetry-sdk-node/test/sdk.test.ts b/experimental/packages/opentelemetry-sdk-node/test/sdk.test.ts index 6d853e4902..a37f33f809 100644 --- a/experimental/packages/opentelemetry-sdk-node/test/sdk.test.ts +++ b/experimental/packages/opentelemetry-sdk-node/test/sdk.test.ts @@ -38,9 +38,7 @@ import { View, } from '@opentelemetry/sdk-metrics'; import { NodeTracerProvider } from '@opentelemetry/sdk-trace-node'; -import { - assertServiceResource, -} from './util/resource-assertions'; +import { assertServiceResource } from './util/resource-assertions'; import { ConsoleSpanExporter, SimpleSpanProcessor, @@ -56,7 +54,7 @@ import { TracerProviderWithEnvExporters } from '../src/TracerProviderWithEnvExpo import { envDetector, processDetector, - Resource + Resource, } from '@opentelemetry/resources'; import { OTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-http'; @@ -91,9 +89,21 @@ describe('Node SDK', () => { await sdk.start(); - assert.strictEqual(context['_getContextManager'](), ctxManager, 'context manager should not change'); - assert.strictEqual(propagation['_getGlobalPropagator'](), propagator, 'propagator should not change'); - assert.strictEqual((trace.getTracerProvider() as ProxyTracerProvider).getDelegate(), delegate, 'tracer provider should not have changed'); + assert.strictEqual( + context['_getContextManager'](), + ctxManager, + 'context manager should not change' + ); + assert.strictEqual( + propagation['_getGlobalPropagator'](), + propagator, + 'propagator should not change' + ); + assert.strictEqual( + (trace.getTracerProvider() as ProxyTracerProvider).getDelegate(), + delegate, + 'tracer provider should not have changed' + ); assert.ok(!(metrics.getMeterProvider() instanceof MeterProvider)); delete env.OTEL_TRACES_EXPORTER; }); @@ -109,12 +119,14 @@ 
describe('Node SDK', () => { assert.ok(!(metrics.getMeterProvider() instanceof MeterProvider)); assert.ok( - context['_getContextManager']().constructor.name === DefaultContextManager.name + context['_getContextManager']().constructor.name === + DefaultContextManager.name ); assert.ok( propagation['_getGlobalPropagator']() instanceof CompositePropagator ); - const apiTracerProvider = trace.getTracerProvider() as ProxyTracerProvider; + const apiTracerProvider = + trace.getTracerProvider() as ProxyTracerProvider; assert.ok(apiTracerProvider.getDelegate() instanceof NodeTracerProvider); }); @@ -132,12 +144,14 @@ describe('Node SDK', () => { assert.ok(!(metrics.getMeterProvider() instanceof MeterProvider)); assert.ok( - context['_getContextManager']().constructor.name === DefaultContextManager.name + context['_getContextManager']().constructor.name === + DefaultContextManager.name ); assert.ok( propagation['_getGlobalPropagator']() instanceof CompositePropagator ); - const apiTracerProvider = trace.getTracerProvider() as ProxyTracerProvider; + const apiTracerProvider = + trace.getTracerProvider() as ProxyTracerProvider; assert.ok(apiTracerProvider.getDelegate() instanceof NodeTracerProvider); }); @@ -149,7 +163,7 @@ describe('Node SDK', () => { const metricReader = new PeriodicExportingMetricReader({ exporter: exporter, exportIntervalMillis: 100, - exportTimeoutMillis: 100 + exportTimeoutMillis: 100, }); const sdk = new NodeSDK({ @@ -159,9 +173,21 @@ describe('Node SDK', () => { await sdk.start(); - assert.strictEqual(context['_getContextManager'](), ctxManager, 'context manager should not change'); - assert.strictEqual(propagation['_getGlobalPropagator'](), propagator, 'propagator should not change'); - assert.strictEqual((trace.getTracerProvider() as ProxyTracerProvider).getDelegate(), delegate, 'tracer provider should not have changed'); + assert.strictEqual( + context['_getContextManager'](), + ctxManager, + 'context manager should not change' + ); + assert.strictEqual( + propagation['_getGlobalPropagator'](), + propagator, + 'propagator should not change' + ); + assert.strictEqual( + (trace.getTracerProvider() as ProxyTracerProvider).getDelegate(), + delegate, + 'tracer provider should not have changed' + ); assert.ok(metrics.getMeterProvider() instanceof MeterProvider); @@ -170,7 +196,10 @@ describe('Node SDK', () => { }); }); - async function waitForNumberOfMetrics(exporter: InMemoryMetricExporter, numberOfMetrics: number): Promise { + async function waitForNumberOfMetrics( + exporter: InMemoryMetricExporter, + numberOfMetrics: number + ): Promise { if (numberOfMetrics <= 0) { throw new Error('numberOfMetrics must be greater than or equal to 0'); } @@ -187,11 +216,13 @@ describe('Node SDK', () => { // need to set OTEL_TRACES_EXPORTER to none since default value is otlp // which sets up an exporter and affects the context manager env.OTEL_TRACES_EXPORTER = 'none'; - const exporter = new InMemoryMetricExporter(AggregationTemporality.CUMULATIVE); + const exporter = new InMemoryMetricExporter( + AggregationTemporality.CUMULATIVE + ); const metricReader = new PeriodicExportingMetricReader({ exporter: exporter, exportIntervalMillis: 100, - exportTimeoutMillis: 100 + exportTimeoutMillis: 100, }); const sdk = new NodeSDK({ @@ -201,16 +232,28 @@ describe('Node SDK', () => { name: 'test-view', instrumentName: 'test_counter', instrumentType: InstrumentType.COUNTER, - }) + }), ], autoDetectResources: false, }); await sdk.start(); - assert.strictEqual(context['_getContextManager'](), ctxManager, 
'context manager should not change'); - assert.strictEqual(propagation['_getGlobalPropagator'](), propagator, 'propagator should not change'); - assert.strictEqual((trace.getTracerProvider() as ProxyTracerProvider).getDelegate(), delegate, 'tracer provider should not have changed'); + assert.strictEqual( + context['_getContextManager'](), + ctxManager, + 'context manager should not change' + ); + assert.strictEqual( + propagation['_getGlobalPropagator'](), + propagator, + 'propagator should not change' + ); + assert.strictEqual( + (trace.getTracerProvider() as ProxyTracerProvider).getDelegate(), + delegate, + 'tracer provider should not have changed' + ); const meterProvider = metrics.getMeterProvider() as MeterProvider; assert.ok(meterProvider); @@ -227,21 +270,32 @@ describe('Node SDK', () => { assert.ok(firstExportedMetric, 'should have one exported metric'); const [firstScopeMetric] = firstExportedMetric.scopeMetrics; assert.ok(firstScopeMetric, 'should have one scope metric'); - assert.ok(firstScopeMetric.scope.name === 'NodeSDKViews', 'scope should match created view'); - assert.ok(firstScopeMetric.metrics.length > 0, 'should have at least one metrics entry'); + assert.ok( + firstScopeMetric.scope.name === 'NodeSDKViews', + 'scope should match created view' + ); + assert.ok( + firstScopeMetric.metrics.length > 0, + 'should have at least one metrics entry' + ); const [firstMetricRecord] = firstScopeMetric.metrics; - assert.ok(firstMetricRecord.descriptor.name === 'test-view', 'should have renamed counter metric'); + assert.ok( + firstMetricRecord.descriptor.name === 'test-view', + 'should have renamed counter metric' + ); await sdk.shutdown(); delete env.OTEL_TRACES_EXPORTER; }); it('should throw error when calling configureMeterProvider when views are already configured', () => { - const exporter = new InMemoryMetricExporter(AggregationTemporality.CUMULATIVE); + const exporter = new InMemoryMetricExporter( + AggregationTemporality.CUMULATIVE + ); const metricReader = new PeriodicExportingMetricReader({ exporter: exporter, exportIntervalMillis: 100, - exportTimeoutMillis: 100 + exportTimeoutMillis: 100, }); const sdk = new NodeSDK({ @@ -251,33 +305,40 @@ describe('Node SDK', () => { name: 'test-view', instrumentName: 'test_counter', instrumentType: InstrumentType.COUNTER, - }) + }), ], autoDetectResources: false, }); - assert.throws(() => { - sdk.configureMeterProvider({ - reader: metricReader, - views: [ - new View({ - name: 'test-view', - instrumentName: 'test_counter', - instrumentType: InstrumentType.COUNTER, - }) - ] - }); - }, (error: Error) => { - return error.message.includes('Views passed but Views have already been configured'); - }); + assert.throws( + () => { + sdk.configureMeterProvider({ + reader: metricReader, + views: [ + new View({ + name: 'test-view', + instrumentName: 'test_counter', + instrumentType: InstrumentType.COUNTER, + }), + ], + }); + }, + (error: Error) => { + return error.message.includes( + 'Views passed but Views have already been configured' + ); + } + ); }); it('should throw error when calling configureMeterProvider when metricReader is already configured', () => { - const exporter = new InMemoryMetricExporter(AggregationTemporality.CUMULATIVE); + const exporter = new InMemoryMetricExporter( + AggregationTemporality.CUMULATIVE + ); const metricReader = new PeriodicExportingMetricReader({ exporter: exporter, exportIntervalMillis: 100, - exportTimeoutMillis: 100 + exportTimeoutMillis: 100, }); const sdk = new NodeSDK({ @@ -287,18 +348,23 @@ 
describe('Node SDK', () => { name: 'test-view', instrumentName: 'test_counter', instrumentType: InstrumentType.COUNTER, - }) + }), ], autoDetectResources: false, }); - assert.throws(() => { - sdk.configureMeterProvider({ - reader: metricReader, - }); - }, (error: Error) => { - return error.message.includes('MetricReader passed but MetricReader has already been configured.'); - }); + assert.throws( + () => { + sdk.configureMeterProvider({ + reader: metricReader, + }); + }, + (error: Error) => { + return error.message.includes( + 'MetricReader passed but MetricReader has already been configured.' + ); + } + ); }); describe('detectResources', async () => { @@ -315,20 +381,20 @@ describe('Node SDK', () => { it('returns a merged resource', async () => { const sdk = new NodeSDK({ autoDetectResources: true, - resourceDetectors: [processDetector, { - async detect(): Promise { - return new Resource({'customAttr': 'someValue'}); - } - }, - envDetector] + resourceDetectors: [ + processDetector, + { + async detect(): Promise { + return new Resource({ customAttr: 'someValue' }); + }, + }, + envDetector, + ], }); await sdk.detectResources(); const resource = sdk['_resource']; - assert.strictEqual( - resource.attributes['customAttr'], - 'someValue' - ); + assert.strictEqual(resource.attributes['customAttr'], 'someValue'); assertServiceResource(resource, { instanceId: '627cc493', @@ -343,12 +409,15 @@ describe('Node SDK', () => { it('returns a merged resource', async () => { const sdk = new NodeSDK({ autoDetectResources: true, - resourceDetectors: [processDetector, { - detect() { - throw new Error('Buggy detector'); - } - }, - envDetector] + resourceDetectors: [ + processDetector, + { + detect() { + throw new Error('Buggy detector'); + }, + }, + envDetector, + ], }); await sdk.detectResources(); @@ -487,7 +556,8 @@ describe('Node SDK', () => { }); it('should configure service name via OTEL_RESOURCE_ATTRIBUTES env var', async () => { - process.env.OTEL_RESOURCE_ATTRIBUTES = 'service.name=resource-env-set-name'; + process.env.OTEL_RESOURCE_ATTRIBUTES = + 'service.name=resource-env-set-name'; const sdk = new NodeSDK(); await sdk.start(); @@ -500,7 +570,8 @@ describe('Node SDK', () => { }); it('should favor config set service name over OTEL_RESOURCE_ATTRIBUTES env set service name', async () => { - process.env.OTEL_RESOURCE_ATTRIBUTES = 'service.name=resource-env-set-name'; + process.env.OTEL_RESOURCE_ATTRIBUTES = + 'service.name=resource-env-set-name'; const sdk = new NodeSDK({ serviceName: 'config-set-name', }); @@ -521,7 +592,10 @@ describe('setup exporter from env', () => { let stubLoggerError: Sinon.SinonStub; beforeEach(() => { - spyGetOtlpProtocol = Sinon.spy(TracerProviderWithEnvExporters, 'getOtlpProtocol'); + spyGetOtlpProtocol = Sinon.spy( + TracerProviderWithEnvExporters, + 'getOtlpProtocol' + ); stubLoggerError = Sinon.stub(diag, 'warn'); }); afterEach(() => { @@ -531,7 +605,8 @@ describe('setup exporter from env', () => { it('use default exporter TracerProviderWithEnvExporters when user does not provide span processor or trace exporter to sdk config', async () => { const sdk = new NodeSDK(); await sdk.start(); - const listOfProcessors = sdk['_tracerProvider']!['_registeredSpanProcessors']!; + const listOfProcessors = + sdk['_tracerProvider']!['_registeredSpanProcessors']!; assert(sdk['_tracerProvider'] instanceof TracerProviderWithEnvExporters); assert(listOfProcessors.length === 1); @@ -540,12 +615,15 @@ describe('setup exporter from env', () => { it('ignore env exporter when user provides 
exporter to sdk config', async () => { const traceExporter = new ConsoleSpanExporter(); const sdk = new NodeSDK({ - traceExporter + traceExporter, }); await sdk.start(); - const listOfProcessors = sdk['_tracerProvider']!['_registeredSpanProcessors']!; + const listOfProcessors = + sdk['_tracerProvider']!['_registeredSpanProcessors']!; - assert(sdk['_tracerProvider'] instanceof TracerProviderWithEnvExporters === false); + assert( + sdk['_tracerProvider'] instanceof TracerProviderWithEnvExporters === false + ); assert(listOfProcessors.length === 1); assert(listOfProcessors[0] instanceof SimpleSpanProcessor === false); assert(listOfProcessors[0] instanceof BatchSpanProcessor); @@ -554,12 +632,15 @@ describe('setup exporter from env', () => { const traceExporter = new ConsoleSpanExporter(); const spanProcessor = new SimpleSpanProcessor(traceExporter); const sdk = new NodeSDK({ - spanProcessor + spanProcessor, }); await sdk.start(); - const listOfProcessors = sdk['_tracerProvider']!['_registeredSpanProcessors']!; + const listOfProcessors = + sdk['_tracerProvider']!['_registeredSpanProcessors']!; - assert(sdk['_tracerProvider'] instanceof TracerProviderWithEnvExporters === false); + assert( + sdk['_tracerProvider'] instanceof TracerProviderWithEnvExporters === false + ); assert(listOfProcessors.length === 1); assert(listOfProcessors[0] instanceof SimpleSpanProcessor); assert(listOfProcessors[0] instanceof BatchSpanProcessor === false); @@ -568,12 +649,15 @@ describe('setup exporter from env', () => { env.OTEL_TRACES_EXPORTER = 'console'; const traceExporter = new OTLPTraceExporter(); const sdk = new NodeSDK({ - traceExporter + traceExporter, }); await sdk.start(); - const listOfProcessors = sdk['_tracerProvider']!['_registeredSpanProcessors']!; + const listOfProcessors = + sdk['_tracerProvider']!['_registeredSpanProcessors']!; - assert(sdk['_tracerProvider'] instanceof TracerProviderWithEnvExporters === false); + assert( + sdk['_tracerProvider'] instanceof TracerProviderWithEnvExporters === false + ); assert(listOfProcessors.length === 1); assert(listOfProcessors[0] instanceof SimpleSpanProcessor === false); assert(listOfProcessors[0] instanceof BatchSpanProcessor); @@ -585,7 +669,8 @@ describe('setup exporter from env', () => { const sdk = new NodeSDK(); await sdk.start(); - const listOfProcessors = sdk['_tracerProvider']!['_registeredSpanProcessors']!; + const listOfProcessors = + sdk['_tracerProvider']!['_registeredSpanProcessors']!; assert(sdk['_tracerProvider'] instanceof TracerProviderWithEnvExporters); assert(listOfProcessors.length === 1); assert(listOfProcessors[0] instanceof BatchSpanProcessor); @@ -597,7 +682,8 @@ describe('setup exporter from env', () => { const sdk = new NodeSDK(); await sdk.start(); - const listOfProcessors = sdk['_tracerProvider']!['_registeredSpanProcessors']!; + const listOfProcessors = + sdk['_tracerProvider']!['_registeredSpanProcessors']!; const activeProcessor = sdk['_tracerProvider']?.getActiveSpanProcessor(); assert(listOfProcessors.length === 0); @@ -609,7 +695,10 @@ describe('setup exporter from env', () => { const sdk = new NodeSDK(); await sdk.start(); - assert.strictEqual(stubLoggerError.args[0][0], 'OTEL_TRACES_EXPORTER contains "none" or is empty. SDK will not be initialized.'); + assert.strictEqual( + stubLoggerError.args[0][0], + 'OTEL_TRACES_EXPORTER contains "none" or is empty. SDK will not be initialized.' 
+ ); delete env.OTEL_TRACES_EXPORTER; }); it('do not use any exporters when empty value is provided for exporter', async () => { @@ -617,7 +706,8 @@ describe('setup exporter from env', () => { const sdk = new NodeSDK(); await sdk.start(); - const listOfProcessors = sdk['_tracerProvider']!['_registeredSpanProcessors']!; + const listOfProcessors = + sdk['_tracerProvider']!['_registeredSpanProcessors']!; const activeProcessor = sdk['_tracerProvider']?.getActiveSpanProcessor(); assert(listOfProcessors.length === 0); @@ -630,7 +720,8 @@ describe('setup exporter from env', () => { const sdk = new NodeSDK(); await sdk.start(); - const listOfProcessors = sdk['_tracerProvider']!['_registeredSpanProcessors']!; + const listOfProcessors = + sdk['_tracerProvider']!['_registeredSpanProcessors']!; assert(sdk['_tracerProvider'] instanceof TracerProviderWithEnvExporters); assert(listOfProcessors.length === 1); assert(listOfProcessors[0] instanceof BatchSpanProcessor); @@ -643,7 +734,8 @@ describe('setup exporter from env', () => { await sdk.start(); assert.strictEqual( - stubLoggerError.args[0][0], 'OTEL_TRACES_EXPORTER contains "none" along with other exporters. Using default otlp exporter.' + stubLoggerError.args[0][0], + 'OTEL_TRACES_EXPORTER contains "none" along with other exporters. Using default otlp exporter.' ); delete env.OTEL_TRACES_EXPORTER; }); @@ -653,11 +745,13 @@ describe('setup exporter from env', () => { await sdk.start(); assert.strictEqual( - stubLoggerError.args[0][0], 'Unrecognized OTEL_TRACES_EXPORTER value: invalid.' + stubLoggerError.args[0][0], + 'Unrecognized OTEL_TRACES_EXPORTER value: invalid.' ); assert.strictEqual( - stubLoggerError.args[1][0], 'Unable to set up trace exporter(s) due to invalid exporter and/or protocol values.' + stubLoggerError.args[1][0], + 'Unable to set up trace exporter(s) due to invalid exporter and/or protocol values.' 
); delete env.OTEL_TRACES_EXPORTER; @@ -668,7 +762,8 @@ describe('setup exporter from env', () => { const sdk = new NodeSDK(); await sdk.start(); - const listOfProcessors = sdk['_tracerProvider']!['_registeredSpanProcessors']!; + const listOfProcessors = + sdk['_tracerProvider']!['_registeredSpanProcessors']!; assert(sdk['_tracerProvider'] instanceof TracerProviderWithEnvExporters); assert(listOfProcessors.length === 3); assert(listOfProcessors[0] instanceof BatchSpanProcessor); @@ -683,7 +778,8 @@ describe('setup exporter from env', () => { const sdk = new NodeSDK(); await sdk.start(); - const listOfProcessors = sdk['_tracerProvider']!['_registeredSpanProcessors']!; + const listOfProcessors = + sdk['_tracerProvider']!['_registeredSpanProcessors']!; assert(listOfProcessors.length === 2); assert(listOfProcessors[0] instanceof SimpleSpanProcessor); assert(listOfProcessors[1] instanceof BatchSpanProcessor); diff --git a/experimental/packages/opentelemetry-sdk-node/test/util/resource-assertions.ts b/experimental/packages/opentelemetry-sdk-node/test/util/resource-assertions.ts index 331fed6e8e..cd6b272433 100644 --- a/experimental/packages/opentelemetry-sdk-node/test/util/resource-assertions.ts +++ b/experimental/packages/opentelemetry-sdk-node/test/util/resource-assertions.ts @@ -306,12 +306,14 @@ export const assertEmptyResource = (resource: Resource) => { }; const assertHasOneLabel = (prefix: string, resource: Resource): void => { - const hasOne = Object.entries(SemanticResourceAttributes).find(([key, value]) => { - return ( - key.startsWith(prefix) && - Object.prototype.hasOwnProperty.call(resource.attributes, value) - ); - }); + const hasOne = Object.entries(SemanticResourceAttributes).find( + ([key, value]) => { + return ( + key.startsWith(prefix) && + Object.prototype.hasOwnProperty.call(resource.attributes, value) + ); + } + ); assert.ok( hasOne, diff --git a/experimental/packages/otlp-exporter-base/src/OTLPExporterBase.ts b/experimental/packages/otlp-exporter-base/src/OTLPExporterBase.ts index 013879dd25..c78adb7e27 100644 --- a/experimental/packages/otlp-exporter-base/src/OTLPExporterBase.ts +++ b/experimental/packages/otlp-exporter-base/src/OTLPExporterBase.ts @@ -15,7 +15,11 @@ */ import { diag } from '@opentelemetry/api'; -import { ExportResult, ExportResultCode, BindOnceFuture } from '@opentelemetry/core'; +import { + ExportResult, + ExportResultCode, + BindOnceFuture, +} from '@opentelemetry/core'; import { OTLPExporterError, OTLPExporterConfigBase, @@ -30,7 +34,7 @@ export abstract class OTLPExporterBase< T extends OTLPExporterConfigBase, ExportItem, ServiceRequest - > { +> { public readonly url: string; public readonly hostname: string | undefined; public readonly timeoutMillis: number; @@ -66,7 +70,10 @@ export abstract class OTLPExporterBase< * @param items * @param resultCallback */ - export(items: ExportItem[], resultCallback: (result: ExportResult) => void): void { + export( + items: ExportItem[], + resultCallback: (result: ExportResult) => void + ): void { if (this._shutdownOnce.isCalled) { resultCallback({ code: ExportResultCode.FAILED, @@ -116,10 +123,9 @@ export abstract class OTLPExporterBase< private _shutdown(): Promise { diag.debug('shutdown started'); this.onShutdown(); - return Promise.all(this._sendingPromises) - .then(() => { - /** ignore resolved values */ - }); + return Promise.all(this._sendingPromises).then(() => { + /** ignore resolved values */ + }); } abstract onShutdown(): void; diff --git 
a/experimental/packages/otlp-exporter-base/src/platform/browser/OTLPExporterBrowserBase.ts b/experimental/packages/otlp-exporter-base/src/platform/browser/OTLPExporterBrowserBase.ts index 46948167b2..57556d81a5 100644 --- a/experimental/packages/otlp-exporter-base/src/platform/browser/OTLPExporterBrowserBase.ts +++ b/experimental/packages/otlp-exporter-base/src/platform/browser/OTLPExporterBrowserBase.ts @@ -28,11 +28,7 @@ import { getEnv, baggageUtils } from '@opentelemetry/core'; export abstract class OTLPExporterBrowserBase< ExportItem, ServiceRequest - > extends OTLPExporterBase< - OTLPExporterConfigBase, - ExportItem, - ServiceRequest - > { +> extends OTLPExporterBase { protected _headers: Record; private _useXHR: boolean = false; @@ -78,12 +74,24 @@ export abstract class OTLPExporterBrowserBase< const promise = new Promise((resolve, reject) => { if (this._useXHR) { - sendWithXhr(body, this.url, this._headers, this.timeoutMillis, resolve, reject); + sendWithXhr( + body, + this.url, + this._headers, + this.timeoutMillis, + resolve, + reject + ); } else { - sendWithBeacon(body, this.url, { type: 'application/json' }, resolve, reject); + sendWithBeacon( + body, + this.url, + { type: 'application/json' }, + resolve, + reject + ); } - }) - .then(onSuccess, onError); + }).then(onSuccess, onError); this._sendingPromises.push(promise); const popPromise = () => { diff --git a/experimental/packages/otlp-exporter-base/src/platform/browser/util.ts b/experimental/packages/otlp-exporter-base/src/platform/browser/util.ts index 4844037056..8c311fe0ed 100644 --- a/experimental/packages/otlp-exporter-base/src/platform/browser/util.ts +++ b/experimental/packages/otlp-exporter-base/src/platform/browser/util.ts @@ -35,9 +35,7 @@ export function sendWithBeacon( diag.debug('sendBeacon - can send', body); onSuccess(); } else { - const error = new OTLPExporterError( - `sendBeacon - cannot send ${body}` - ); + const error = new OTLPExporterError(`sendBeacon - cannot send ${body}`); onError(error); } } @@ -70,7 +68,7 @@ export function sendWithXhr( xhr.open('POST', url); const defaultHeaders = { - 'Accept': 'application/json', + Accept: 'application/json', 'Content-Type': 'application/json', }; @@ -90,9 +88,7 @@ export function sendWithXhr( diag.debug('xhr success', body); onSuccess(); } else if (reqIsDestroyed) { - const error = new OTLPExporterError( - 'Request Timeout', xhr.status - ); + const error = new OTLPExporterError('Request Timeout', xhr.status); onError(error); } else { const error = new OTLPExporterError( diff --git a/experimental/packages/otlp-exporter-base/src/platform/node/OTLPExporterNodeBase.ts b/experimental/packages/otlp-exporter-base/src/platform/node/OTLPExporterNodeBase.ts index 9bc6bb3cfc..088a0fd0a0 100644 --- a/experimental/packages/otlp-exporter-base/src/platform/node/OTLPExporterNodeBase.ts +++ b/experimental/packages/otlp-exporter-base/src/platform/node/OTLPExporterNodeBase.ts @@ -77,8 +77,7 @@ export abstract class OTLPExporterNodeBase< resolve, reject ); - }) - .then(onSuccess, onError); + }).then(onSuccess, onError); this._sendingPromises.push(promise); const popPromise = () => { diff --git a/experimental/packages/otlp-exporter-base/src/platform/node/types.ts b/experimental/packages/otlp-exporter-base/src/platform/node/types.ts index a37f5c3c9c..b1e355de2d 100644 --- a/experimental/packages/otlp-exporter-base/src/platform/node/types.ts +++ b/experimental/packages/otlp-exporter-base/src/platform/node/types.ts @@ -21,8 +21,7 @@ import { OTLPExporterConfigBase } from '../../types'; 
/** * Collector Exporter node base config */ -export interface OTLPExporterNodeConfigBase - extends OTLPExporterConfigBase { +export interface OTLPExporterNodeConfigBase extends OTLPExporterConfigBase { keepAlive?: boolean; compression?: CompressionAlgorithm; httpAgentOptions?: http.AgentOptions | https.AgentOptions; @@ -30,5 +29,5 @@ export interface OTLPExporterNodeConfigBase export enum CompressionAlgorithm { NONE = 'none', - GZIP = 'gzip' + GZIP = 'gzip', } diff --git a/experimental/packages/otlp-exporter-base/src/platform/node/util.ts b/experimental/packages/otlp-exporter-base/src/platform/node/util.ts index 2d2a47471d..d5636d14e6 100644 --- a/experimental/packages/otlp-exporter-base/src/platform/node/util.ts +++ b/experimental/packages/otlp-exporter-base/src/platform/node/util.ts @@ -75,9 +75,7 @@ export function sendWithHttp( res.on('aborted', () => { if (reqIsDestroyed) { - const err = new OTLPExporterError( - 'Request Timeout' - ); + const err = new OTLPExporterError('Request Timeout'); onError(err); } }); @@ -102,9 +100,7 @@ export function sendWithHttp( req.on('error', (error: Error | any) => { if (reqIsDestroyed) { - const err = new OTLPExporterError( - 'Request Timeout', error.code - ); + const err = new OTLPExporterError('Request Timeout', error.code); onError(err); } else { clearTimeout(exporterTimer); @@ -116,8 +112,10 @@ export function sendWithHttp( case CompressionAlgorithm.GZIP: { req.setHeader('Content-Encoding', 'gzip'); const dataStream = readableFromBuffer(data); - dataStream.on('error', onError) - .pipe(zlib.createGzip()).on('error', onError) + dataStream + .on('error', onError) + .pipe(zlib.createGzip()) + .on('error', onError) .pipe(req); break; @@ -158,11 +156,17 @@ export function createHttpAgent( } } -export function configureCompression(compression: CompressionAlgorithm | undefined): CompressionAlgorithm { +export function configureCompression( + compression: CompressionAlgorithm | undefined +): CompressionAlgorithm { if (compression) { return compression; } else { - const definedCompression = getEnv().OTEL_EXPORTER_OTLP_TRACES_COMPRESSION || getEnv().OTEL_EXPORTER_OTLP_COMPRESSION; - return definedCompression === CompressionAlgorithm.GZIP ? CompressionAlgorithm.GZIP : CompressionAlgorithm.NONE; + const definedCompression = + getEnv().OTEL_EXPORTER_OTLP_TRACES_COMPRESSION || + getEnv().OTEL_EXPORTER_OTLP_COMPRESSION; + return definedCompression === CompressionAlgorithm.GZIP + ? CompressionAlgorithm.GZIP + : CompressionAlgorithm.NONE; } } diff --git a/experimental/packages/otlp-exporter-base/src/util.ts b/experimental/packages/otlp-exporter-base/src/util.ts index 6290153bcb..99a9f6e333 100644 --- a/experimental/packages/otlp-exporter-base/src/util.ts +++ b/experimental/packages/otlp-exporter-base/src/util.ts @@ -73,7 +73,9 @@ export function appendRootPathToUrlIfNeeded(url: string): string { * @param timeoutMillis * @returns timeout value in milliseconds */ -export function configureExporterTimeout(timeoutMillis: number | undefined): number { +export function configureExporterTimeout( + timeoutMillis: number | undefined +): number { if (typeof timeoutMillis === 'number') { if (timeoutMillis <= 0) { // OTLP exporter configured timeout - using default value of 10000ms @@ -86,9 +88,10 @@ export function configureExporterTimeout(timeoutMillis: number | undefined): num } function getExporterTimeoutFromEnv(): number { - const definedTimeout = - Number(getEnv().OTEL_EXPORTER_OTLP_TRACES_TIMEOUT ?? 
- getEnv().OTEL_EXPORTER_OTLP_TIMEOUT); + const definedTimeout = Number( + getEnv().OTEL_EXPORTER_OTLP_TRACES_TIMEOUT ?? + getEnv().OTEL_EXPORTER_OTLP_TIMEOUT + ); if (definedTimeout <= 0) { // OTLP exporter configured timeout - using default value of 10000ms @@ -99,7 +102,10 @@ function getExporterTimeoutFromEnv(): number { } // OTLP exporter configured timeout - using default value of 10000ms -export function invalidTimeout(timeout: number, defaultTimeout: number): number { +export function invalidTimeout( + timeout: number, + defaultTimeout: number +): number { diag.warn('Timeout must be greater than 0', timeout); return defaultTimeout; diff --git a/experimental/packages/otlp-exporter-base/test/browser/util.test.ts b/experimental/packages/otlp-exporter-base/test/browser/util.test.ts index e72b8618f5..1dd3b77d58 100644 --- a/experimental/packages/otlp-exporter-base/test/browser/util.test.ts +++ b/experimental/packages/otlp-exporter-base/test/browser/util.test.ts @@ -39,9 +39,9 @@ describe('util - browser', () => { }); describe('when XMLHTTPRequest is used', () => { - let expectedHeaders: Record; + let expectedHeaders: Record; let clock: sinon.SinonFakeTimers; - beforeEach(()=>{ + beforeEach(() => { // fakeTimers is used to replace the next setTimeout which is // located in sendWithXhr function called by the export method clock = sinon.useFakeTimers(); @@ -49,19 +49,25 @@ describe('util - browser', () => { expectedHeaders = { // ;charset=utf-8 is applied by sinon.fakeServer 'Content-Type': 'application/json;charset=utf-8', - 'Accept': 'application/json', + Accept: 'application/json', }; }); describe('and Content-Type header is set', () => { - beforeEach(()=>{ + beforeEach(() => { const explicitContentType = { 'Content-Type': 'application/json', }; const exporterTimeout = 10000; - sendWithXhr(body, url, explicitContentType, exporterTimeout, onSuccessStub, onErrorStub); + sendWithXhr( + body, + url, + explicitContentType, + exporterTimeout, + onSuccessStub, + onErrorStub + ); }); it('Request Headers should contain "Content-Type" header', done => { - nextTick(() => { const { requestHeaders } = server.requests[0]; ensureHeadersContain(requestHeaders, expectedHeaders); @@ -70,7 +76,6 @@ describe('util - browser', () => { }); }); it('Request Headers should contain "Accept" header', done => { - nextTick(() => { const { requestHeaders } = server.requests[0]; ensureHeadersContain(requestHeaders, expectedHeaders); @@ -81,14 +86,20 @@ describe('util - browser', () => { }); describe('and empty headers are set', () => { - beforeEach(()=>{ + beforeEach(() => { const emptyHeaders = {}; // use default exporter timeout const exporterTimeout = 10000; - sendWithXhr(body, url, emptyHeaders, exporterTimeout, onSuccessStub, onErrorStub); + sendWithXhr( + body, + url, + emptyHeaders, + exporterTimeout, + onSuccessStub, + onErrorStub + ); }); it('Request Headers should contain "Content-Type" header', done => { - nextTick(() => { const { requestHeaders } = server.requests[0]; ensureHeadersContain(requestHeaders, expectedHeaders); @@ -97,7 +108,6 @@ describe('util - browser', () => { }); }); it('Request Headers should contain "Accept" header', done => { - nextTick(() => { const { requestHeaders } = server.requests[0]; ensureHeadersContain(requestHeaders, expectedHeaders); @@ -107,14 +117,20 @@ describe('util - browser', () => { }); }); describe('and custom headers are set', () => { - let customHeaders: Record; - beforeEach(()=>{ + let customHeaders: Record; + beforeEach(() => { customHeaders = { aHeader: 'aValue', 
bHeader: 'bValue' }; const exporterTimeout = 10000; - sendWithXhr(body, url, customHeaders, exporterTimeout, onSuccessStub, onErrorStub); + sendWithXhr( + body, + url, + customHeaders, + exporterTimeout, + onSuccessStub, + onErrorStub + ); }); it('Request Headers should contain "Content-Type" header', done => { - nextTick(() => { const { requestHeaders } = server.requests[0]; ensureHeadersContain(requestHeaders, expectedHeaders); @@ -123,7 +139,6 @@ describe('util - browser', () => { }); }); it('Request Headers should contain "Accept" header', done => { - nextTick(() => { const { requestHeaders } = server.requests[0]; ensureHeadersContain(requestHeaders, expectedHeaders); @@ -132,7 +147,6 @@ describe('util - browser', () => { }); }); it('Request Headers should contain custom headers', done => { - nextTick(() => { const { requestHeaders } = server.requests[0]; ensureHeadersContain(requestHeaders, customHeaders); diff --git a/experimental/packages/otlp-exporter-base/test/common/CollectorExporter.test.ts b/experimental/packages/otlp-exporter-base/test/common/CollectorExporter.test.ts index 30c16d1882..e3b36e704d 100644 --- a/experimental/packages/otlp-exporter-base/test/common/CollectorExporter.test.ts +++ b/experimental/packages/otlp-exporter-base/test/common/CollectorExporter.test.ts @@ -22,9 +22,8 @@ import { OTLPExporterConfigBase } from '../../src/types'; import { ComplexTestObject, mockedComplexTestObject } from '../testHelper'; import * as otlpTypes from '../../src/types'; - interface ExportRequest { - resourceSpans: object[] + resourceSpans: object[]; } type CollectorExporterConfig = OTLPExporterConfigBase; @@ -32,7 +31,7 @@ class OTLPTraceExporter extends OTLPExporterBase< CollectorExporterConfig, ComplexTestObject, ExportRequest - > { +> { onInit() {} onShutdown() {} send( @@ -51,9 +50,7 @@ class OTLPTraceExporter extends OTLPExporterBase< return config.url || ''; } - convert( - spans: ComplexTestObject[] - ): ExportRequest { + convert(spans: ComplexTestObject[]): ExportRequest { return { resourceSpans: [] }; } } @@ -120,7 +117,7 @@ describe('OTLPTraceExporter - common', () => { describe('when exporter is shutdown', () => { it( 'should not export anything but return callback with code' + - ' "FailedNotRetryable"', + ' "FailedNotRetryable"', async () => { const spans: ComplexTestObject[] = []; spans.push(Object.assign({}, mockedComplexTestObject)); @@ -200,10 +197,7 @@ describe('OTLPTraceExporter - common', () => { describe('shutdown', () => { let onShutdownSpy: any; beforeEach(() => { - onShutdownSpy = sinon.stub( - OTLPTraceExporter.prototype, - 'onShutdown' - ); + onShutdownSpy = sinon.stub(OTLPTraceExporter.prototype, 'onShutdown'); collectorExporterConfig = { hostname: 'foo', url: 'http://foo.bar.com', diff --git a/experimental/packages/otlp-exporter-base/test/node/util.test.ts b/experimental/packages/otlp-exporter-base/test/node/util.test.ts index 56dc66f2ee..7d5b03d3eb 100644 --- a/experimental/packages/otlp-exporter-base/test/node/util.test.ts +++ b/experimental/packages/otlp-exporter-base/test/node/util.test.ts @@ -17,8 +17,8 @@ import * as assert from 'assert'; import { configureExporterTimeout, invalidTimeout } from '../../src/util'; import { sendWithHttp } from '../../src/platform/node/util'; -import { CompressionAlgorithm} from '../../src/platform/node/types'; -import { configureCompression} from '../../src/platform/node/util'; +import { CompressionAlgorithm } from '../../src/platform/node/types'; +import { configureCompression } from '../../src/platform/node/util'; 
import { diag } from '@opentelemetry/api'; import * as sinon from 'sinon'; @@ -50,7 +50,7 @@ class HttpRequest extends PassThrough { // Barebones exporter for use by sendWithHttp type ExporterConfig = OTLPExporterNodeConfigBase; -class Exporter extends OTLPExporterNodeBase { +class Exporter extends OTLPExporterNodeBase { getDefaultUrl(config: ExporterConfig): string { return config.url || ''; } @@ -147,20 +147,32 @@ describe('configureCompression', () => { const envSource = process.env; it('should return none for compression', () => { const compression = CompressionAlgorithm.NONE; - assert.strictEqual(configureCompression(compression), CompressionAlgorithm.NONE); + assert.strictEqual( + configureCompression(compression), + CompressionAlgorithm.NONE + ); }); it('should return gzip compression defined via env', () => { envSource.OTEL_EXPORTER_OTLP_TRACES_COMPRESSION = 'gzip'; - assert.strictEqual(configureCompression(undefined),CompressionAlgorithm.GZIP); + assert.strictEqual( + configureCompression(undefined), + CompressionAlgorithm.GZIP + ); delete envSource.OTEL_EXPORTER_OTLP_TRACES_COMPRESSION; }); it('should return none for compression defined via env', () => { envSource.OTEL_EXPORTER_OTLP_TRACES_COMPRESSION = 'none'; - assert.strictEqual(configureCompression(undefined),CompressionAlgorithm.NONE); + assert.strictEqual( + configureCompression(undefined), + CompressionAlgorithm.NONE + ); delete envSource.OTEL_EXPORTER_OTLP_TRACES_COMPRESSION; }); it('should return none for compression when no compression is set', () => { - assert.strictEqual(configureCompression(undefined),CompressionAlgorithm.NONE); + assert.strictEqual( + configureCompression(undefined), + CompressionAlgorithm.NONE + ); }); }); @@ -171,12 +183,11 @@ describe('sendWithHttp', () => { let setHeaderSpy: sinon.SinonSpy; const spanData: object = { - 'foo': 'bar', - 'bar': 'baz', + foo: 'bar', + bar: 'baz', }; beforeEach(() => { - // Create stub of http.request (used by sendWithHttp) httpRequestStub = sinon.stub(http, 'request'); @@ -195,7 +206,7 @@ describe('sendWithHttp', () => { httpRequestStub.returns(mockRequest).callsArgWith(1, response); }); - afterEach(function() { + afterEach(function () { httpRequestStub.restore(); setHeaderSpy.restore(); }); @@ -209,17 +220,23 @@ describe('sendWithHttp', () => { // Show that data is written to the request stream let requestData = ''; - mockRequest.on('data', chunk => requestData += chunk); + mockRequest.on('data', chunk => (requestData += chunk)); mockRequest.on('end', () => { assert.strictEqual(requestData, data); }); - sendWithHttp(exporter, data, 'application/json', () => { - // Show that we aren't setting the gzip encoding header - assert(setHeaderSpy.withArgs('Content-Encoding', 'gzip').notCalled); - }, (err: OTLPExporterError) => { - assert.fail(err); - }); + sendWithHttp( + exporter, + data, + 'application/json', + () => { + // Show that we aren't setting the gzip encoding header + assert(setHeaderSpy.withArgs('Content-Encoding', 'gzip').notCalled); + }, + (err: OTLPExporterError) => { + assert.fail(err); + } + ); }); it('should send with gzip compression if configured to do so', () => { @@ -238,12 +255,18 @@ describe('sendWithHttp', () => { assert(Buffer.concat(buffers).equals(compressedData)); }); - sendWithHttp(exporter, data, 'application/json', () => { - // Show that we are setting the gzip encoding header - assert(setHeaderSpy.withArgs('Content-Encoding', 'gzip').calledOnce); - }, (err: OTLPExporterError) => { - assert.fail(err); - }); + sendWithHttp( + exporter, + 
data, + 'application/json', + () => { + // Show that we are setting the gzip encoding header + assert(setHeaderSpy.withArgs('Content-Encoding', 'gzip').calledOnce); + }, + (err: OTLPExporterError) => { + assert.fail(err); + } + ); }); it('should work with gzip compression enabled even after multiple requests', () => { @@ -274,12 +297,18 @@ describe('sendWithHttp', () => { assert(Buffer.concat(buffers).equals(compressedData)); }); - sendWithHttp(exporter, data, 'application/json', () => { - // Show that we are setting the gzip encoding header - assert(setHeaderSpy.withArgs('Content-Encoding', 'gzip').calledOnce); - }, (err: OTLPExporterError) => { - assert.fail(err); - }); + sendWithHttp( + exporter, + data, + 'application/json', + () => { + // Show that we are setting the gzip encoding header + assert(setHeaderSpy.withArgs('Content-Encoding', 'gzip').calledOnce); + }, + (err: OTLPExporterError) => { + assert.fail(err); + } + ); } }); }); diff --git a/experimental/packages/otlp-exporter-base/test/testHelper.ts b/experimental/packages/otlp-exporter-base/test/testHelper.ts index 4656cf17fc..41b0c95882 100644 --- a/experimental/packages/otlp-exporter-base/test/testHelper.ts +++ b/experimental/packages/otlp-exporter-base/test/testHelper.ts @@ -16,7 +16,7 @@ import { HrTime } from '@opentelemetry/api'; import * as assert from 'assert'; -export interface SimpleTestObject{ +export interface SimpleTestObject { readonly propString: string; readonly propNumber: number; readonly propArray: number[]; @@ -38,31 +38,31 @@ export const mockedComplexTestObject: ComplexTestObject = { { propArray: [1, 2, 3], propNumber: 42, - propString: 'this is a string.' + propString: 'this is a string.', }, { propArray: [3, 2, 1], propNumber: 3, - propString: 'this is a second string.' - } + propString: 'this is a second string.', + }, ], propBoolean: true, propFunction: () => { return { propArray: [30, 20, 10], propNumber: 24, - propString: 'created by function' + propString: 'created by function', }; }, propObject: { propArray: [4, 3, 77], propNumber: 44, - propString: 'this is a string that is part of propObject.' 
+ propString: 'this is a string that is part of propObject.', }, propNumber: 12, propOptional: undefined, propString: 'this is just a string in a complex test object.', - propTime: [12, 455] + propTime: [12, 455], }; export function ensureHeadersContain( diff --git a/experimental/packages/otlp-grpc-exporter-base/src/OTLPGRPCExporterNodeBase.ts b/experimental/packages/otlp-grpc-exporter-base/src/OTLPGRPCExporterNodeBase.ts index 87d7e01787..884505daa8 100644 --- a/experimental/packages/otlp-grpc-exporter-base/src/OTLPGRPCExporterNodeBase.ts +++ b/experimental/packages/otlp-grpc-exporter-base/src/OTLPGRPCExporterNodeBase.ts @@ -24,7 +24,10 @@ import { import { ServiceClient } from './types'; import { getEnv, baggageUtils } from '@opentelemetry/core'; import { configureCompression, GrpcCompressionAlgorithm } from './util'; -import { OTLPExporterBase, OTLPExporterError } from '@opentelemetry/otlp-exporter-base'; +import { + OTLPExporterBase, + OTLPExporterError, +} from '@opentelemetry/otlp-exporter-base'; /** * OTLP Exporter abstract base class @@ -48,7 +51,9 @@ export abstract class OTLPGRPCExporterNodeBase< if (config.headers) { diag.warn('Headers cannot be set when using grpc'); } - const headers = baggageUtils.parseKeyPairsIntoRecord(getEnv().OTEL_EXPORTER_OTLP_HEADERS); + const headers = baggageUtils.parseKeyPairsIntoRecord( + getEnv().OTEL_EXPORTER_OTLP_HEADERS + ); this.metadata = config.metadata || new Metadata(); for (const [k, v] of Object.entries(headers)) { this.metadata.set(k, v); @@ -63,8 +68,7 @@ export abstract class OTLPGRPCExporterNodeBase< ): void { const promise = new Promise((resolve, reject) => { this._send(this, objects, resolve, reject); - }) - .then(onSuccess, onError); + }).then(onSuccess, onError); this._sendingPromises.push(promise); const popPromise = () => { diff --git a/experimental/packages/otlp-grpc-exporter-base/src/index.ts b/experimental/packages/otlp-grpc-exporter-base/src/index.ts index 8dfb3543e7..2669033a46 100644 --- a/experimental/packages/otlp-grpc-exporter-base/src/index.ts +++ b/experimental/packages/otlp-grpc-exporter-base/src/index.ts @@ -16,4 +16,8 @@ export * from './OTLPGRPCExporterNodeBase'; export { ServiceClientType, OTLPGRPCExporterConfigNode } from './types'; -export { DEFAULT_COLLECTOR_URL, validateAndNormalizeUrl, GrpcCompressionAlgorithm } from './util'; +export { + DEFAULT_COLLECTOR_URL, + validateAndNormalizeUrl, + GrpcCompressionAlgorithm, +} from './util'; diff --git a/experimental/packages/otlp-grpc-exporter-base/src/types.ts b/experimental/packages/otlp-grpc-exporter-base/src/types.ts index 74c1e7ccf0..fa3fea4ab1 100644 --- a/experimental/packages/otlp-grpc-exporter-base/src/types.ts +++ b/experimental/packages/otlp-grpc-exporter-base/src/types.ts @@ -15,7 +15,11 @@ */ import * as grpc from '@grpc/grpc-js'; -import { CompressionAlgorithm, OTLPExporterConfigBase, OTLPExporterError } from '@opentelemetry/otlp-exporter-base'; +import { + CompressionAlgorithm, + OTLPExporterConfigBase, + OTLPExporterError, +} from '@opentelemetry/otlp-exporter-base'; /** * Queue item to be used to save temporary spans/metrics in case the GRPC service @@ -42,8 +46,7 @@ export interface ServiceClient extends grpc.Client { /** * OTLP Exporter Config for Node */ -export interface OTLPGRPCExporterConfigNode - extends OTLPExporterConfigBase { +export interface OTLPGRPCExporterConfigNode extends OTLPExporterConfigBase { credentials?: grpc.ChannelCredentials; metadata?: grpc.Metadata; compression?: CompressionAlgorithm; diff --git 
a/experimental/packages/otlp-grpc-exporter-base/src/util.ts b/experimental/packages/otlp-grpc-exporter-base/src/util.ts index 1d3791bd38..9a42327167 100644 --- a/experimental/packages/otlp-grpc-exporter-base/src/util.ts +++ b/experimental/packages/otlp-grpc-exporter-base/src/util.ts @@ -22,8 +22,16 @@ import * as path from 'path'; import { OTLPGRPCExporterNodeBase } from './OTLPGRPCExporterNodeBase'; import { URL } from 'url'; import * as fs from 'fs'; -import { GRPCQueueItem, OTLPGRPCExporterConfigNode, ServiceClientType, } from './types'; -import { CompressionAlgorithm, ExportServiceError, OTLPExporterError } from '@opentelemetry/otlp-exporter-base'; +import { + GRPCQueueItem, + OTLPGRPCExporterConfigNode, + ServiceClientType, +} from './types'; +import { + CompressionAlgorithm, + ExportServiceError, + OTLPExporterError, +} from '@opentelemetry/otlp-exporter-base'; export const DEFAULT_COLLECTOR_URL = 'http://localhost:4317'; @@ -33,7 +41,10 @@ export function onInit( ): void { collector.grpcQueue = []; - const credentials: grpc.ChannelCredentials = configureSecurity(config.credentials, collector.getUrlFromConfig(config)); + const credentials: grpc.ChannelCredentials = configureSecurity( + config.credentials, + collector.getUrlFromConfig(config) + ); const includeDirs = [path.resolve(__dirname, '..', 'protos')]; @@ -49,21 +60,23 @@ export function onInit( .then(packageDefinition => { const packageObject: any = grpc.loadPackageDefinition(packageDefinition); - const options = { 'grpc.default_compression_algorithm': collector.compression }; + const options = { + 'grpc.default_compression_algorithm': collector.compression, + }; if (collector.getServiceClientType() === ServiceClientType.SPANS) { collector.serviceClient = new packageObject.opentelemetry.proto.collector.trace.v1.TraceService( collector.url, credentials, - options, + options ); } else { collector.serviceClient = new packageObject.opentelemetry.proto.collector.metrics.v1.MetricsService( collector.url, credentials, - options, + options ); } @@ -124,23 +137,25 @@ export function validateAndNormalizeUrl(url: string): string { ); } if (target.protocol !== '' && !target.protocol?.match(/^(http)s?:$/)) { - diag.warn( - 'URL protocol should be http(s)://. Using http://.' - ); + diag.warn('URL protocol should be http(s)://. 
Using http://.'); } return target.host; } -export function configureSecurity(credentials: grpc.ChannelCredentials | undefined, endpoint: string): - grpc.ChannelCredentials { - +export function configureSecurity( + credentials: grpc.ChannelCredentials | undefined, + endpoint: string +): grpc.ChannelCredentials { let insecure: boolean; if (credentials) { return credentials; } else if (endpoint.startsWith('https://')) { insecure = false; - } else if (endpoint.startsWith('http://') || endpoint === DEFAULT_COLLECTOR_URL) { + } else if ( + endpoint.startsWith('http://') || + endpoint === DEFAULT_COLLECTOR_URL + ) { insecure = true; } else { insecure = getSecurityFromEnv(); @@ -170,7 +185,11 @@ export function useSecureConnection(): grpc.ChannelCredentials { const privateKeyPath = retrievePrivateKey(); const certChainPath = retrieveCertChain(); - return grpc.credentials.createSsl(rootCertPath, privateKeyPath, certChainPath); + return grpc.credentials.createSsl( + rootCertPath, + privateKeyPath, + certChainPath + ); } function retrieveRootCert(): Buffer | undefined { @@ -224,7 +243,9 @@ function retrieveCertChain(): Buffer | undefined { } } -function toGrpcCompression(compression: CompressionAlgorithm): GrpcCompressionAlgorithm { +function toGrpcCompression( + compression: CompressionAlgorithm +): GrpcCompressionAlgorithm { if (compression === CompressionAlgorithm.NONE) return GrpcCompressionAlgorithm.NONE; else if (compression === CompressionAlgorithm.GZIP) @@ -237,15 +258,21 @@ function toGrpcCompression(compression: CompressionAlgorithm): GrpcCompressionAl */ export enum GrpcCompressionAlgorithm { NONE = 0, - GZIP = 2 + GZIP = 2, } -export function configureCompression(compression: CompressionAlgorithm | undefined): GrpcCompressionAlgorithm { +export function configureCompression( + compression: CompressionAlgorithm | undefined +): GrpcCompressionAlgorithm { if (compression) { return toGrpcCompression(compression); } else { - const definedCompression = getEnv().OTEL_EXPORTER_OTLP_TRACES_COMPRESSION || getEnv().OTEL_EXPORTER_OTLP_COMPRESSION; + const definedCompression = + getEnv().OTEL_EXPORTER_OTLP_TRACES_COMPRESSION || + getEnv().OTEL_EXPORTER_OTLP_COMPRESSION; - return definedCompression === 'gzip' ? GrpcCompressionAlgorithm.GZIP : GrpcCompressionAlgorithm.NONE; + return definedCompression === 'gzip' + ? 
GrpcCompressionAlgorithm.GZIP + : GrpcCompressionAlgorithm.NONE; } } diff --git a/experimental/packages/otlp-grpc-exporter-base/test/traceHelper.ts b/experimental/packages/otlp-grpc-exporter-base/test/traceHelper.ts index e8b0204d61..8aef302d05 100644 --- a/experimental/packages/otlp-grpc-exporter-base/test/traceHelper.ts +++ b/experimental/packages/otlp-grpc-exporter-base/test/traceHelper.ts @@ -20,25 +20,16 @@ import { ReadableSpan } from '@opentelemetry/sdk-trace-base'; import * as assert from 'assert'; import * as grpc from '@grpc/grpc-js'; import { VERSION } from '@opentelemetry/core'; -import { IEvent, IKeyValue, ILink, IResource, ISpan } from '@opentelemetry/otlp-transformer'; +import { + IEvent, + IKeyValue, + ILink, + IResource, + ISpan, +} from '@opentelemetry/otlp-transformer'; const traceIdArr = [ - 31, - 16, - 8, - 220, - 142, - 39, - 14, - 133, - 196, - 10, - 13, - 124, - 57, - 57, - 178, - 120, + 31, 16, 8, 220, 142, 39, 14, 133, 196, 10, 13, 124, 57, 57, 178, 120, ]; const spanIdArr = [94, 16, 114, 97, 246, 79, 165, 62]; const parentIdArr = [120, 168, 145, 80, 152, 134, 67, 136]; @@ -92,11 +83,13 @@ export const mockedReadableSpan: ReadableSpan = { }, ], duration: [0, 8885000], - resource: Resource.default().merge(new Resource({ - service: 'ui', - version: 1, - cost: 112.12, - })), + resource: Resource.default().merge( + new Resource({ + service: 'ui', + version: 1, + cost: 112.12, + }) + ), instrumentationLibrary: { name: 'default', version: '0.0.1' }, }; @@ -257,32 +250,32 @@ export function ensureResourceIsCorrect(resource: IResource) { assert.deepStrictEqual(resource, { attributes: [ { - 'key': 'service.name', - 'value': { - 'stringValue': `unknown_service:${process.argv0}`, - 'value': 'stringValue' - } + key: 'service.name', + value: { + stringValue: `unknown_service:${process.argv0}`, + value: 'stringValue', + }, }, { - 'key': 'telemetry.sdk.language', - 'value': { - 'stringValue': 'nodejs', - 'value': 'stringValue' - } + key: 'telemetry.sdk.language', + value: { + stringValue: 'nodejs', + value: 'stringValue', + }, }, { - 'key': 'telemetry.sdk.name', - 'value': { - 'stringValue': 'opentelemetry', - 'value': 'stringValue' - } + key: 'telemetry.sdk.name', + value: { + stringValue: 'opentelemetry', + value: 'stringValue', + }, }, { - 'key': 'telemetry.sdk.version', - 'value': { - 'stringValue': VERSION, - 'value': 'stringValue' - } + key: 'telemetry.sdk.version', + value: { + stringValue: VERSION, + value: 'stringValue', + }, }, { key: 'service', diff --git a/experimental/packages/otlp-grpc-exporter-base/test/util.test.ts b/experimental/packages/otlp-grpc-exporter-base/test/util.test.ts index c583317895..c25f086f8c 100644 --- a/experimental/packages/otlp-grpc-exporter-base/test/util.test.ts +++ b/experimental/packages/otlp-grpc-exporter-base/test/util.test.ts @@ -19,7 +19,14 @@ import * as assert from 'assert'; import { diag } from '@opentelemetry/api'; import * as grpc from '@grpc/grpc-js'; -import { validateAndNormalizeUrl, configureCompression, GrpcCompressionAlgorithm, configureSecurity, useSecureConnection, DEFAULT_COLLECTOR_URL } from '../src/util'; +import { + validateAndNormalizeUrl, + configureCompression, + GrpcCompressionAlgorithm, + configureSecurity, + useSecureConnection, + DEFAULT_COLLECTOR_URL, +} from '../src/util'; import { CompressionAlgorithm } from '@opentelemetry/otlp-exporter-base'; // Tests added to detect breakage released in #2130 @@ -69,7 +76,7 @@ describe('validateAndNormalizeUrl()', () => { it(test.name, () => { const diagWarn = 
sinon.stub(diag, 'warn'); try { - assert.strictEqual(validateAndNormalizeUrl(test.input), (test.expected)); + assert.strictEqual(validateAndNormalizeUrl(test.input), test.expected); if (test.warn) { sinon.assert.calledWith(diagWarn, test.warn); } else { @@ -90,13 +97,16 @@ describe('utils - configureSecurity', () => { }); it('should return user defined channel credentials', () => { const userDefinedCredentials = grpc.credentials.createSsl(); - const credentials = configureSecurity(userDefinedCredentials, 'http://foo.bar'); + const credentials = configureSecurity( + userDefinedCredentials, + 'http://foo.bar' + ); assert.ok(userDefinedCredentials === credentials); assert.ok(credentials._isSecure() === true); }); it('should return secure channel when endpoint contains https scheme - no matter insecure env settings,', () => { - envSource.OTEL_EXPORTER_OTLP_TRACES_INSECURE='true'; + envSource.OTEL_EXPORTER_OTLP_TRACES_INSECURE = 'true'; const credentials = configureSecurity(undefined, 'https://foo.bar'); assert.ok(credentials._isSecure() === true); delete envSource.OTEL_EXPORTER_OTLP_TRACES_INSECURE; @@ -111,25 +121,25 @@ describe('utils - configureSecurity', () => { assert.ok(credentials._isSecure() === true); }); it('should return insecure channel when endpoint contains http scheme and insecure env set to false', () => { - envSource.OTEL_EXPORTER_OTLP_TRACES_INSECURE='false'; + envSource.OTEL_EXPORTER_OTLP_TRACES_INSECURE = 'false'; const credentials = configureSecurity(undefined, 'http://foo.bar'); assert.ok(credentials._isSecure() === false); delete envSource.OTEL_EXPORTER_OTLP_TRACES_INSECURE; }); it('should return insecure channel when endpoint contains http scheme and insecure env set to true', () => { - envSource.OTEL_EXPORTER_OTLP_INSECURE='true'; + envSource.OTEL_EXPORTER_OTLP_INSECURE = 'true'; const credentials = configureSecurity(undefined, 'http://localhost'); assert.ok(credentials._isSecure() === false); delete envSource.OTEL_EXPORTER_OTLP_INSECURE; }); it('should return secure channel when endpoint does not contain scheme and insecure env set to false', () => { - envSource.OTEL_EXPORTER_OTLP_TRACES_INSECURE='false'; + envSource.OTEL_EXPORTER_OTLP_TRACES_INSECURE = 'false'; const credentials = configureSecurity(undefined, 'foo.bar'); assert.ok(credentials._isSecure() === true); delete envSource.OTEL_EXPORTER_OTLP_TRACES_INSECURE; }); it('should return insecure channel when endpoint does not contain scheme and insecure env set to true', () => { - envSource.OTEL_EXPORTER_OTLP_INSECURE='true'; + envSource.OTEL_EXPORTER_OTLP_INSECURE = 'true'; const credentials = configureSecurity(undefined, 'foo.bar'); assert.ok(credentials._isSecure() === false); delete envSource.OTEL_EXPORTER_OTLP_INSECURE; @@ -139,9 +149,10 @@ describe('utils - configureSecurity', () => { describe('useSecureConnection', () => { const envSource = process.env; it('should return secure connection using all credentials', () => { - envSource.OTEL_EXPORTER_OTLP_CERTIFICATE='./test/certs/ca.crt'; - envSource.OTEL_EXPORTER_OTLP_TRACES_CLIENT_KEY='./test/certs/client.key'; - envSource.OTEL_EXPORTER_OTLP_TRACES_CLIENT_CERTIFICATE='./test/certs/client.crt'; + envSource.OTEL_EXPORTER_OTLP_CERTIFICATE = './test/certs/ca.crt'; + envSource.OTEL_EXPORTER_OTLP_TRACES_CLIENT_KEY = './test/certs/client.key'; + envSource.OTEL_EXPORTER_OTLP_TRACES_CLIENT_CERTIFICATE = + './test/certs/client.crt'; const credentials = useSecureConnection(); assert.ok(credentials._isSecure() === true); @@ -151,13 +162,13 @@ 
describe('useSecureConnection', () => { delete envSource.OTEL_EXPORTER_OTLP_TRACES_CLIENT_CERTIFICATE; }); it('should return secure connection using only root certificate', () => { - envSource.OTEL_EXPORTER_OTLP_CERTIFICATE='./test/certs/ca.crt'; + envSource.OTEL_EXPORTER_OTLP_CERTIFICATE = './test/certs/ca.crt'; const credentials = useSecureConnection(); assert.ok(credentials._isSecure() === true); delete envSource.OTEL_EXPORTER_OTLP_CERTIFICATE; }); it('should warn user when file cannot be read and use default root certificate', () => { - envSource.OTEL_EXPORTER_OTLP_CERTIFICATE='./wrongpath/test/certs/ca.crt'; + envSource.OTEL_EXPORTER_OTLP_CERTIFICATE = './wrongpath/test/certs/ca.crt'; const diagWarn = sinon.stub(diag, 'warn'); const credentials = useSecureConnection(); const args = diagWarn.args[0]; @@ -175,19 +186,31 @@ describe('configureCompression', () => { const envSource = process.env; it('should return none for compression', () => { const compression = CompressionAlgorithm.NONE; - assert.strictEqual(configureCompression(compression), GrpcCompressionAlgorithm.NONE); + assert.strictEqual( + configureCompression(compression), + GrpcCompressionAlgorithm.NONE + ); }); it('should return gzip compression defined via env', () => { envSource.OTEL_EXPORTER_OTLP_TRACES_COMPRESSION = 'gzip'; - assert.strictEqual(configureCompression(undefined),GrpcCompressionAlgorithm.GZIP); + assert.strictEqual( + configureCompression(undefined), + GrpcCompressionAlgorithm.GZIP + ); delete envSource.OTEL_EXPORTER_OTLP_TRACES_COMPRESSION; }); it('should return none for compression defined via env', () => { envSource.OTEL_EXPORTER_OTLP_TRACES_COMPRESSION = 'none'; - assert.strictEqual(configureCompression(undefined),GrpcCompressionAlgorithm.NONE); + assert.strictEqual( + configureCompression(undefined), + GrpcCompressionAlgorithm.NONE + ); delete envSource.OTEL_EXPORTER_OTLP_TRACES_COMPRESSION; }); it('should return none for compression when no compression is set', () => { - assert.strictEqual(configureCompression(undefined),GrpcCompressionAlgorithm.NONE); + assert.strictEqual( + configureCompression(undefined), + GrpcCompressionAlgorithm.NONE + ); }); }); diff --git a/experimental/packages/otlp-proto-exporter-base/.eslintignore b/experimental/packages/otlp-proto-exporter-base/.eslintignore index 378eac25d3..345f1a599e 100644 --- a/experimental/packages/otlp-proto-exporter-base/.eslintignore +++ b/experimental/packages/otlp-proto-exporter-base/.eslintignore @@ -1 +1,2 @@ build +src/generated diff --git a/experimental/packages/otlp-proto-exporter-base/src/OTLPProtoExporterNodeBase.ts b/experimental/packages/otlp-proto-exporter-base/src/OTLPProtoExporterNodeBase.ts index c08e2b5ef8..1d458c5cf6 100644 --- a/experimental/packages/otlp-proto-exporter-base/src/OTLPProtoExporterNodeBase.ts +++ b/experimental/packages/otlp-proto-exporter-base/src/OTLPProtoExporterNodeBase.ts @@ -20,14 +20,16 @@ import { OTLPExporterNodeBase as OTLPExporterBaseMain, CompressionAlgorithm, OTLPExporterError, - OTLPExporterNodeConfigBase + OTLPExporterNodeConfigBase, } from '@opentelemetry/otlp-exporter-base'; -type SendFn = (collector: OTLPProtoExporterNodeBase, +type SendFn = ( + collector: OTLPProtoExporterNodeBase, objects: ExportItem[], compression: CompressionAlgorithm, onSuccess: () => void, - onError: (error: OTLPExporterError) => void) => void; + onError: (error: OTLPExporterError) => void +) => void; /** * Collector Exporter abstract base class @@ -49,8 +51,7 @@ export abstract class OTLPProtoExporterNodeBase< ): void { const 
promise = new Promise((resolve, reject) => { this._send(this, objects, this.compression, resolve, reject); - }) - .then(onSuccess, onError); + }).then(onSuccess, onError); this._sendingPromises.push(promise); const popPromise = () => { diff --git a/experimental/packages/otlp-proto-exporter-base/src/util.ts b/experimental/packages/otlp-proto-exporter-base/src/util.ts index dcb4fa1c73..1d262cd74b 100644 --- a/experimental/packages/otlp-proto-exporter-base/src/util.ts +++ b/experimental/packages/otlp-proto-exporter-base/src/util.ts @@ -19,7 +19,7 @@ import { OTLPProtoExporterNodeBase } from './OTLPProtoExporterNodeBase'; import { CompressionAlgorithm, OTLPExporterError, - sendWithHttp + sendWithHttp, } from '@opentelemetry/otlp-exporter-base'; import type * as protobuf from 'protobufjs'; import * as root from './generated/root'; @@ -27,16 +27,18 @@ import * as root from './generated/root'; export interface ExportRequestType unknown }> { create(properties?: T): R; encode(message: T, writer?: protobuf.Writer): protobuf.Writer; - decode(reader: (protobuf.Reader | Uint8Array), length?: number): R; + decode(reader: protobuf.Reader | Uint8Array, length?: number): R; } export function getExportRequestProto( - clientType: ServiceClientType, + clientType: ServiceClientType ): ExportRequestType { if (clientType === ServiceClientType.SPANS) { - return root.opentelemetry.proto.collector.trace.v1.ExportTraceServiceRequest as unknown as ExportRequestType; + return root.opentelemetry.proto.collector.trace.v1 + .ExportTraceServiceRequest as unknown as ExportRequestType; } else { - return root.opentelemetry.proto.collector.metrics.v1.ExportMetricsServiceRequest as unknown as ExportRequestType; + return root.opentelemetry.proto.collector.metrics.v1 + .ExportMetricsServiceRequest as unknown as ExportRequestType; } } @@ -49,7 +51,9 @@ export function send( ): void { const serviceRequest = collector.convert(objects); - const exportRequestType = getExportRequestProto(collector.getServiceClientType()); + const exportRequestType = getExportRequestProto( + collector.getServiceClientType() + ); const message = exportRequestType.create(serviceRequest); if (message) { const body = exportRequestType.encode(message).finish(); diff --git a/experimental/packages/otlp-transformer/src/common/internal.ts b/experimental/packages/otlp-transformer/src/common/internal.ts index 7221f4dce6..a5ba7f3c2d 100644 --- a/experimental/packages/otlp-transformer/src/common/internal.ts +++ b/experimental/packages/otlp-transformer/src/common/internal.ts @@ -16,16 +16,11 @@ import type { SpanAttributes } from '@opentelemetry/api'; import { IAnyValue, IKeyValue } from './types'; -export function toAttributes( - attributes: SpanAttributes -): IKeyValue[] { +export function toAttributes(attributes: SpanAttributes): IKeyValue[] { return Object.keys(attributes).map(key => toKeyValue(key, attributes[key])); } -export function toKeyValue( - key: string, - value: unknown -): IKeyValue { +export function toKeyValue(key: string, value: unknown): IKeyValue { return { key: key, value: toAnyValue(value), @@ -41,8 +36,16 @@ export function toAnyValue(value: unknown): IAnyValue { } if (t === 'boolean') return { boolValue: value as boolean }; if (value instanceof Uint8Array) return { bytesValue: value }; - if (Array.isArray(value)) return { arrayValue: { values: value.map(toAnyValue) } }; - if (t === 'object' && value != null) return { kvlistValue: { values: Object.entries(value as object).map(([k, v]) => toKeyValue(k, v)) } }; + if (Array.isArray(value)) + 
return { arrayValue: { values: value.map(toAnyValue) } }; + if (t === 'object' && value != null) + return { + kvlistValue: { + values: Object.entries(value as object).map(([k, v]) => + toKeyValue(k, v) + ), + }, + }; return {}; } diff --git a/experimental/packages/otlp-transformer/src/common/types.ts b/experimental/packages/otlp-transformer/src/common/types.ts index c64d9820e2..8b30231e74 100644 --- a/experimental/packages/otlp-transformer/src/common/types.ts +++ b/experimental/packages/otlp-transformer/src/common/types.ts @@ -17,7 +17,7 @@ /** Properties of an InstrumentationScope. */ export interface IInstrumentationScope { /** InstrumentationScope name */ - name: string + name: string; /** InstrumentationScope version */ version?: string; @@ -35,16 +35,16 @@ export interface IKeyValue { /** Properties of an AnyValue. */ export interface IAnyValue { /** AnyValue stringValue */ - stringValue?: (string | null); + stringValue?: string | null; /** AnyValue boolValue */ - boolValue?: (boolean | null); + boolValue?: boolean | null; /** AnyValue intValue */ - intValue?: (number | null); + intValue?: number | null; /** AnyValue doubleValue */ - doubleValue?: (number | null); + doubleValue?: number | null; /** AnyValue arrayValue */ arrayValue?: IArrayValue; diff --git a/experimental/packages/otlp-transformer/src/metrics/index.ts b/experimental/packages/otlp-transformer/src/metrics/index.ts index 26d910b165..5cef667c7e 100644 --- a/experimental/packages/otlp-transformer/src/metrics/index.ts +++ b/experimental/packages/otlp-transformer/src/metrics/index.ts @@ -17,8 +17,10 @@ import type { ResourceMetrics } from '@opentelemetry/sdk-metrics'; import type { IExportMetricsServiceRequest } from './types'; import { toResourceMetrics } from './internal'; -export function createExportMetricsServiceRequest(resourceMetrics: ResourceMetrics[]): IExportMetricsServiceRequest { +export function createExportMetricsServiceRequest( + resourceMetrics: ResourceMetrics[] +): IExportMetricsServiceRequest { return { - resourceMetrics: resourceMetrics.map(metrics => toResourceMetrics(metrics)) + resourceMetrics: resourceMetrics.map(metrics => toResourceMetrics(metrics)), }; } diff --git a/experimental/packages/otlp-transformer/src/metrics/internal.ts b/experimental/packages/otlp-transformer/src/metrics/internal.ts index 7aa9c82acd..64012a5100 100644 --- a/experimental/packages/otlp-transformer/src/metrics/internal.ts +++ b/experimental/packages/otlp-transformer/src/metrics/internal.ts @@ -22,7 +22,7 @@ import { Histogram, MetricData, ResourceMetrics, - ScopeMetrics + ScopeMetrics, } from '@opentelemetry/sdk-metrics'; import { toAttributes } from '../common/internal'; import { @@ -31,32 +31,36 @@ import { IMetric, INumberDataPoint, IResourceMetrics, - IScopeMetrics + IScopeMetrics, } from './types'; -export function toResourceMetrics(resourceMetrics: ResourceMetrics): IResourceMetrics { +export function toResourceMetrics( + resourceMetrics: ResourceMetrics +): IResourceMetrics { return { resource: { attributes: toAttributes(resourceMetrics.resource.attributes), - droppedAttributesCount: 0 + droppedAttributesCount: 0, }, schemaUrl: undefined, // TODO: Schema Url does not exist yet in the SDK. 
- scopeMetrics: toScopeMetrics(resourceMetrics.scopeMetrics) + scopeMetrics: toScopeMetrics(resourceMetrics.scopeMetrics), }; } export function toScopeMetrics(scopeMetrics: ScopeMetrics[]): IScopeMetrics[] { - return Array.from(scopeMetrics.map(metrics => { - const scopeMetrics: IScopeMetrics = { - scope: { - name: metrics.scope.name, - version: metrics.scope.version, - }, - metrics: metrics.metrics.map(metricData => toMetric(metricData)), - schemaUrl: metrics.scope.schemaUrl - }; - return scopeMetrics; - })); + return Array.from( + scopeMetrics.map(metrics => { + const scopeMetrics: IScopeMetrics = { + scope: { + name: metrics.scope.name, + version: metrics.scope.version, + }, + metrics: metrics.metrics.map(metricData => toMetric(metricData)), + schemaUrl: metrics.scope.schemaUrl, + }; + return scopeMetrics; + }) + ); } export function toMetric(metricData: MetricData): IMetric { @@ -66,38 +70,39 @@ export function toMetric(metricData: MetricData): IMetric { unit: metricData.descriptor.unit, }; - const aggregationTemporality = toAggregationTemporality(metricData.aggregationTemporality); + const aggregationTemporality = toAggregationTemporality( + metricData.aggregationTemporality + ); if (metricData.dataPointType === DataPointType.SUM) { out.sum = { aggregationTemporality, isMonotonic: metricData.isMonotonic, - dataPoints: toSingularDataPoints(metricData) + dataPoints: toSingularDataPoints(metricData), }; } else if (metricData.dataPointType === DataPointType.GAUGE) { // Instrument is a gauge. out.gauge = { - dataPoints: toSingularDataPoints(metricData) + dataPoints: toSingularDataPoints(metricData), }; } else if (metricData.dataPointType === DataPointType.HISTOGRAM) { out.histogram = { aggregationTemporality, - dataPoints: toHistogramDataPoints(metricData) + dataPoints: toHistogramDataPoints(metricData), }; } return out; } -function toSingularDataPoint(dataPoint: DataPoint | DataPoint, valueType: ValueType) { +function toSingularDataPoint( + dataPoint: DataPoint | DataPoint, + valueType: ValueType +) { const out: INumberDataPoint = { attributes: toAttributes(dataPoint.attributes), - startTimeUnixNano: hrTimeToNanoseconds( - dataPoint.startTime - ), - timeUnixNano: hrTimeToNanoseconds( - dataPoint.endTime - ), + startTimeUnixNano: hrTimeToNanoseconds(dataPoint.startTime), + timeUnixNano: hrTimeToNanoseconds(dataPoint.endTime), }; if (valueType === ValueType.INT) { @@ -109,17 +114,13 @@ function toSingularDataPoint(dataPoint: DataPoint | DataPoint return out; } -function toSingularDataPoints( - metricData: MetricData -): INumberDataPoint[] { +function toSingularDataPoints(metricData: MetricData): INumberDataPoint[] { return metricData.dataPoints.map(dataPoint => { return toSingularDataPoint(dataPoint, metricData.descriptor.valueType); }); } -function toHistogramDataPoints( - metricData: MetricData -): IHistogramDataPoint[] { +function toHistogramDataPoints(metricData: MetricData): IHistogramDataPoint[] { return metricData.dataPoints.map(dataPoint => { const histogram = dataPoint.value as Histogram; return { @@ -131,15 +132,13 @@ function toHistogramDataPoints( min: histogram.min, max: histogram.max, startTimeUnixNano: hrTimeToNanoseconds(dataPoint.startTime), - timeUnixNano: hrTimeToNanoseconds( - dataPoint.endTime - ), + timeUnixNano: hrTimeToNanoseconds(dataPoint.endTime), }; }); } function toAggregationTemporality( - temporality: AggregationTemporality, + temporality: AggregationTemporality ): EAggregationTemporality { if (temporality === AggregationTemporality.DELTA) { return 
EAggregationTemporality.AGGREGATION_TEMPORALITY_DELTA; diff --git a/experimental/packages/otlp-transformer/src/metrics/types.ts b/experimental/packages/otlp-transformer/src/metrics/types.ts index 41870304d4..d3efd29c2e 100644 --- a/experimental/packages/otlp-transformer/src/metrics/types.ts +++ b/experimental/packages/otlp-transformer/src/metrics/types.ts @@ -18,19 +18,17 @@ import { IResource } from '../resource/types'; /** Properties of an ExportMetricsServiceRequest. */ export interface IExportMetricsServiceRequest { - /** ExportMetricsServiceRequest resourceMetrics */ - resourceMetrics: IResourceMetrics[] + resourceMetrics: IResourceMetrics[]; } /** Properties of a ResourceMetrics. */ export interface IResourceMetrics { - /** ResourceMetrics resource */ resource?: IResource; /** ResourceMetrics scopeMetrics */ - scopeMetrics: IScopeMetrics[] + scopeMetrics: IScopeMetrics[]; /** ResourceMetrics schemaUrl */ schemaUrl?: string; @@ -38,7 +36,6 @@ export interface IResourceMetrics { /** Properties of an IScopeMetrics. */ export interface IScopeMetrics { - /** ScopeMetrics scope */ scope?: IInstrumentationScope; @@ -51,7 +48,6 @@ export interface IScopeMetrics { /** Properties of a Metric. */ export interface IMetric { - /** Metric name */ name: string; @@ -80,29 +76,28 @@ export interface IMetric { /** Properties of a Gauge. */ export interface IGauge { /** Gauge dataPoints */ - dataPoints: INumberDataPoint[] + dataPoints: INumberDataPoint[]; } /** Properties of a Sum. */ export interface ISum { - /** Sum dataPoints */ dataPoints: INumberDataPoint[]; /** Sum aggregationTemporality */ - aggregationTemporality: EAggregationTemporality + aggregationTemporality: EAggregationTemporality; /** Sum isMonotonic */ - isMonotonic?: (boolean | null); + isMonotonic?: boolean | null; } /** Properties of a Histogram. */ export interface IHistogram { /** Histogram dataPoints */ - dataPoints: IHistogramDataPoint[] + dataPoints: IHistogramDataPoint[]; /** Histogram aggregationTemporality */ - aggregationTemporality?: EAggregationTemporality + aggregationTemporality?: EAggregationTemporality; } /** Properties of an ExponentialHistogram. */ @@ -122,9 +117,8 @@ export interface ISummary { /** Properties of a NumberDataPoint. */ export interface INumberDataPoint { - /** NumberDataPoint attributes */ - attributes: IKeyValue[] + attributes: IKeyValue[]; /** NumberDataPoint startTimeUnixNano */ startTimeUnixNano?: number; @@ -133,7 +127,7 @@ export interface INumberDataPoint { timeUnixNano?: number; /** NumberDataPoint asDouble */ - asDouble?: (number | null); + asDouble?: number | null; /** NumberDataPoint asInt */ asInt?: number; @@ -145,7 +139,6 @@ export interface INumberDataPoint { flags?: number; } - /** Properties of a HistogramDataPoint. */ export interface IHistogramDataPoint { /** HistogramDataPoint attributes */ @@ -164,10 +157,10 @@ export interface IHistogramDataPoint { sum?: number; /** HistogramDataPoint bucketCounts */ - bucketCounts?: number[] + bucketCounts?: number[]; /** HistogramDataPoint explicitBounds */ - explicitBounds?: number[] + explicitBounds?: number[]; /** HistogramDataPoint exemplars */ exemplars?: IExemplar[]; @@ -184,7 +177,6 @@ export interface IHistogramDataPoint { /** Properties of an ExponentialHistogramDataPoint. 
*/ export interface IExponentialHistogramDataPoint { - /** ExponentialHistogramDataPoint attributes */ attributes?: IKeyValue[]; @@ -216,10 +208,9 @@ export interface IExponentialHistogramDataPoint { flags?: number; /** ExponentialHistogramDataPoint exemplars */ - exemplars?: IExemplar[] + exemplars?: IExemplar[]; } - /** Properties of a SummaryDataPoint. */ export interface ISummaryDataPoint { /** SummaryDataPoint attributes */ @@ -353,5 +344,5 @@ export const enum EAggregationTemporality { CUMULATIVE is valid, it is not recommended. This may cause problems for systems that do not use start_time to determine when the aggregation value was reset (e.g. Prometheus). */ - AGGREGATION_TEMPORALITY_CUMULATIVE = 2 + AGGREGATION_TEMPORALITY_CUMULATIVE = 2, } diff --git a/experimental/packages/otlp-transformer/src/trace/index.ts b/experimental/packages/otlp-transformer/src/trace/index.ts index 0601f017a9..8d20181221 100644 --- a/experimental/packages/otlp-transformer/src/trace/index.ts +++ b/experimental/packages/otlp-transformer/src/trace/index.ts @@ -17,11 +17,18 @@ import type { Resource } from '@opentelemetry/resources'; import type { ReadableSpan } from '@opentelemetry/sdk-trace-base'; import { toAttributes } from '../common/internal'; import { sdkSpanToOtlpSpan } from './internal'; -import { IExportTraceServiceRequest, IResourceSpans, IScopeSpans } from './types'; +import { + IExportTraceServiceRequest, + IResourceSpans, + IScopeSpans, +} from './types'; -export function createExportTraceServiceRequest(spans: ReadableSpan[], useHex?: boolean): IExportTraceServiceRequest { +export function createExportTraceServiceRequest( + spans: ReadableSpan[], + useHex?: boolean +): IExportTraceServiceRequest { return { - resourceSpans: spanRecordsToResourceSpans(spans, useHex) + resourceSpans: spanRecordsToResourceSpans(spans, useHex), }; } @@ -36,7 +43,9 @@ function createResourceMap(readableSpans: ReadableSpan[]) { } // TODO this is duplicated in basic tracer. 
Consolidate on a common helper in core - const instrumentationLibraryKey = `${record.instrumentationLibrary.name}@${record.instrumentationLibrary.version || ''}:${record.instrumentationLibrary.schemaUrl || ''}`; + const instrumentationLibraryKey = `${record.instrumentationLibrary.name}@${ + record.instrumentationLibrary.version || '' + }:${record.instrumentationLibrary.schemaUrl || ''}`; let records = ilmMap.get(instrumentationLibraryKey); if (!records) { @@ -50,7 +59,10 @@ function createResourceMap(readableSpans: ReadableSpan[]) { return resourceMap; } -function spanRecordsToResourceSpans(readableSpans: ReadableSpan[], useHex?: boolean): IResourceSpans[] { +function spanRecordsToResourceSpans( + readableSpans: ReadableSpan[], + useHex?: boolean +): IResourceSpans[] { const resourceMap = createResourceMap(readableSpans); const out: IResourceSpans[] = []; @@ -64,13 +76,16 @@ function spanRecordsToResourceSpans(readableSpans: ReadableSpan[], useHex?: bool while (!ilmEntry.done) { const scopeSpans = ilmEntry.value; if (scopeSpans.length > 0) { - const { name, version, schemaUrl } = scopeSpans[0].instrumentationLibrary; - const spans = scopeSpans.map(readableSpan => sdkSpanToOtlpSpan(readableSpan, useHex)); + const { name, version, schemaUrl } = + scopeSpans[0].instrumentationLibrary; + const spans = scopeSpans.map(readableSpan => + sdkSpanToOtlpSpan(readableSpan, useHex) + ); scopeResourceSpans.push({ scope: { name, version }, spans: spans, - schemaUrl: schemaUrl + schemaUrl: schemaUrl, }); } ilmEntry = ilmIterator.next(); @@ -82,7 +97,7 @@ function spanRecordsToResourceSpans(readableSpans: ReadableSpan[], useHex?: bool droppedAttributesCount: 0, }, scopeSpans: scopeResourceSpans, - schemaUrl: undefined + schemaUrl: undefined, }; out.push(transformedSpans); diff --git a/experimental/packages/otlp-transformer/src/trace/internal.ts b/experimental/packages/otlp-transformer/src/trace/internal.ts index 87a8182c3f..bebc41c4e8 100644 --- a/experimental/packages/otlp-transformer/src/trace/internal.ts +++ b/experimental/packages/otlp-transformer/src/trace/internal.ts @@ -20,16 +20,17 @@ import { toAttributes } from '../common/internal'; import { EStatusCode, IEvent, ILink, ISpan } from './types'; import * as core from '@opentelemetry/core'; -export function sdkSpanToOtlpSpan( - span: ReadableSpan, - useHex?: boolean -): ISpan { +export function sdkSpanToOtlpSpan(span: ReadableSpan, useHex?: boolean): ISpan { const ctx = span.spanContext(); const status = span.status; - const parentSpanId = useHex? span.parentSpanId : span.parentSpanId != null? core.hexToBase64(span.parentSpanId): undefined; + const parentSpanId = useHex + ? span.parentSpanId + : span.parentSpanId != null + ? core.hexToBase64(span.parentSpanId) + : undefined; return { - traceId: useHex? ctx.traceId : core.hexToBase64(ctx.traceId), - spanId: useHex? ctx.spanId : core.hexToBase64(ctx.spanId), + traceId: useHex ? ctx.traceId : core.hexToBase64(ctx.traceId), + spanId: useHex ? ctx.spanId : core.hexToBase64(ctx.spanId), parentSpanId: parentSpanId, name: span.name, // Span kind is offset by 1 because the API does not define a value for unset @@ -53,17 +54,21 @@ export function sdkSpanToOtlpSpan( export function toOtlpLink(link: Link, useHex?: boolean): ILink { return { attributes: link.attributes ? toAttributes(link.attributes) : [], - spanId: useHex? link.context.spanId : core.hexToBase64(link.context.spanId), - traceId: useHex? link.context.traceId : core.hexToBase64(link.context.traceId), + spanId: useHex + ? 
link.context.spanId + : core.hexToBase64(link.context.spanId), + traceId: useHex + ? link.context.traceId + : core.hexToBase64(link.context.traceId), droppedAttributesCount: 0, }; } -export function toOtlpSpanEvent( - timedEvent: TimedEvent -): IEvent { +export function toOtlpSpanEvent(timedEvent: TimedEvent): IEvent { return { - attributes: timedEvent.attributes ? toAttributes(timedEvent.attributes) : [], + attributes: timedEvent.attributes + ? toAttributes(timedEvent.attributes) + : [], name: timedEvent.name, timeUnixNano: hrTimeToNanoseconds(timedEvent.time), droppedAttributesCount: 0, diff --git a/experimental/packages/otlp-transformer/src/trace/types.ts b/experimental/packages/otlp-transformer/src/trace/types.ts index a12260c7e4..08e52c7a06 100644 --- a/experimental/packages/otlp-transformer/src/trace/types.ts +++ b/experimental/packages/otlp-transformer/src/trace/types.ts @@ -19,14 +19,12 @@ import { IResource } from '../resource/types'; /** Properties of an ExportTraceServiceRequest. */ export interface IExportTraceServiceRequest { - /** ExportTraceServiceRequest resourceSpans */ resourceSpans?: IResourceSpans[]; } /** Properties of a ResourceSpans. */ export interface IResourceSpans { - /** ResourceSpans resource */ resource?: IResource; @@ -39,15 +37,14 @@ export interface IResourceSpans { /** Properties of an ScopeSpans. */ export interface IScopeSpans { - /** IScopeSpans scope */ scope?: IInstrumentationScope; /** IScopeSpans spans */ - spans?: ISpan[] + spans?: ISpan[]; /** IScopeSpans schemaUrl */ - schemaUrl?: (string | null); + schemaUrl?: string | null; } /** Properties of a Span. */ @@ -59,7 +56,7 @@ export interface ISpan { spanId: string; /** Span traceState */ - traceState?: (string | null); + traceState?: string | null; /** Span parentSpanId */ parentSpanId?: string; @@ -80,19 +77,19 @@ export interface ISpan { attributes: IKeyValue[]; /** Span droppedAttributesCount */ - droppedAttributesCount: number + droppedAttributesCount: number; /** Span events */ events: IEvent[]; /** Span droppedEventsCount */ - droppedEventsCount: number + droppedEventsCount: number; /** Span links */ links: ILink[]; /** Span droppedLinksCount */ - droppedLinksCount: number + droppedLinksCount: number; /** Span status */ status: IStatus; @@ -107,30 +104,30 @@ export enum ESpanKind { SPAN_KIND_UNSPECIFIED = 0, /** Indicates that the span represents an internal operation within an application, - * as opposed to an operation happening at the boundaries. Default value. - */ + * as opposed to an operation happening at the boundaries. Default value. + */ SPAN_KIND_INTERNAL = 1, /** Indicates that the span covers server-side handling of an RPC or other - * remote network request. - */ + * remote network request. + */ SPAN_KIND_SERVER = 2, /** Indicates that the span describes a request to some remote service. - */ + */ SPAN_KIND_CLIENT = 3, /** Indicates that the span describes a producer sending a message to a broker. - * Unlike CLIENT and SERVER, there is often no direct critical path latency relationship - * between producer and consumer spans. A PRODUCER span ends when the message was accepted - * by the broker while the logical processing of the message might span a much longer time. - */ + * Unlike CLIENT and SERVER, there is often no direct critical path latency relationship + * between producer and consumer spans. A PRODUCER span ends when the message was accepted + * by the broker while the logical processing of the message might span a much longer time. 
+ */ SPAN_KIND_PRODUCER = 4, /** Indicates that the span describes consumer receiving a message from a broker. - * Like the PRODUCER kind, there is often no direct critical path latency relationship - * between producer and consumer spans. - */ + * Like the PRODUCER kind, there is often no direct critical path latency relationship + * between producer and consumer spans. + */ SPAN_KIND_CONSUMER = 5, } diff --git a/experimental/packages/otlp-transformer/test/common.test.ts b/experimental/packages/otlp-transformer/test/common.test.ts index 05902ca941..dfcb24d6f5 100644 --- a/experimental/packages/otlp-transformer/test/common.test.ts +++ b/experimental/packages/otlp-transformer/test/common.test.ts @@ -20,24 +20,31 @@ import * as assert from 'assert'; describe('common', () => { describe('toAnyValue', () => { it('serializes an array', () => { - const anyValue = toAnyValue([1, 'two', false, 2.5, new Uint8Array([0, 1, 2]), { somekey: 'somevalue' }]); + const anyValue = toAnyValue([ + 1, + 'two', + false, + 2.5, + new Uint8Array([0, 1, 2]), + { somekey: 'somevalue' }, + ]); assert.deepStrictEqual(anyValue, { arrayValue: { values: [ { - intValue: 1 + intValue: 1, }, { - stringValue: 'two' + stringValue: 'two', }, { - boolValue: false + boolValue: false, }, { - doubleValue: 2.5 + doubleValue: 2.5, }, { - bytesValue: new Uint8Array([0, 1, 2]) + bytesValue: new Uint8Array([0, 1, 2]), }, { kvlistValue: { @@ -45,14 +52,14 @@ describe('common', () => { { key: 'somekey', value: { - stringValue: 'somevalue' - } - } - ] - } + stringValue: 'somevalue', + }, + }, + ], + }, }, - ] - } + ], + }, }); }); }); diff --git a/experimental/packages/otlp-transformer/test/metrics.test.ts b/experimental/packages/otlp-transformer/test/metrics.test.ts index df88389c5e..325c3eee4a 100644 --- a/experimental/packages/otlp-transformer/test/metrics.test.ts +++ b/experimental/packages/otlp-transformer/test/metrics.test.ts @@ -20,7 +20,7 @@ import { DataPointType, InstrumentType, MetricData, - ResourceMetrics + ResourceMetrics, } from '@opentelemetry/sdk-metrics'; import * as assert from 'assert'; import { createExportMetricsServiceRequest } from '../src/metrics'; @@ -93,14 +93,17 @@ describe('Metrics', () => { }, { stringValue: 'attribute value 2', - } - ] + }, + ], }, }, }, ]; - function createCounterData(value: number, aggregationTemporality: AggregationTemporality): MetricData { + function createCounterData( + value: number, + aggregationTemporality: AggregationTemporality + ): MetricData { return { descriptor: { description: 'this is a description', @@ -118,12 +121,15 @@ describe('Metrics', () => { startTime: START_TIME, endTime: END_TIME, attributes: ATTRIBUTES, - } - ] + }, + ], }; } - function createUpDownCounterData(value: number, aggregationTemporality: AggregationTemporality): MetricData { + function createUpDownCounterData( + value: number, + aggregationTemporality: AggregationTemporality + ): MetricData { return { descriptor: { description: 'this is a description', @@ -140,13 +146,16 @@ describe('Metrics', () => { value: value, startTime: START_TIME, endTime: END_TIME, - attributes: ATTRIBUTES - } - ] + attributes: ATTRIBUTES, + }, + ], }; } - function createObservableCounterData(value: number, aggregationTemporality: AggregationTemporality): MetricData { + function createObservableCounterData( + value: number, + aggregationTemporality: AggregationTemporality + ): MetricData { return { descriptor: { description: 'this is a description', @@ -164,12 +173,15 @@ describe('Metrics', () => { startTime: START_TIME, endTime: 
END_TIME, attributes: ATTRIBUTES, - } - ] + }, + ], }; } - function createObservableUpDownCounterData(value: number, aggregationTemporality: AggregationTemporality): MetricData { + function createObservableUpDownCounterData( + value: number, + aggregationTemporality: AggregationTemporality + ): MetricData { return { descriptor: { description: 'this is a description', @@ -187,12 +199,11 @@ describe('Metrics', () => { startTime: START_TIME, endTime: END_TIME, attributes: ATTRIBUTES, - } - ] + }, + ], }; } - function createObservableGaugeData(value: number): MetricData { return { descriptor: { @@ -210,17 +221,20 @@ describe('Metrics', () => { startTime: START_TIME, endTime: END_TIME, attributes: ATTRIBUTES, - } - ] + }, + ], }; } - function createHistogramMetrics(count: number, + function createHistogramMetrics( + count: number, sum: number, boundaries: number[], - counts: number[], aggregationTemporality: AggregationTemporality, + counts: number[], + aggregationTemporality: AggregationTemporality, min?: number, - max?: number): MetricData { + max?: number + ): MetricData { return { descriptor: { description: 'this is a description', @@ -240,14 +254,14 @@ describe('Metrics', () => { max: max, buckets: { boundaries: boundaries, - counts: counts - } + counts: counts, + }, }, startTime: START_TIME, endTime: END_TIME, attributes: ATTRIBUTES, - } - ] + }, + ], }; } @@ -257,22 +271,23 @@ describe('Metrics', () => { }); return { resource: resource, - scopeMetrics: - [ - { - scope: { - name: 'mylib', - version: '0.1.0', - schemaUrl: expectedSchemaUrl - }, - metrics: metricData, - } - ] + scopeMetrics: [ + { + scope: { + name: 'mylib', + version: '0.1.0', + schemaUrl: expectedSchemaUrl, + }, + metrics: metricData, + }, + ], }; } it('serializes a monotonic sum metric record', () => { - const metrics = createResourceMetrics([createCounterData(10, AggregationTemporality.DELTA)]); + const metrics = createResourceMetrics([ + createCounterData(10, AggregationTemporality.DELTA), + ]); const exportRequest = createExportMetricsServiceRequest([metrics]); assert.ok(exportRequest); @@ -299,7 +314,8 @@ describe('Metrics', () => { asInt: 10, }, ], - aggregationTemporality: EAggregationTemporality.AGGREGATION_TEMPORALITY_DELTA, + aggregationTemporality: + EAggregationTemporality.AGGREGATION_TEMPORALITY_DELTA, isMonotonic: true, }, }, @@ -312,7 +328,9 @@ describe('Metrics', () => { }); it('serializes a non-monotonic sum metric record', () => { - const metrics = createResourceMetrics([createUpDownCounterData(10, AggregationTemporality.DELTA)]); + const metrics = createResourceMetrics([ + createUpDownCounterData(10, AggregationTemporality.DELTA), + ]); const exportRequest = createExportMetricsServiceRequest([metrics]); assert.ok(exportRequest); @@ -339,7 +357,8 @@ describe('Metrics', () => { asInt: 10, }, ], - aggregationTemporality: EAggregationTemporality.AGGREGATION_TEMPORALITY_DELTA, + aggregationTemporality: + EAggregationTemporality.AGGREGATION_TEMPORALITY_DELTA, isMonotonic: false, }, }, @@ -352,9 +371,11 @@ describe('Metrics', () => { }); it('serializes an observable monotonic sum metric record', () => { - const exportRequest = createExportMetricsServiceRequest( - [createResourceMetrics([createObservableCounterData(10, AggregationTemporality.DELTA)])] - ); + const exportRequest = createExportMetricsServiceRequest([ + createResourceMetrics([ + createObservableCounterData(10, AggregationTemporality.DELTA), + ]), + ]); assert.ok(exportRequest); assert.deepStrictEqual(exportRequest, { @@ -380,7 +401,8 @@ 
describe('Metrics', () => { asInt: 10, }, ], - aggregationTemporality: EAggregationTemporality.AGGREGATION_TEMPORALITY_DELTA, + aggregationTemporality: + EAggregationTemporality.AGGREGATION_TEMPORALITY_DELTA, isMonotonic: true, }, }, @@ -393,9 +415,11 @@ describe('Metrics', () => { }); it('serializes an observable non-monotonic sum metric record', () => { - const exportRequest = createExportMetricsServiceRequest( - [createResourceMetrics([createObservableUpDownCounterData(10, AggregationTemporality.DELTA)])] - ); + const exportRequest = createExportMetricsServiceRequest([ + createResourceMetrics([ + createObservableUpDownCounterData(10, AggregationTemporality.DELTA), + ]), + ]); assert.ok(exportRequest); assert.deepStrictEqual(exportRequest, { @@ -421,7 +445,8 @@ describe('Metrics', () => { asInt: 10, }, ], - aggregationTemporality: EAggregationTemporality.AGGREGATION_TEMPORALITY_DELTA, + aggregationTemporality: + EAggregationTemporality.AGGREGATION_TEMPORALITY_DELTA, isMonotonic: false, }, }, @@ -434,9 +459,9 @@ describe('Metrics', () => { }); it('serializes a gauge metric record', () => { - const exportRequest = createExportMetricsServiceRequest( - [createResourceMetrics([createObservableGaugeData(10.5)])] - ); + const exportRequest = createExportMetricsServiceRequest([ + createResourceMetrics([createObservableGaugeData(10.5)]), + ]); assert.ok(exportRequest); assert.deepStrictEqual(exportRequest, { @@ -474,9 +499,19 @@ describe('Metrics', () => { describe('serializes a histogram metric record', () => { it('with min/max', () => { - const exportRequest = createExportMetricsServiceRequest( - [createResourceMetrics([createHistogramMetrics(2, 9, [5], [1, 1], AggregationTemporality.CUMULATIVE, 1, 8)])] - ); + const exportRequest = createExportMetricsServiceRequest([ + createResourceMetrics([ + createHistogramMetrics( + 2, + 9, + [5], + [1, 1], + AggregationTemporality.CUMULATIVE, + 1, + 8 + ), + ]), + ]); assert.ok(exportRequest); assert.deepStrictEqual(exportRequest, { @@ -494,7 +529,8 @@ describe('Metrics', () => { description: 'this is a description', unit: '1', histogram: { - aggregationTemporality: EAggregationTemporality.AGGREGATION_TEMPORALITY_CUMULATIVE, + aggregationTemporality: + EAggregationTemporality.AGGREGATION_TEMPORALITY_CUMULATIVE, dataPoints: [ { attributes: expectedAttributes, @@ -519,9 +555,17 @@ describe('Metrics', () => { }); it('without min/max', () => { - const exportRequest = createExportMetricsServiceRequest( - [createResourceMetrics([createHistogramMetrics(2, 9, [5], [1, 1], AggregationTemporality.CUMULATIVE)])] - ); + const exportRequest = createExportMetricsServiceRequest([ + createResourceMetrics([ + createHistogramMetrics( + 2, + 9, + [5], + [1, 1], + AggregationTemporality.CUMULATIVE + ), + ]), + ]); assert.ok(exportRequest); assert.deepStrictEqual(exportRequest, { @@ -539,7 +583,8 @@ describe('Metrics', () => { description: 'this is a description', unit: '1', histogram: { - aggregationTemporality: EAggregationTemporality.AGGREGATION_TEMPORALITY_CUMULATIVE, + aggregationTemporality: + EAggregationTemporality.AGGREGATION_TEMPORALITY_CUMULATIVE, dataPoints: [ { attributes: expectedAttributes, diff --git a/experimental/packages/otlp-transformer/test/trace.test.ts b/experimental/packages/otlp-transformer/test/trace.test.ts index b47dcaf1e9..6d0682d3d3 100644 --- a/experimental/packages/otlp-transformer/test/trace.test.ts +++ b/experimental/packages/otlp-transformer/test/trace.test.ts @@ -18,16 +18,28 @@ import { TraceState, hexToBase64 } from '@opentelemetry/core'; 
import { Resource } from '@opentelemetry/resources'; import { ReadableSpan } from '@opentelemetry/sdk-trace-base'; import * as assert from 'assert'; -import { createExportTraceServiceRequest, ESpanKind, EStatusCode } from '../src'; +import { + createExportTraceServiceRequest, + ESpanKind, + EStatusCode, +} from '../src'; -function createExpectedSpanJson(useHex: boolean){ - const traceId = useHex? '00000000000000000000000000000001' : hexToBase64('00000000000000000000000000000001'); - const spanId = useHex? '0000000000000002' : hexToBase64('0000000000000002'); - const parentSpanId = useHex? '0000000000000001' : hexToBase64('0000000000000001'); - const linkSpanId = useHex? '0000000000000003' : hexToBase64('0000000000000003'); - const linkTraceId = useHex? '00000000000000000000000000000002' : hexToBase64('00000000000000000000000000000002'); +function createExpectedSpanJson(useHex: boolean) { + const traceId = useHex + ? '00000000000000000000000000000001' + : hexToBase64('00000000000000000000000000000001'); + const spanId = useHex ? '0000000000000002' : hexToBase64('0000000000000002'); + const parentSpanId = useHex + ? '0000000000000001' + : hexToBase64('0000000000000001'); + const linkSpanId = useHex + ? '0000000000000003' + : hexToBase64('0000000000000003'); + const linkTraceId = useHex + ? '00000000000000000000000000000002' + : hexToBase64('00000000000000000000000000000002'); - return { + return { resourceSpans: [ { resource: { @@ -59,11 +71,11 @@ function createExpectedSpanJson(useHex: boolean){ { key: 'link-attribute', value: { - stringValue: 'string value' - } - } - ] - } + stringValue: 'string value', + }, + }, + ], + }, ], // eslint-disable-next-line @typescript-eslint/no-loss-of-precision startTimeUnixNano: 1640715557342725388, @@ -76,13 +88,13 @@ function createExpectedSpanJson(useHex: boolean){ { key: 'event-attribute', value: { - stringValue: 'some string value' - } - } + stringValue: 'some string value', + }, + }, ], name: 'some event', - timeUnixNano: 1640715558542725400 - } + timeUnixNano: 1640715558542725400, + }, ], attributes: [ { @@ -134,9 +146,9 @@ describe('Trace', () => { name: 'some event', time: [1640715558, 542725388], attributes: { - 'event-attribute': 'some string value' - } - } + 'event-attribute': 'some string value', + }, + }, ], instrumentationLibrary: { name: 'myLib', @@ -151,12 +163,12 @@ describe('Trace', () => { traceId: '00000000000000000000000000000002', traceFlags: 1, isRemote: false, - traceState: new TraceState('') + traceState: new TraceState(''), }, attributes: { - 'link-attribute': 'string value' - } - } + 'link-attribute': 'string value', + }, + }, ], name: 'span-name', resource, @@ -168,7 +180,9 @@ describe('Trace', () => { }); it('returns null on an empty list', () => { - assert.deepStrictEqual(createExportTraceServiceRequest([], true), { resourceSpans: [] }); + assert.deepStrictEqual(createExportTraceServiceRequest([], true), { + resourceSpans: [], + }); }); it('serializes a span with useHex = true', () => { @@ -187,14 +201,20 @@ describe('Trace', () => { (span as any).parentSpanId = undefined; const exportRequest = createExportTraceServiceRequest([span], true); assert.ok(exportRequest); - assert.strictEqual(exportRequest.resourceSpans?.[0].scopeSpans[0].spans?.[0].parentSpanId, undefined); + assert.strictEqual( + exportRequest.resourceSpans?.[0].scopeSpans[0].spans?.[0].parentSpanId, + undefined + ); }); it('serializes a span without a parent with useHex = false', () => { (span as any).parentSpanId = undefined; const exportRequest = 
createExportTraceServiceRequest([span], false); assert.ok(exportRequest); - assert.strictEqual(exportRequest.resourceSpans?.[0].scopeSpans[0].spans?.[0].parentSpanId, undefined); + assert.strictEqual( + exportRequest.resourceSpans?.[0].scopeSpans[0].spans?.[0].parentSpanId, + undefined + ); }); describe('status code', () => { @@ -203,7 +223,8 @@ describe('Trace', () => { span.status.message = 'error message'; const exportRequest = createExportTraceServiceRequest([span], true); assert.ok(exportRequest); - const spanStatus = exportRequest.resourceSpans?.[0].scopeSpans[0].spans?.[0].status; + const spanStatus = + exportRequest.resourceSpans?.[0].scopeSpans[0].spans?.[0].status; assert.strictEqual(spanStatus?.code, EStatusCode.STATUS_CODE_ERROR); assert.strictEqual(spanStatus?.message, 'error message'); }); @@ -212,7 +233,10 @@ describe('Trace', () => { span.status.code = SpanStatusCode.UNSET; const exportRequest = createExportTraceServiceRequest([span], true); assert.ok(exportRequest); - assert.strictEqual(exportRequest.resourceSpans?.[0].scopeSpans[0].spans?.[0].status.code, EStatusCode.STATUS_CODE_UNSET); + assert.strictEqual( + exportRequest.resourceSpans?.[0].scopeSpans[0].spans?.[0].status.code, + EStatusCode.STATUS_CODE_UNSET + ); }); }); @@ -221,31 +245,46 @@ describe('Trace', () => { (span as any).kind = SpanKind.CONSUMER; const exportRequest = createExportTraceServiceRequest([span], true); assert.ok(exportRequest); - assert.strictEqual(exportRequest.resourceSpans?.[0].scopeSpans[0].spans?.[0].kind, ESpanKind.SPAN_KIND_CONSUMER); + assert.strictEqual( + exportRequest.resourceSpans?.[0].scopeSpans[0].spans?.[0].kind, + ESpanKind.SPAN_KIND_CONSUMER + ); }); it('internal', () => { (span as any).kind = SpanKind.INTERNAL; const exportRequest = createExportTraceServiceRequest([span], true); assert.ok(exportRequest); - assert.strictEqual(exportRequest.resourceSpans?.[0].scopeSpans[0].spans?.[0].kind, ESpanKind.SPAN_KIND_INTERNAL); + assert.strictEqual( + exportRequest.resourceSpans?.[0].scopeSpans[0].spans?.[0].kind, + ESpanKind.SPAN_KIND_INTERNAL + ); }); it('producer', () => { (span as any).kind = SpanKind.PRODUCER; const exportRequest = createExportTraceServiceRequest([span], true); assert.ok(exportRequest); - assert.strictEqual(exportRequest.resourceSpans?.[0].scopeSpans[0].spans?.[0].kind, ESpanKind.SPAN_KIND_PRODUCER); + assert.strictEqual( + exportRequest.resourceSpans?.[0].scopeSpans[0].spans?.[0].kind, + ESpanKind.SPAN_KIND_PRODUCER + ); }); it('server', () => { (span as any).kind = SpanKind.SERVER; const exportRequest = createExportTraceServiceRequest([span], true); assert.ok(exportRequest); - assert.strictEqual(exportRequest.resourceSpans?.[0].scopeSpans[0].spans?.[0].kind, ESpanKind.SPAN_KIND_SERVER); + assert.strictEqual( + exportRequest.resourceSpans?.[0].scopeSpans[0].spans?.[0].kind, + ESpanKind.SPAN_KIND_SERVER + ); }); it('unspecified', () => { (span as any).kind = undefined; const exportRequest = createExportTraceServiceRequest([span], true); assert.ok(exportRequest); - assert.strictEqual(exportRequest.resourceSpans?.[0].scopeSpans[0].spans?.[0].kind, ESpanKind.SPAN_KIND_UNSPECIFIED); + assert.strictEqual( + exportRequest.resourceSpans?.[0].scopeSpans[0].spans?.[0].kind, + ESpanKind.SPAN_KIND_UNSPECIFIED + ); }); }); }); diff --git a/package.json b/package.json index 080914350d..b2da03723e 100644 --- a/package.json +++ b/package.json @@ -51,13 +51,16 @@ "@typescript-eslint/eslint-plugin": "5.3.1", "@typescript-eslint/parser": "5.3.1", "eslint": "8.22.0", + 
"eslint-config-prettier": "8.5.0", "eslint-plugin-header": "3.1.1", "eslint-plugin-node": "11.1.0", + "eslint-plugin-prettier": "4.2.1", "gh-pages": "4.0.0", "lerna": "6.0.3", "lerna-changelog": "2.2.0", "linkinator": "4.0.3", "markdownlint-cli": "0.32.2", + "prettier": "2.8.0", "semver": "7.3.5", "typedoc": "0.22.10", "typescript": "4.4.4" diff --git a/packages/opentelemetry-context-async-hooks/src/AbstractAsyncHooksContextManager.ts b/packages/opentelemetry-context-async-hooks/src/AbstractAsyncHooksContextManager.ts index 4a46f634f5..21ae7dd60e 100644 --- a/packages/opentelemetry-context-async-hooks/src/AbstractAsyncHooksContextManager.ts +++ b/packages/opentelemetry-context-async-hooks/src/AbstractAsyncHooksContextManager.ts @@ -37,7 +37,8 @@ const ADD_LISTENER_METHODS = [ ]; export abstract class AbstractAsyncHooksContextManager -implements ContextManager { + implements ContextManager +{ abstract active(): Context; abstract with ReturnType>( diff --git a/packages/opentelemetry-context-async-hooks/test/AsyncHooksContextManager.test.ts b/packages/opentelemetry-context-async-hooks/test/AsyncHooksContextManager.test.ts index 5f61505c52..da30470406 100644 --- a/packages/opentelemetry-context-async-hooks/test/AsyncHooksContextManager.test.ts +++ b/packages/opentelemetry-context-async-hooks/test/AsyncHooksContextManager.test.ts @@ -430,7 +430,7 @@ for (const contextManagerClass of [ const ee = new EventEmitter(); const context = ROOT_CONTEXT.setValue(key1, 1); const patchedEE = contextManager.bind(context, ee); - const handler = () => { }; + const handler = () => {}; patchedEE.once('test', handler); assert.strictEqual(patchedEE.listeners('test').length, 1); patchedEE.off('test', handler); @@ -502,7 +502,7 @@ for (const contextManagerClass of [ const otherContext = ROOT_CONTEXT.setValue(key1, 3); const patchedEE = otherContextManager.bind( otherContext, - contextManager.bind(context, ee), + contextManager.bind(context, ee) ); const handler = () => { assert.strictEqual(contextManager.active(), context); diff --git a/packages/opentelemetry-context-zone-peer-dep/src/ZoneContextManager.ts b/packages/opentelemetry-context-zone-peer-dep/src/ZoneContextManager.ts index 5afa99e1d6..3f5c8c71c8 100644 --- a/packages/opentelemetry-context-zone-peer-dep/src/ZoneContextManager.ts +++ b/packages/opentelemetry-context-zone-peer-dep/src/ZoneContextManager.ts @@ -65,7 +65,7 @@ export class ZoneContextManager implements ContextManager { writable: false, value: target.length, }); - return (contextWrapper as unknown) as T; + return contextWrapper as unknown as T; } /** @@ -73,7 +73,7 @@ export class ZoneContextManager implements ContextManager { * @param obj target object on which the listeners will be patched */ private _bindListener(context: Context, obj: T): T { - const target = (obj as unknown) as TargetWithEvents; + const target = obj as unknown as TargetWithEvents; if (target.__ot_listeners !== undefined) { return obj; } @@ -140,12 +140,7 @@ export class ZoneContextManager implements ContextManager { ) { const contextManager = this; - return function ( - this: TargetWithEvents, - event, - listener, - opts - ) { + return function (this: TargetWithEvents, event, listener, opts) { if (target.__ot_listeners === undefined) { target.__ot_listeners = {}; } @@ -217,7 +212,7 @@ export class ZoneContextManager implements ContextManager { } else if (isListenerObject(target)) { this._bindListener(context, target); } - return (target as unknown) as T; + return target as unknown as T; } /** diff --git 
a/packages/opentelemetry-context-zone-peer-dep/test/ZoneContextManager.test.ts b/packages/opentelemetry-context-zone-peer-dep/test/ZoneContextManager.test.ts index 542221be59..aea0dacb27 100644 --- a/packages/opentelemetry-context-zone-peer-dep/test/ZoneContextManager.test.ts +++ b/packages/opentelemetry-context-zone-peer-dep/test/ZoneContextManager.test.ts @@ -269,16 +269,14 @@ describe('ZoneContextManager', () => { it('should return current context (when enabled)', done => { const context = ROOT_CONTEXT.setValue(key1, { a: 1 }); - const fn: any = contextManager.bind( - context, - () => { - assert.strictEqual( - contextManager.active(), - context, - 'should have context' - ); - return done(); - }); + const fn: any = contextManager.bind(context, () => { + assert.strictEqual( + contextManager.active(), + context, + 'should have context' + ); + return done(); + }); fn(); }); diff --git a/packages/opentelemetry-core/src/baggage/propagation/W3CBaggagePropagator.ts b/packages/opentelemetry-core/src/baggage/propagation/W3CBaggagePropagator.ts index 3283a105a7..8a20dedcaa 100644 --- a/packages/opentelemetry-core/src/baggage/propagation/W3CBaggagePropagator.ts +++ b/packages/opentelemetry-core/src/baggage/propagation/W3CBaggagePropagator.ts @@ -28,13 +28,9 @@ import { BAGGAGE_HEADER, BAGGAGE_ITEMS_SEPARATOR, BAGGAGE_MAX_NAME_VALUE_PAIRS, - BAGGAGE_MAX_PER_NAME_VALUE_PAIRS + BAGGAGE_MAX_PER_NAME_VALUE_PAIRS, } from '../constants'; -import { - getKeyPairs, - parsePairKeyValue, - serializeKeyPairs -} from '../utils'; +import { getKeyPairs, parsePairKeyValue, serializeKeyPairs } from '../utils'; /** * Propagates {@link Baggage} through Context format propagation. @@ -59,7 +55,9 @@ export class W3CBaggagePropagator implements TextMapPropagator { extract(context: Context, carrier: unknown, getter: TextMapGetter): Context { const headerValue = getter.get(carrier, BAGGAGE_HEADER); - const baggageString = Array.isArray(headerValue) ? headerValue.join(BAGGAGE_ITEMS_SEPARATOR) : headerValue; + const baggageString = Array.isArray(headerValue) + ? headerValue.join(BAGGAGE_ITEMS_SEPARATOR) + : headerValue; if (!baggageString) return context; const baggage: Record = {}; if (baggageString.length === 0) { diff --git a/packages/opentelemetry-core/src/baggage/utils.ts b/packages/opentelemetry-core/src/baggage/utils.ts index 390b143ca7..991b8a89a6 100644 --- a/packages/opentelemetry-core/src/baggage/utils.ts +++ b/packages/opentelemetry-core/src/baggage/utils.ts @@ -13,7 +13,11 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -import { Baggage, BaggageEntryMetadata, baggageEntryMetadataFromString } from '@opentelemetry/api'; +import { + Baggage, + BaggageEntryMetadata, + baggageEntryMetadataFromString, +} from '@opentelemetry/api'; import { BAGGAGE_ITEMS_SEPARATOR, BAGGAGE_PROPERTIES_SEPARATOR, @@ -21,11 +25,16 @@ import { BAGGAGE_MAX_TOTAL_LENGTH, } from './constants'; -type ParsedBaggageKeyValue = { key: string, value: string, metadata: BaggageEntryMetadata | undefined }; +type ParsedBaggageKeyValue = { + key: string; + value: string; + metadata: BaggageEntryMetadata | undefined; +}; export function serializeKeyPairs(keyPairs: string[]): string { return keyPairs.reduce((hValue: string, current: string) => { - const value = `${hValue}${hValue !== '' ? BAGGAGE_ITEMS_SEPARATOR : '' + const value = `${hValue}${ + hValue !== '' ? BAGGAGE_ITEMS_SEPARATOR : '' }${current}`; return value.length > BAGGAGE_MAX_TOTAL_LENGTH ? 
hValue : value; }, ''); @@ -45,7 +54,9 @@ export function getKeyPairs(baggage: Baggage): string[] { }); } -export function parsePairKeyValue(entry: string): ParsedBaggageKeyValue | undefined { +export function parsePairKeyValue( + entry: string +): ParsedBaggageKeyValue | undefined { const valueProps = entry.split(BAGGAGE_PROPERTIES_SEPARATOR); if (valueProps.length <= 0) return; const keyPairPart = valueProps.shift(); @@ -67,7 +78,9 @@ export function parsePairKeyValue(entry: string): ParsedBaggageKeyValue | undefi * Parse a string serialized in the baggage HTTP Format (without metadata): * https://github.com/w3c/baggage/blob/master/baggage/HTTP_HEADER_FORMAT.md */ -export function parseKeyPairsIntoRecord(value?: string): Record { +export function parseKeyPairsIntoRecord( + value?: string +): Record { if (typeof value !== 'string' || value.length === 0) return {}; return value .split(BAGGAGE_ITEMS_SEPARATOR) diff --git a/packages/opentelemetry-core/src/common/anchored-clock.ts b/packages/opentelemetry-core/src/common/anchored-clock.ts index a47fe796b7..b9f17d81c1 100644 --- a/packages/opentelemetry-core/src/common/anchored-clock.ts +++ b/packages/opentelemetry-core/src/common/anchored-clock.ts @@ -21,7 +21,6 @@ export interface Clock { now(): number; } - /** * A utility for returning wall times anchored to a given point in time. Wall time measurements will * not be taken from the system, but instead are computed by adding a monotonic clock time diff --git a/packages/opentelemetry-core/src/common/time.ts b/packages/opentelemetry-core/src/common/time.ts index 52a3fc0fd1..64bc64d2d9 100644 --- a/packages/opentelemetry-core/src/common/time.ts +++ b/packages/opentelemetry-core/src/common/time.ts @@ -47,7 +47,7 @@ function numberToHrtime(epochMillis: number): api.HrTime { function getTimeOrigin(): number { let timeOrigin = performance.timeOrigin; if (typeof timeOrigin !== 'number') { - const perf: TimeOriginLegacy = (performance as unknown) as TimeOriginLegacy; + const perf: TimeOriginLegacy = performance as unknown as TimeOriginLegacy; timeOrigin = perf.timing && perf.timing.fetchStart; } return timeOrigin; @@ -174,7 +174,9 @@ export function isTimeInputHrTime(value: unknown): value is api.HrTime { * check if input value is a correct types.TimeInput * @param value */ -export function isTimeInput(value: unknown): value is api.HrTime | number | Date { +export function isTimeInput( + value: unknown +): value is api.HrTime | number | Date { return ( isTimeInputHrTime(value) || typeof value === 'number' || diff --git a/packages/opentelemetry-core/src/index.ts b/packages/opentelemetry-core/src/index.ts index 6c0834fe0f..247444bf58 100644 --- a/packages/opentelemetry-core/src/index.ts +++ b/packages/opentelemetry-core/src/index.ts @@ -44,5 +44,5 @@ export * from './utils/callback'; export * from './version'; import { _export } from './internal/exporter'; export const internal = { - _export + _export, }; diff --git a/packages/opentelemetry-core/src/internal/exporter.ts b/packages/opentelemetry-core/src/internal/exporter.ts index a489b35eac..28061f5ee7 100644 --- a/packages/opentelemetry-core/src/internal/exporter.ts +++ b/packages/opentelemetry-core/src/internal/exporter.ts @@ -23,10 +23,13 @@ export interface Exporter { } /** -* @internal -* Shared functionality used by Exporters while exporting data, including suppresion of Traces. 
-*/ -export function _export(exporter: Exporter, arg: T): Promise { + * @internal + * Shared functionality used by Exporters while exporting data, including suppresion of Traces. + */ +export function _export( + exporter: Exporter, + arg: T +): Promise { return new Promise(resolve => { // prevent downstream exporter calls from generating spans context.with(suppressTracing(context.active()), () => { diff --git a/packages/opentelemetry-core/src/platform/browser/environment.ts b/packages/opentelemetry-core/src/platform/browser/environment.ts index c752ae8a30..0c1409671b 100644 --- a/packages/opentelemetry-core/src/platform/browser/environment.ts +++ b/packages/opentelemetry-core/src/platform/browser/environment.ts @@ -26,6 +26,8 @@ import { _globalThis } from './globalThis'; * Gets the environment variables */ export function getEnv(): Required { - const globalEnv = parseEnvironment(_globalThis as typeof globalThis & RAW_ENVIRONMENT); + const globalEnv = parseEnvironment( + _globalThis as typeof globalThis & RAW_ENVIRONMENT + ); return Object.assign({}, DEFAULT_ENVIRONMENT, globalEnv); } diff --git a/packages/opentelemetry-core/src/platform/browser/globalThis.ts b/packages/opentelemetry-core/src/platform/browser/globalThis.ts index c438f2895f..b9b12b7228 100644 --- a/packages/opentelemetry-core/src/platform/browser/globalThis.ts +++ b/packages/opentelemetry-core/src/platform/browser/globalThis.ts @@ -27,8 +27,12 @@ /** only globals that common to node and browsers are allowed */ // eslint-disable-next-line node/no-unsupported-features/es-builtins, no-undef export const _globalThis: typeof globalThis = - typeof globalThis === 'object' ? globalThis : - typeof self === 'object' ? self : - typeof window === 'object' ? window : - typeof global === 'object' ? global : - {} as typeof globalThis; + typeof globalThis === 'object' + ? globalThis + : typeof self === 'object' + ? self + : typeof window === 'object' + ? window + : typeof global === 'object' + ? 
global + : ({} as typeof globalThis); diff --git a/packages/opentelemetry-core/src/platform/browser/sdk-info.ts b/packages/opentelemetry-core/src/platform/browser/sdk-info.ts index 9366e21fbc..b804cd3c85 100644 --- a/packages/opentelemetry-core/src/platform/browser/sdk-info.ts +++ b/packages/opentelemetry-core/src/platform/browser/sdk-info.ts @@ -24,6 +24,7 @@ import { export const SDK_INFO = { [SemanticResourceAttributes.TELEMETRY_SDK_NAME]: 'opentelemetry', [SemanticResourceAttributes.PROCESS_RUNTIME_NAME]: 'browser', - [SemanticResourceAttributes.TELEMETRY_SDK_LANGUAGE]: TelemetrySdkLanguageValues.WEBJS, + [SemanticResourceAttributes.TELEMETRY_SDK_LANGUAGE]: + TelemetrySdkLanguageValues.WEBJS, [SemanticResourceAttributes.TELEMETRY_SDK_VERSION]: VERSION, }; diff --git a/packages/opentelemetry-core/src/trace/W3CTraceContextPropagator.ts b/packages/opentelemetry-core/src/trace/W3CTraceContextPropagator.ts index 6afce4c3ed..7925e74814 100644 --- a/packages/opentelemetry-core/src/trace/W3CTraceContextPropagator.ts +++ b/packages/opentelemetry-core/src/trace/W3CTraceContextPropagator.ts @@ -20,7 +20,8 @@ import { SpanContext, TextMapGetter, TextMapPropagator, - TextMapSetter, trace, + TextMapSetter, + trace, TraceFlags, } from '@opentelemetry/api'; import { isTracingSuppressed } from './suppress-tracing'; diff --git a/packages/opentelemetry-core/src/trace/sampler/ParentBasedSampler.ts b/packages/opentelemetry-core/src/trace/sampler/ParentBasedSampler.ts index 4b2b6cec57..a3abf53bf4 100644 --- a/packages/opentelemetry-core/src/trace/sampler/ParentBasedSampler.ts +++ b/packages/opentelemetry-core/src/trace/sampler/ParentBasedSampler.ts @@ -22,7 +22,8 @@ import { SamplingResult, SpanAttributes, SpanKind, - TraceFlags, trace, + TraceFlags, + trace, } from '@opentelemetry/api'; import { globalErrorHandler } from '../../common/global-error-handler'; import { AlwaysOffSampler } from './AlwaysOffSampler'; diff --git a/packages/opentelemetry-core/src/utils/callback.ts b/packages/opentelemetry-core/src/utils/callback.ts index 1757fcfc1d..ebbad5f96b 100644 --- a/packages/opentelemetry-core/src/utils/callback.ts +++ b/packages/opentelemetry-core/src/utils/callback.ts @@ -40,11 +40,10 @@ export class BindOnceFuture< if (!this._isCalled) { this._isCalled = true; try { - Promise.resolve(this._callback.call(this._that, ...args)) - .then( - val => this._deferred.resolve(val), - err => this._deferred.reject(err) - ); + Promise.resolve(this._callback.call(this._that, ...args)).then( + val => this._deferred.resolve(val), + err => this._deferred.reject(err) + ); } catch (err) { this._deferred.reject(err); } diff --git a/packages/opentelemetry-core/src/utils/environment.ts b/packages/opentelemetry-core/src/utils/environment.ts index fcb0614c61..b576c13c8f 100644 --- a/packages/opentelemetry-core/src/utils/environment.ts +++ b/packages/opentelemetry-core/src/utils/environment.ts @@ -88,25 +88,25 @@ export type ENVIRONMENT = { OTEL_TRACES_EXPORTER?: string; OTEL_TRACES_SAMPLER_ARG?: string; OTEL_TRACES_SAMPLER?: string; - OTEL_EXPORTER_OTLP_INSECURE?: string, - OTEL_EXPORTER_OTLP_TRACES_INSECURE?: string, - OTEL_EXPORTER_OTLP_METRICS_INSECURE?: string, - OTEL_EXPORTER_OTLP_CERTIFICATE?: string, - OTEL_EXPORTER_OTLP_TRACES_CERTIFICATE?: string, - OTEL_EXPORTER_OTLP_METRICS_CERTIFICATE?: string, - OTEL_EXPORTER_OTLP_COMPRESSION?: string, - OTEL_EXPORTER_OTLP_TRACES_COMPRESSION?: string, - OTEL_EXPORTER_OTLP_METRICS_COMPRESSION?: string - OTEL_EXPORTER_OTLP_CLIENT_KEY?: string, - OTEL_EXPORTER_OTLP_TRACES_CLIENT_KEY?: 
string, - OTEL_EXPORTER_OTLP_METRICS_CLIENT_KEY?: string, - OTEL_EXPORTER_OTLP_CLIENT_CERTIFICATE?: string, - OTEL_EXPORTER_OTLP_TRACES_CLIENT_CERTIFICATE?: string, - OTEL_EXPORTER_OTLP_METRICS_CLIENT_CERTIFICATE?: string, - OTEL_EXPORTER_OTLP_PROTOCOL?: string, - OTEL_EXPORTER_OTLP_TRACES_PROTOCOL?: string, - OTEL_EXPORTER_OTLP_METRICS_PROTOCOL?: string, - OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE?: string + OTEL_EXPORTER_OTLP_INSECURE?: string; + OTEL_EXPORTER_OTLP_TRACES_INSECURE?: string; + OTEL_EXPORTER_OTLP_METRICS_INSECURE?: string; + OTEL_EXPORTER_OTLP_CERTIFICATE?: string; + OTEL_EXPORTER_OTLP_TRACES_CERTIFICATE?: string; + OTEL_EXPORTER_OTLP_METRICS_CERTIFICATE?: string; + OTEL_EXPORTER_OTLP_COMPRESSION?: string; + OTEL_EXPORTER_OTLP_TRACES_COMPRESSION?: string; + OTEL_EXPORTER_OTLP_METRICS_COMPRESSION?: string; + OTEL_EXPORTER_OTLP_CLIENT_KEY?: string; + OTEL_EXPORTER_OTLP_TRACES_CLIENT_KEY?: string; + OTEL_EXPORTER_OTLP_METRICS_CLIENT_KEY?: string; + OTEL_EXPORTER_OTLP_CLIENT_CERTIFICATE?: string; + OTEL_EXPORTER_OTLP_TRACES_CLIENT_CERTIFICATE?: string; + OTEL_EXPORTER_OTLP_METRICS_CLIENT_CERTIFICATE?: string; + OTEL_EXPORTER_OTLP_PROTOCOL?: string; + OTEL_EXPORTER_OTLP_TRACES_PROTOCOL?: string; + OTEL_EXPORTER_OTLP_METRICS_PROTOCOL?: string; + OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE?: string; } & ENVIRONMENT_NUMBERS & ENVIRONMENT_LISTS; @@ -154,7 +154,7 @@ export const DEFAULT_ENVIRONMENT: Required = { OTEL_SERVICE_NAME: '', OTEL_ATTRIBUTE_VALUE_LENGTH_LIMIT: DEFAULT_ATTRIBUTE_VALUE_LENGTH_LIMIT, OTEL_ATTRIBUTE_COUNT_LIMIT: DEFAULT_ATTRIBUTE_COUNT_LIMIT, - OTEL_SPAN_ATTRIBUTE_VALUE_LENGTH_LIMIT: DEFAULT_ATTRIBUTE_VALUE_LENGTH_LIMIT , + OTEL_SPAN_ATTRIBUTE_VALUE_LENGTH_LIMIT: DEFAULT_ATTRIBUTE_VALUE_LENGTH_LIMIT, OTEL_SPAN_ATTRIBUTE_COUNT_LIMIT: DEFAULT_ATTRIBUTE_COUNT_LIMIT, OTEL_SPAN_EVENT_COUNT_LIMIT: 128, OTEL_SPAN_LINK_COUNT_LIMIT: 128, @@ -179,7 +179,7 @@ export const DEFAULT_ENVIRONMENT: Required = { OTEL_EXPORTER_OTLP_PROTOCOL: 'http/protobuf', OTEL_EXPORTER_OTLP_TRACES_PROTOCOL: 'http/protobuf', OTEL_EXPORTER_OTLP_METRICS_PROTOCOL: 'http/protobuf', - OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE: 'cumulative' + OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE: 'cumulative', }; /** @@ -298,7 +298,7 @@ export function parseEnvironment(values: RAW_ENVIRONMENT): ENVIRONMENT { * populating default values. */ export function getEnvWithoutDefaults(): ENVIRONMENT { - return typeof process !== 'undefined' ? - parseEnvironment(process.env as RAW_ENVIRONMENT) : - parseEnvironment(_globalThis as typeof globalThis & RAW_ENVIRONMENT); + return typeof process !== 'undefined' + ? parseEnvironment(process.env as RAW_ENVIRONMENT) + : parseEnvironment(_globalThis as typeof globalThis & RAW_ENVIRONMENT); } diff --git a/packages/opentelemetry-core/src/utils/lodash.merge.ts b/packages/opentelemetry-core/src/utils/lodash.merge.ts index 0c55c7aa47..25e092b8d6 100644 --- a/packages/opentelemetry-core/src/utils/lodash.merge.ts +++ b/packages/opentelemetry-core/src/utils/lodash.merge.ts @@ -42,7 +42,7 @@ const nativeObjectToString = objectProto.toString; * @returns {Function} Returns the new function. 
*/ function overArg(func: Function, transform: any): any { - return function(arg: any) { + return function (arg: any) { return func(transform(arg)); }; } @@ -84,8 +84,11 @@ export function isPlainObject(value: any) { return true; } const Ctor = hasOwnProperty.call(proto, 'constructor') && proto.constructor; - return typeof Ctor == 'function' && Ctor instanceof Ctor && - funcToString.call(Ctor) === objectCtorString; + return ( + typeof Ctor == 'function' && + Ctor instanceof Ctor && + funcToString.call(Ctor) === objectCtorString + ); } /** @@ -127,7 +130,7 @@ function baseGetTag(value: any) { if (value == null) { return value === undefined ? undefinedTag : nullTag; } - return (symToStringTag && symToStringTag in Object(value)) + return symToStringTag && symToStringTag in Object(value) ? getRawTag(value) : objectToString(value); } diff --git a/packages/opentelemetry-core/src/utils/merge.ts b/packages/opentelemetry-core/src/utils/merge.ts index ae01a953a8..4b501b32d5 100644 --- a/packages/opentelemetry-core/src/utils/merge.ts +++ b/packages/opentelemetry-core/src/utils/merge.ts @@ -31,7 +31,10 @@ interface ObjectInto { */ export function merge(...args: any[]): any { let result: any = args.shift(); - const objects: WeakMap | undefined = new WeakMap(); + const objects: WeakMap | undefined = new WeakMap< + any, + ObjectInto[] + >(); while (args.length > 0) { result = mergeTwoObjects(result, args.shift(), 0, objects); } @@ -58,7 +61,7 @@ function mergeTwoObjects( one: any, two: any, level = 0, - objects: WeakMap, + objects: WeakMap ): any { let result: any; if (level > MAX_LEVEL) { @@ -109,7 +112,6 @@ function mergeTwoObjects( ) { delete result[key]; } else { - if (isObject(obj1) && isObject(obj2)) { const arr1 = objects.get(obj1) || []; const arr2 = objects.get(obj2) || []; @@ -145,7 +147,7 @@ function mergeTwoObjects( function wasObjectReferenced( obj: any, key: string, - objects: WeakMap, + objects: WeakMap ): boolean { const arr = objects.get(obj[key]) || []; for (let i = 0, j = arr.length; i < j; i++) { @@ -166,17 +168,24 @@ function isFunction(value: any): boolean { } function isObject(value: any): boolean { - return !isPrimitive(value) && !isArray(value) && !isFunction(value) && typeof value === 'object'; + return ( + !isPrimitive(value) && + !isArray(value) && + !isFunction(value) && + typeof value === 'object' + ); } function isPrimitive(value: any): boolean { - return typeof value === 'string' || + return ( + typeof value === 'string' || typeof value === 'number' || typeof value === 'boolean' || typeof value === 'undefined' || value instanceof Date || value instanceof RegExp || - value === null; + value === null + ); } function shouldMerge(one: any, two: any): boolean { @@ -186,4 +195,3 @@ function shouldMerge(one: any, two: any): boolean { return true; } - diff --git a/packages/opentelemetry-core/test/baggage/W3CBaggagePropagator.test.ts b/packages/opentelemetry-core/test/baggage/W3CBaggagePropagator.test.ts index a6fcd267ea..c30bfb3014 100644 --- a/packages/opentelemetry-core/test/baggage/W3CBaggagePropagator.test.ts +++ b/packages/opentelemetry-core/test/baggage/W3CBaggagePropagator.test.ts @@ -20,7 +20,7 @@ import { defaultTextMapGetter, defaultTextMapSetter, propagation, - baggageEntryMetadataFromString + baggageEntryMetadataFromString, } from '@opentelemetry/api'; import { ROOT_CONTEXT } from '@opentelemetry/api'; import * as assert from 'assert'; @@ -42,7 +42,12 @@ describe('W3CBaggagePropagator', () => { key1: { value: 'd4cda95b652f4a1592b449d5929fda1b' }, 'with/slash': { value: 
'with spaces' }, key3: { value: 'c88815a7-0fa9-4d95-a1f1-cdccce3c5c2a' }, - key4: { value: 'foo', metadata: baggageEntryMetadataFromString('key4prop1=value1;key4prop2=value2;key4prop3WithNoValue') } + key4: { + value: 'foo', + metadata: baggageEntryMetadataFromString( + 'key4prop1=value1;key4prop2=value2;key4prop3WithNoValue' + ), + }, }); httpBaggagePropagator.inject( @@ -175,7 +180,8 @@ describe('W3CBaggagePropagator', () => { }); describe('.extract()', () => { - const baggageValue = 'key1=d4cda95b,key3=c88815a7, keyn = valn, keym =valm'; + const baggageValue = + 'key1=d4cda95b,key3=c88815a7, keyn = valn, keym =valm'; const expected = propagation.createBaggage({ key1: { value: 'd4cda95b' }, key3: { value: 'c88815a7' }, @@ -210,7 +216,10 @@ describe('W3CBaggagePropagator', () => { }); it('should extract context of a sampled span when the headerValue comes as array with multiple items', () => { - carrier[BAGGAGE_HEADER] = ['key1=d4cda95b,key3=c88815a7, keyn = valn', 'keym =valm']; + carrier[BAGGAGE_HEADER] = [ + 'key1=d4cda95b,key3=c88815a7, keyn = valn', + 'keym =valm', + ]; const extractedBaggage = propagation.getBaggage( httpBaggagePropagator.extract( ROOT_CONTEXT, diff --git a/packages/opentelemetry-core/test/platform/hex-to-base64.test.ts b/packages/opentelemetry-core/test/platform/hex-to-base64.test.ts index d73409fc09..3249621072 100644 --- a/packages/opentelemetry-core/test/platform/hex-to-base64.test.ts +++ b/packages/opentelemetry-core/test/platform/hex-to-base64.test.ts @@ -26,6 +26,9 @@ describe('hexToBase64', () => { assert.strictEqual(hexToBase64(id2), 'EqvANNVn6J/ybmCMjPQsgA=='); assert.strictEqual(hexToBase64(id3), 'EqvANNVn6J/ybmCMjPQsgA=='); // Don't use the preallocated path - assert.strictEqual(hexToBase64(id2.repeat(2)), 'EqvANNVn6J/ybmCMjPQsgBKrwDTVZ+if8m5gjIz0LIA='); + assert.strictEqual( + hexToBase64(id2.repeat(2)), + 'EqvANNVn6J/ybmCMjPQsgBKrwDTVZ+if8m5gjIz0LIA=' + ); }); }); diff --git a/packages/opentelemetry-core/test/propagation/composite.test.ts b/packages/opentelemetry-core/test/propagation/composite.test.ts index 9436023584..a2bad6f5dc 100644 --- a/packages/opentelemetry-core/test/propagation/composite.test.ts +++ b/packages/opentelemetry-core/test/propagation/composite.test.ts @@ -20,7 +20,8 @@ import { TextMapPropagator, SpanContext, TextMapGetter, - TextMapSetter, trace, + TextMapSetter, + trace, } from '@opentelemetry/api'; import { Context, ROOT_CONTEXT } from '@opentelemetry/api'; import * as assert from 'assert'; diff --git a/packages/opentelemetry-core/test/test-utils.ts b/packages/opentelemetry-core/test/test-utils.ts index 002945691d..b9cacc0088 100644 --- a/packages/opentelemetry-core/test/test-utils.ts +++ b/packages/opentelemetry-core/test/test-utils.ts @@ -17,13 +17,16 @@ import * as assert from 'assert'; interface ErrorLikeConstructor { - new(): Error; + new (): Error; } /** * Node.js v8.x and browser compatible `assert.rejects`. 
*/ -export async function assertRejects(actual: any, expected: RegExp | ErrorLikeConstructor) { +export async function assertRejects( + actual: any, + expected: RegExp | ErrorLikeConstructor +) { let rejected; try { if (typeof actual === 'function') { diff --git a/packages/opentelemetry-core/test/trace/W3CTraceContextPropagator.test.ts b/packages/opentelemetry-core/test/trace/W3CTraceContextPropagator.test.ts index 9dcdfe15b9..2df3395f50 100644 --- a/packages/opentelemetry-core/test/trace/W3CTraceContextPropagator.test.ts +++ b/packages/opentelemetry-core/test/trace/W3CTraceContextPropagator.test.ts @@ -20,7 +20,8 @@ import { INVALID_SPANID, INVALID_TRACEID, ROOT_CONTEXT, - SpanContext, trace, + SpanContext, + trace, TraceFlags, } from '@opentelemetry/api'; import * as assert from 'assert'; diff --git a/packages/opentelemetry-core/test/utils/callback.test.ts b/packages/opentelemetry-core/test/utils/callback.test.ts index 72805d0f5e..0acf2821e6 100644 --- a/packages/opentelemetry-core/test/utils/callback.test.ts +++ b/packages/opentelemetry-core/test/utils/callback.test.ts @@ -26,10 +26,7 @@ describe('callback', () => { const that = {}; const future = new BindOnceFuture(stub, that); - await Promise.all([ - future.call(1), - future.call(2), - ]); + await Promise.all([future.call(1), future.call(2)]); await future.call(3); await future.promise; diff --git a/packages/opentelemetry-core/test/utils/environment.test.ts b/packages/opentelemetry-core/test/utils/environment.test.ts index ce18b42476..2517a4e99e 100644 --- a/packages/opentelemetry-core/test/utils/environment.test.ts +++ b/packages/opentelemetry-core/test/utils/environment.test.ts @@ -38,7 +38,7 @@ export function mockEnvironment(values: RAW_ENVIRONMENT) { }); } else { Object.keys(values).forEach(key => { - ((window as unknown) as RAW_ENVIRONMENT)[key] = String(values[key]); + (window as unknown as RAW_ENVIRONMENT)[key] = String(values[key]); }); } } @@ -53,7 +53,7 @@ export function removeMockEnvironment() { }); } else { Object.keys(lastMock).forEach(key => { - delete ((window as unknown) as RAW_ENVIRONMENT)[key]; + delete (window as unknown as RAW_ENVIRONMENT)[key]; }); } lastMock = {}; diff --git a/packages/opentelemetry-core/test/utils/merge.test.ts b/packages/opentelemetry-core/test/utils/merge.test.ts index d008ce2b21..76e9ab2fe0 100644 --- a/packages/opentelemetry-core/test/utils/merge.test.ts +++ b/packages/opentelemetry-core/test/utils/merge.test.ts @@ -21,132 +21,132 @@ const tests: TestResult[] = []; tests.push({ inputs: ['1', '2'], result: '2', - desc: 'two strings' + desc: 'two strings', }); tests.push({ inputs: [1, 2], result: 2, - desc: 'two numbers' + desc: 'two numbers', }); tests.push({ inputs: [true, false], result: false, - desc: 'two booleans' + desc: 'two booleans', }); tests.push({ inputs: [false, true], result: true, - desc: 'two booleans case 2' + desc: 'two booleans case 2', }); tests.push({ inputs: [undefined, undefined], result: undefined, - desc: 'two undefined' + desc: 'two undefined', }); tests.push({ inputs: [null, null], result: null, - desc: 'two nulls' + desc: 'two nulls', }); tests.push({ inputs: ['1', 1], result: 1, - desc: 'string & number' + desc: 'string & number', }); tests.push({ inputs: ['1', false], result: false, - desc: 'string & boolean' + desc: 'string & boolean', }); tests.push({ inputs: ['1', undefined], result: undefined, - desc: 'string & undefined' + desc: 'string & undefined', }); tests.push({ inputs: ['1', null], result: null, - desc: 'string & null' + desc: 'string & null', }); 
tests.push({ inputs: [3, '1'], result: '1', - desc: 'number & string' + desc: 'number & string', }); tests.push({ inputs: [3, false], result: false, - desc: 'number & boolean' + desc: 'number & boolean', }); tests.push({ inputs: [3, undefined], result: undefined, - desc: 'number & undefined' + desc: 'number & undefined', }); tests.push({ inputs: [3, null], result: null, - desc: 'number & null' + desc: 'number & null', }); tests.push({ inputs: [false, '3'], result: '3', - desc: 'boolean & string' + desc: 'boolean & string', }); tests.push({ inputs: [false, 3], result: 3, - desc: 'boolean & number' + desc: 'boolean & number', }); tests.push({ inputs: [false, undefined], result: undefined, - desc: 'boolean & undefined' + desc: 'boolean & undefined', }); tests.push({ inputs: [false, null], result: null, - desc: 'boolean & null' + desc: 'boolean & null', }); tests.push({ inputs: [undefined, '1'], result: '1', - desc: 'undefined & string' + desc: 'undefined & string', }); tests.push({ inputs: [undefined, 1], result: 1, - desc: 'undefined & number' + desc: 'undefined & number', }); tests.push({ inputs: [undefined, false], result: false, - desc: 'undefined & boolean' + desc: 'undefined & boolean', }); tests.push({ inputs: [undefined, null], result: null, - desc: 'undefined & null' + desc: 'undefined & null', }); tests.push({ inputs: [null, '1'], result: '1', - desc: 'null & string' + desc: 'null & string', }); tests.push({ inputs: [null, 1], result: 1, - desc: 'null & number' + desc: 'null & number', }); tests.push({ inputs: [null, false], result: false, - desc: 'null & boolean' + desc: 'null & boolean', }); tests.push({ inputs: [null, undefined], result: undefined, - desc: 'null & undefined' + desc: 'null & undefined', }); const date1 = new Date(327164400000); @@ -154,25 +154,25 @@ const date2 = new Date(358700400000); tests.push({ inputs: [date1, date2], result: date2, - desc: 'two dates' + desc: 'two dates', }); tests.push({ inputs: [/.+/g, /.a+/g], result: /.a+/g, - desc: 'two regexp' + desc: 'two regexp', }); tests.push({ inputs: [1, { a: 1 }], result: { a: 1 }, - desc: 'primitive with object' + desc: 'primitive with object', }); tests.push({ inputs: [{ a: 1 }, 1], result: 1, - desc: 'object with primitive' + desc: 'object with primitive', }); const arrResult1: any = [1, 2, 3]; @@ -180,76 +180,91 @@ arrResult1['foo'] = 1; tests.push({ inputs: [[1, 2, 3], { foo: 1 }], result: arrResult1, - desc: 'array with object' + desc: 'array with object', }); tests.push({ inputs: [{ foo: 1 }, [1, 2, 3]], result: [1, 2, 3], - desc: 'object with array' + desc: 'object with array', }); tests.push({ - inputs: [{ a: 1, c: 1 }, { a: 2, b: 3 }], + inputs: [ + { a: 1, c: 1 }, + { a: 2, b: 3 }, + ], result: { a: 2, b: 3, c: 1 }, - desc: 'two objects' + desc: 'two objects', }); tests.push({ - inputs: [{ a: 1, c: 1 }, { a: 2, b: 3, c: { foo: 1 } }], + inputs: [ + { a: 1, c: 1 }, + { a: 2, b: 3, c: { foo: 1 } }, + ], result: { a: 2, b: 3, c: { foo: 1 } }, - desc: 'two objects 2nd with nested' + desc: 'two objects 2nd with nested', }); tests.push({ inputs: [ { a: 1, c: { bar: 1, d: { bla: 2 } } }, - { a: 2, b: 3, c: { foo: 1 } } + { a: 2, b: 3, c: { foo: 1 } }, ], result: { a: 2, b: 3, c: { bar: 1, d: { bla: 2 }, foo: 1 } }, - desc: 'two objects with nested objects' + desc: 'two objects with nested objects', }); tests.push({ - inputs: [[1, 2, 3], [4, 5]], + inputs: [ + [1, 2, 3], + [4, 5], + ], result: [1, 2, 3, 4, 5], - desc: 'two arrays with numbers' + desc: 'two arrays with numbers', }); tests.push({ - inputs: [[1, 
2, 3, { foo: 1 }], [4, 5, { foo: 2 }]], + inputs: [ + [1, 2, 3, { foo: 1 }], + [4, 5, { foo: 2 }], + ], result: [1, 2, 3, { foo: 1 }, 4, 5, { foo: 2 }], - desc: 'two arrays, with number and objects' + desc: 'two arrays, with number and objects', }); tests.push({ - inputs: [{ a: 1, c: 1 }, { a: 2, b: 3 }, { a: 3, c: 2, d: 1 }], + inputs: [ + { a: 1, c: 1 }, + { a: 2, b: 3 }, + { a: 3, c: 2, d: 1 }, + ], result: { a: 3, b: 3, c: 2, d: 1 }, - desc: 'three objects' + desc: 'three objects', }); tests.push({ inputs: [ { a: 1, c: 1, foo: { bar1: 1 } }, { a: 2, b: 3, foo: { bar1: 2 } }, - { a: 3, c: 2, d: 1, foo: { bar2: 1 } } + { a: 3, c: 2, d: 1, foo: { bar2: 1 } }, ], result: { a: 3, b: 3, c: 2, d: 1, foo: { bar1: 2, bar2: 1 } }, - desc: 'three nested objects' + desc: 'three nested objects', }); tests.push({ inputs: [ { a: 1, c: { bar: 1, d: { bla: 2 } } }, - { a: 2, b: 3, c: { foo: 1, bar: undefined } } + { a: 2, b: 3, c: { foo: 1, bar: undefined } }, ], result: { a: 2, b: 3, c: { d: { bla: 2 }, foo: 1 } }, - desc: 'two objects with nested objects and undefined' + desc: 'two objects with nested objects and undefined', }); class A { - constructor(private _name: string = 'foo') { - } + constructor(private _name: string = 'foo') {} getName() { return this._name; @@ -260,7 +275,7 @@ class B extends A { constructor(name = 'foo', private _ver = 1) { super(name); } - getVer(){ + getVer() { return this._ver; } } @@ -273,19 +288,19 @@ tests.push({ { a: 1, c: 1, foo: a, foo2: { a: 1 } }, { a: 2, b: 3, foo: b, foo2: { b: 1, a: a } }, ], - result: { a: 2, b: 3, c: 1, foo: b, foo2: {a: a, b: 1} }, - desc: 'two objects with nested objects and objects created from classes' + result: { a: 2, b: 3, c: 1, foo: b, foo2: { a: a, b: 1 } }, + desc: 'two objects with nested objects and objects created from classes', }); describe('merge', () => { tests.forEach((test, index) => { - it(`should merge ${ test.desc }`, () => { + it(`should merge ${test.desc}`, () => { const result = merge(...test.inputs); assert.deepStrictEqual( result, test.result, - `test ${ index + 1 } '${ test.desc }' failed` + `test ${index + 1} '${test.desc}' failed` ); }); }); @@ -299,10 +314,13 @@ describe('merge', () => { b.b = 9; b.arr.push(5); - assert.deepStrictEqual( - result, - { a: 1, c: 2, foo: { bar1: 1, bar2: 2 }, b: 1, arr: [1, 2, 3] } - ); + assert.deepStrictEqual(result, { + a: 1, + c: 2, + foo: { bar1: 1, bar2: 2 }, + b: 1, + arr: [1, 2, 3], + }); }); it('should ignore cyclic reference', () => { @@ -312,17 +330,14 @@ describe('merge', () => { b.f = b; const result = merge(a, b); - assert.deepStrictEqual( - result, - { - a: 1, - c: 2, - foo: { bar1: 1, bar2: 2 }, - f: { a: 1, c: 2, b: 1, arr: [1, 2, 3] }, - b: 1, - arr: [1, 2, 3] - } - ); + assert.deepStrictEqual(result, { + a: 1, + c: 2, + foo: { bar1: 1, bar2: 2 }, + f: { a: 1, c: 2, b: 1, arr: [1, 2, 3] }, + b: 1, + arr: [1, 2, 3], + }); }); it('should not fail for 1 argument', () => { @@ -337,17 +352,20 @@ describe('merge', () => { it('should merge function', () => { const a = { - a: 1, b: 2 + a: 1, + b: 2, }; const b = { a: 2, - c: function() { + c: function () { return 'foo'; }, }; const result = merge(a, b); assert.deepStrictEqual(result, { - a: 2, b: 2, c: b.c + a: 2, + b: 2, + c: b.c, }); }); @@ -377,7 +395,6 @@ describe('merge', () => { } assert.deepStrictEqual(count, 19); }); - }); interface TestResult { diff --git a/packages/opentelemetry-core/test/utils/wrap.test.ts b/packages/opentelemetry-core/test/utils/wrap.test.ts index 25ef129718..5c9b91f85d 100644 --- 
a/packages/opentelemetry-core/test/utils/wrap.test.ts +++ b/packages/opentelemetry-core/test/utils/wrap.test.ts @@ -27,7 +27,7 @@ function makeWrapped( defineProperty(wrapper, '__wrapped', wrapped); defineProperty(wrapper, '__unwrap', unwrap); defineProperty(wrapper, '__original', original); - return (wrapper as unknown) as ShimWrapped; + return wrapper as unknown as ShimWrapped; } function defineProperty(obj: any, name: string, value: unknown) { diff --git a/packages/opentelemetry-exporter-jaeger/src/jaeger.ts b/packages/opentelemetry-exporter-jaeger/src/jaeger.ts index 0ba979db04..2f7acabe76 100644 --- a/packages/opentelemetry-exporter-jaeger/src/jaeger.ts +++ b/packages/opentelemetry-exporter-jaeger/src/jaeger.ts @@ -15,7 +15,12 @@ */ import { diag } from '@opentelemetry/api'; -import { BindOnceFuture, ExportResult, ExportResultCode, getEnv } from '@opentelemetry/core'; +import { + BindOnceFuture, + ExportResult, + ExportResultCode, + getEnv, +} from '@opentelemetry/core'; import { ReadableSpan, SpanExporter } from '@opentelemetry/sdk-trace-base'; import { Socket } from 'dgram'; import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions'; @@ -85,7 +90,10 @@ export class JaegerExporter implements SpanExporter { private _shutdown(): Promise<void> { return Promise.race([ new Promise<void>((_resolve, reject) => { - setTimeout(() => reject(new Error('Flush timeout')), this._onShutdownFlushTimeout); + setTimeout( + () => reject(new Error('Flush timeout')), + this._onShutdownFlushTimeout + ); }), this._flush(), ]).finally(() => { @@ -126,19 +134,25 @@ export class JaegerExporter implements SpanExporter { }); } - private _getSender(span: jaegerTypes.ThriftSpan): typeof jaegerTypes.UDPSender { + private _getSender( + span: jaegerTypes.ThriftSpan + ): typeof jaegerTypes.UDPSender { if (this._sender) { return this._sender; } - const sender = this._localConfig.endpoint ? new jaegerTypes.HTTPSender(this._localConfig) : new jaegerTypes.UDPSender(this._localConfig); + const sender = this._localConfig.endpoint + ?
new jaegerTypes.HTTPSender(this._localConfig) + : new jaegerTypes.UDPSender(this._localConfig); if (sender._client instanceof Socket) { // unref socket to prevent it from keeping the process running sender._client.unref(); } - const serviceNameTag = span.tags.find(t => t.key === SemanticResourceAttributes.SERVICE_NAME); + const serviceNameTag = span.tags.find( + t => t.key === SemanticResourceAttributes.SERVICE_NAME + ); const serviceName = serviceNameTag?.vStr || 'unknown_service'; sender.setProcess({ diff --git a/packages/opentelemetry-exporter-jaeger/src/transform.ts b/packages/opentelemetry-exporter-jaeger/src/transform.ts index 855722bfbe..61321e78aa 100644 --- a/packages/opentelemetry-exporter-jaeger/src/transform.ts +++ b/packages/opentelemetry-exporter-jaeger/src/transform.ts @@ -88,18 +88,16 @@ export function spanToThrift(span: ReadableSpan): ThriftSpan { const spanTags: ThriftTag[] = ThriftUtils.getThriftTags(tags); - const logs = span.events.map( - (event): Log => { - const fields: Tag[] = [{ key: 'event', value: event.name }]; - const attrs = event.attributes; - if (attrs) { - Object.keys(attrs).forEach(attr => - fields.push({ key: attr, value: toTagValue(attrs[attr]) }) - ); - } - return { timestamp: hrTimeToMilliseconds(event.time), fields }; + const logs = span.events.map((event): Log => { + const fields: Tag[] = [{ key: 'event', value: event.name }]; + const attrs = event.attributes; + if (attrs) { + Object.keys(attrs).forEach(attr => + fields.push({ key: attr, value: toTagValue(attrs[attr]) }) + ); } - ); + return { timestamp: hrTimeToMilliseconds(event.time), fields }; + }); const spanLogs: ThriftLog[] = ThriftUtils.getThriftLogs(logs); return { @@ -118,18 +116,15 @@ export function spanToThrift(span: ReadableSpan): ThriftSpan { } /** Translate OpenTelemetry {@link Link}s to Jaeger ThriftReference. */ -function spanLinksToThriftRefs( - links: Link[], -): ThriftReference[] { - return links - .map((link): ThriftReference => { - const refType = ThriftReferenceType.FOLLOWS_FROM; - const traceId = link.context.traceId; - const traceIdHigh = Utils.encodeInt64(traceId.slice(0, 16)); - const traceIdLow = Utils.encodeInt64(traceId.slice(16)); - const spanId = Utils.encodeInt64(link.context.spanId); - return { traceIdLow, traceIdHigh, spanId, refType }; - }); +function spanLinksToThriftRefs(links: Link[]): ThriftReference[] { + return links.map((link): ThriftReference => { + const refType = ThriftReferenceType.FOLLOWS_FROM; + const traceId = link.context.traceId; + const traceIdHigh = Utils.encodeInt64(traceId.slice(0, 16)); + const traceIdLow = Utils.encodeInt64(traceId.slice(16)); + const spanId = Utils.encodeInt64(link.context.spanId); + return { traceIdLow, traceIdHigh, spanId, refType }; + }); } /** Translate OpenTelemetry attribute value to Jaeger TagValue. */ diff --git a/packages/opentelemetry-exporter-jaeger/src/types.ts b/packages/opentelemetry-exporter-jaeger/src/types.ts index 4a6ec67f12..d7d62f4daa 100644 --- a/packages/opentelemetry-exporter-jaeger/src/types.ts +++ b/packages/opentelemetry-exporter-jaeger/src/types.ts @@ -37,13 +37,13 @@ export interface ExporterConfig { // udp_sender, util etc. modules. 
/* eslint-disable @typescript-eslint/no-var-requires */ -export const UDPSender = require('jaeger-client/dist/src/reporters/udp_sender') - .default; +export const UDPSender = + require('jaeger-client/dist/src/reporters/udp_sender').default; export const Utils = require('jaeger-client/dist/src/util').default; export const ThriftUtils = require('jaeger-client/dist/src/thrift').default; -export const HTTPSender = require('jaeger-client/dist/src/reporters/http_sender') - .default; +export const HTTPSender = + require('jaeger-client/dist/src/reporters/http_sender').default; /* eslint-enable @typescript-eslint/no-var-requires */ export type TagValue = string | number | boolean; diff --git a/packages/opentelemetry-exporter-jaeger/test/jaeger.test.ts b/packages/opentelemetry-exporter-jaeger/test/jaeger.test.ts index 738f9b9ce7..7a6130322b 100644 --- a/packages/opentelemetry-exporter-jaeger/test/jaeger.test.ts +++ b/packages/opentelemetry-exporter-jaeger/test/jaeger.test.ts @@ -47,7 +47,7 @@ describe('JaegerExporter', () => { events: [], duration: [32, 800000000], resource: new Resource({ - [SemanticResourceAttributes.SERVICE_NAME]: 'opentelemetry' + [SemanticResourceAttributes.SERVICE_NAME]: 'opentelemetry', }), instrumentationLibrary: { name: 'default', @@ -67,16 +67,17 @@ describe('JaegerExporter', () => { it('should get service name from the the service name resource attribute of the first exported span', done => { const mockedEndpoint = 'http://testendpoint'; - const scope =nock(mockedEndpoint) - .post('/') - .reply(202); + const scope = nock(mockedEndpoint).post('/').reply(202); const exporter = new JaegerExporter({ endpoint: mockedEndpoint, }); exporter.export([readableSpan], result => { assert.strictEqual(result.code, ExportResultCode.SUCCESS); - assert.strictEqual(exporter['_sender']._batch.process.serviceName, 'opentelemetry'); + assert.strictEqual( + exporter['_sender']._batch.process.serviceName, + 'opentelemetry' + ); scope.done(); done(); }); @@ -92,10 +93,12 @@ describe('JaegerExporter', () => { assert.ok(typeof exporter.shutdown === 'function'); const process: ThriftProcess = exporter['_getSender']({ - tags: [{ - key: 'service.name', - vStr: 'opentelemetry' - }] + tags: [ + { + key: 'service.name', + vStr: 'opentelemetry', + }, + ], } as any)._process; assert.strictEqual(exporter['_sender']._host, 'remotehost'); assert.strictEqual(process.serviceName, 'opentelemetry'); @@ -108,10 +111,12 @@ describe('JaegerExporter', () => { it('should default to localhost if no host is configured', () => { const exporter = new JaegerExporter(); const sender = exporter['_getSender']({ - tags: [{ - key: 'service.name', - vStr: 'opentelemetry' - }] + tags: [ + { + key: 'service.name', + vStr: 'opentelemetry', + }, + ], } as any); assert.strictEqual(sender._host, 'localhost'); }); @@ -121,10 +126,12 @@ describe('JaegerExporter', () => { process.env.OTEL_EXPORTER_JAEGER_AGENT_PORT = '1234'; const exporter = new JaegerExporter(); const sender = exporter['_getSender']({ - tags: [{ - key: 'service.name', - vStr: 'opentelemetry' - }] + tags: [ + { + key: 'service.name', + vStr: 'opentelemetry', + }, + ], } as any); assert.strictEqual(sender._host, 'env-set-host'); assert.strictEqual(sender._port, 1234); @@ -135,13 +142,15 @@ describe('JaegerExporter', () => { process.env.OTEL_EXPORTER_JAEGER_AGENT_PORT = '1234'; const exporter = new JaegerExporter({ host: 'option-set-host', - port: 5678 + port: 5678, }); const sender = exporter['_getSender']({ - tags: [{ - key: 'service.name', - vStr: 'opentelemetry' - }] + 
tags: [ + { + key: 'service.name', + vStr: 'opentelemetry', + }, + ], } as any); assert.strictEqual(sender._host, 'option-set-host'); assert.strictEqual(sender._port, 5678); diff --git a/packages/opentelemetry-exporter-jaeger/test/transform.test.ts b/packages/opentelemetry-exporter-jaeger/test/transform.test.ts index d97b44ccb0..5b9de96aa5 100644 --- a/packages/opentelemetry-exporter-jaeger/test/transform.test.ts +++ b/packages/opentelemetry-exporter-jaeger/test/transform.test.ts @@ -133,9 +133,18 @@ describe('transform', () => { assert.strictEqual(thriftSpan.references.length, 1); const [reference1] = thriftSpan.references; assert.strictEqual(reference1.refType, ThriftReferenceType.FOLLOWS_FROM); - assert.strictEqual(reference1.spanId.toString('hex'), readableSpan.links[0].context.spanId); - assert.strictEqual(reference1.traceIdLow.toString('hex'), readableSpan.links[0].context.traceId.substring(16, 32)); - assert.strictEqual(reference1.traceIdHigh.toString('hex'), readableSpan.links[0].context.traceId.substring(0, 16)); + assert.strictEqual( + reference1.spanId.toString('hex'), + readableSpan.links[0].context.spanId + ); + assert.strictEqual( + reference1.traceIdLow.toString('hex'), + readableSpan.links[0].context.traceId.substring(16, 32) + ); + assert.strictEqual( + reference1.traceIdHigh.toString('hex'), + readableSpan.links[0].context.traceId.substring(0, 16) + ); assert.strictEqual(thriftSpan.logs.length, 1); const [log1] = thriftSpan.logs; diff --git a/packages/opentelemetry-exporter-zipkin/src/platform/browser/util.ts b/packages/opentelemetry-exporter-zipkin/src/platform/browser/util.ts index 96008e4172..7c5ace3588 100644 --- a/packages/opentelemetry-exporter-zipkin/src/platform/browser/util.ts +++ b/packages/opentelemetry-exporter-zipkin/src/platform/browser/util.ts @@ -28,7 +28,10 @@ import * as zipkinTypes from '../../types'; * @param headers - headers * send */ -export function prepareSend(urlStr: string, headers?: Record<string, string>): zipkinTypes.SendFn { +export function prepareSend( + urlStr: string, + headers?: Record<string, string> +): zipkinTypes.SendFn { let xhrHeaders: Record<string, string>; const useBeacon = typeof navigator.sendBeacon === 'function' && !headers; if (headers) { diff --git a/packages/opentelemetry-exporter-zipkin/src/platform/node/util.ts b/packages/opentelemetry-exporter-zipkin/src/platform/node/util.ts index ca0af75886..5032b0c978 100644 --- a/packages/opentelemetry-exporter-zipkin/src/platform/node/util.ts +++ b/packages/opentelemetry-exporter-zipkin/src/platform/node/util.ts @@ -27,7 +27,10 @@ import * as zipkinTypes from '../../types'; * @param headers - headers * send */ -export function prepareSend(urlStr: string, headers?: Record<string, string>): zipkinTypes.SendFn { +export function prepareSend( + urlStr: string, + headers?: Record<string, string> +): zipkinTypes.SendFn { const urlOpts = url.parse(urlStr); const reqOpts: http.RequestOptions = Object.assign( @@ -61,7 +64,9 @@ export function prepareSend(urlStr: string, headers?: Record<string, string>): z }); res.on('end', () => { const statusCode = res.statusCode || 0; - diag.debug(`Zipkin response status code: ${statusCode}, body: ${rawData}`); + diag.debug( + `Zipkin response status code: ${statusCode}, body: ${rawData}` + ); // Consider 2xx and 3xx as success.
if (statusCode < 400) { diff --git a/packages/opentelemetry-exporter-zipkin/src/types.ts b/packages/opentelemetry-exporter-zipkin/src/types.ts index a544f4e9b9..ce9754d0b5 100644 --- a/packages/opentelemetry-exporter-zipkin/src/types.ts +++ b/packages/opentelemetry-exporter-zipkin/src/types.ts @@ -188,4 +188,7 @@ export type SendFunction = ( export type GetHeaders = () => Record<string, string> | undefined; -export type SendFn = (zipkinSpans: Span[], done: (result: ExportResult) => void) => void; +export type SendFn = ( + zipkinSpans: Span[], + done: (result: ExportResult) => void +) => void; diff --git a/packages/opentelemetry-exporter-zipkin/src/zipkin.ts b/packages/opentelemetry-exporter-zipkin/src/zipkin.ts index 3b228b3007..2cb3e44436 100644 --- a/packages/opentelemetry-exporter-zipkin/src/zipkin.ts +++ b/packages/opentelemetry-exporter-zipkin/src/zipkin.ts @@ -45,7 +45,8 @@ export class ZipkinExporter implements SpanExporter { this._urlStr = config.url || getEnv().OTEL_EXPORTER_ZIPKIN_ENDPOINT; this._send = prepareSend(this._urlStr, config.headers); this._serviceName = config.serviceName; - this._statusCodeTagName = config.statusCodeTagName || defaultStatusCodeTagName; + this._statusCodeTagName = + config.statusCodeTagName || defaultStatusCodeTagName; this._statusDescriptionTagName = config.statusDescriptionTagName || defaultStatusErrorTagName; this._isShutdown = false; @@ -87,7 +88,6 @@ export class ZipkinExporter implements SpanExporter { }); }); - this._sendingPromises.push(promise); const popPromise = () => { const index = this._sendingPromises.indexOf(promise); diff --git a/packages/opentelemetry-exporter-zipkin/test/helper.ts b/packages/opentelemetry-exporter-zipkin/test/helper.ts index c2e13dcdd8..3cfd694e2b 100644 --- a/packages/opentelemetry-exporter-zipkin/test/helper.ts +++ b/packages/opentelemetry-exporter-zipkin/test/helper.ts @@ -55,7 +55,7 @@ export function ensureHeadersContain( assert.strictEqual( v, actual[k], - `Expected ${ actual } to contain ${ k }: ${ v }` + `Expected ${actual} to contain ${k}: ${v}` ); }); } diff --git a/packages/opentelemetry-exporter-zipkin/test/node/zipkin.test.ts b/packages/opentelemetry-exporter-zipkin/test/node/zipkin.test.ts index 1b6465a656..d1aa4e8888 100644 --- a/packages/opentelemetry-exporter-zipkin/test/node/zipkin.test.ts +++ b/packages/opentelemetry-exporter-zipkin/test/node/zipkin.test.ts @@ -313,7 +313,6 @@ describe('Zipkin Exporter - node', () => { }); }); - it('should call globalErrorHandler on error', () => { const expectedError = new Error('Whoops'); const scope = nock('http://localhost:9411') @@ -498,10 +497,7 @@ describe('Zipkin Exporter - node', () => { exporter.export([span1, span2], (result: ExportResult) => { requestBody; scope.done(); - assert.equal( requestBody[0].localEndpoint.serviceName, span_service_name ); + assert.equal(requestBody[0].localEndpoint.serviceName, span_service_name); assert.equal( requestBody[1].localEndpoint.serviceName, span_service_name_prime diff --git a/packages/opentelemetry-propagator-b3/src/B3Propagator.ts b/packages/opentelemetry-propagator-b3/src/B3Propagator.ts index cc975ad49e..0997fcdb18 100644 --- a/packages/opentelemetry-propagator-b3/src/B3Propagator.ts +++ b/packages/opentelemetry-propagator-b3/src/B3Propagator.ts @@ -34,8 +34,10 @@ import { B3InjectEncoding, B3PropagatorConfig } from './types'; * Based on: https://github.com/openzipkin/b3-propagation */ export class B3Propagator implements TextMapPropagator { - private readonly _b3MultiPropagator: B3MultiPropagator = new
B3MultiPropagator(); - private readonly _b3SinglePropagator: B3SinglePropagator = new B3SinglePropagator(); + private readonly _b3MultiPropagator: B3MultiPropagator = + new B3MultiPropagator(); + private readonly _b3SinglePropagator: B3SinglePropagator = + new B3SinglePropagator(); private readonly _inject: ( context: Context, carrier: unknown, diff --git a/packages/opentelemetry-propagator-b3/src/B3SinglePropagator.ts b/packages/opentelemetry-propagator-b3/src/B3SinglePropagator.ts index eb8ac46ac0..83d21ec6dd 100644 --- a/packages/opentelemetry-propagator-b3/src/B3SinglePropagator.ts +++ b/packages/opentelemetry-propagator-b3/src/B3SinglePropagator.ts @@ -21,14 +21,16 @@ import { isValidTraceId, TextMapGetter, TextMapPropagator, - TextMapSetter, trace, + TextMapSetter, + trace, TraceFlags, } from '@opentelemetry/api'; import { isTracingSuppressed } from '@opentelemetry/core'; import { B3_DEBUG_FLAG_KEY } from './common'; import { B3_CONTEXT_HEADER } from './constants'; -const B3_CONTEXT_REGEX = /((?:[0-9a-f]{16}){1,2})-([0-9a-f]{16})(?:-([01d](?![0-9a-f])))?(?:-([0-9a-f]{16}))?/; +const B3_CONTEXT_REGEX = + /((?:[0-9a-f]{16}){1,2})-([0-9a-f]{16})(?:-([01d](?![0-9a-f])))?(?:-([0-9a-f]{16}))?/; const PADDING = '0'.repeat(16); const SAMPLED_VALUES = new Set(['d', '1']); const DEBUG_STATE = 'd'; diff --git a/packages/opentelemetry-propagator-b3/test/B3Propagator.test.ts b/packages/opentelemetry-propagator-b3/test/B3Propagator.test.ts index 62d4643f45..20ebcf3cdd 100644 --- a/packages/opentelemetry-propagator-b3/test/B3Propagator.test.ts +++ b/packages/opentelemetry-propagator-b3/test/B3Propagator.test.ts @@ -153,20 +153,16 @@ describe('B3Propagator', () => { }); it('extracts multi header b3 using array getter', () => { - const context = propagator.extract( - ROOT_CONTEXT, - b3MultiCarrier, - { - get(carrier, key) { - if (carrier == null || carrier[key] === undefined) { - return []; - } - return [carrier[key]]; - }, - - keys: defaultTextMapGetter.keys - } - ); + const context = propagator.extract(ROOT_CONTEXT, b3MultiCarrier, { + get(carrier, key) { + if (carrier == null || carrier[key] === undefined) { + return []; + } + return [carrier[key]]; + }, + + keys: defaultTextMapGetter.keys, + }); const extractedSpanContext = trace.getSpanContext(context); assert.deepStrictEqual(extractedSpanContext, { diff --git a/packages/opentelemetry-propagator-jaeger/src/JaegerPropagator.ts b/packages/opentelemetry-propagator-jaeger/src/JaegerPropagator.ts index 93f08e489c..b4421e395b 100644 --- a/packages/opentelemetry-propagator-jaeger/src/JaegerPropagator.ts +++ b/packages/opentelemetry-propagator-jaeger/src/JaegerPropagator.ts @@ -49,15 +49,17 @@ export class JaegerPropagator implements TextMapPropagator { private readonly _jaegerTraceHeader: string; private readonly _jaegerBaggageHeaderPrefix: string; - constructor(customTraceHeader?: string) - constructor(config?: JaegerPropagatorConfig) + constructor(customTraceHeader?: string); + constructor(config?: JaegerPropagatorConfig); constructor(config?: JaegerPropagatorConfig | string) { if (typeof config === 'string') { this._jaegerTraceHeader = config; this._jaegerBaggageHeaderPrefix = UBER_BAGGAGE_HEADER_PREFIX; } else { - this._jaegerTraceHeader = config?.customTraceHeader || UBER_TRACE_ID_HEADER; - this._jaegerBaggageHeaderPrefix = config?.customBaggageHeaderPrefix || UBER_BAGGAGE_HEADER_PREFIX; + this._jaegerTraceHeader = + config?.customTraceHeader || UBER_TRACE_ID_HEADER; + this._jaegerBaggageHeaderPrefix = + config?.customBaggageHeaderPrefix || 
UBER_BAGGAGE_HEADER_PREFIX; } } @@ -114,7 +116,8 @@ export class JaegerPropagator implements TextMapPropagator { if (baggageValues.length === 0) return newContext; // if baggage values are present, inject it into the current baggage - let currentBaggage = propagation.getBaggage(context) ?? propagation.createBaggage(); + let currentBaggage = + propagation.getBaggage(context) ?? propagation.createBaggage(); for (const baggageEntry of baggageValues) { if (baggageEntry.value === undefined) continue; currentBaggage = currentBaggage.setEntry(baggageEntry.key, { @@ -145,7 +148,9 @@ function deserializeSpanContext(serializedString: string): SpanContext | null { const traceId = _traceId.padStart(32, '0'); const spanId = _spanId.padStart(16, '0'); - const traceFlags = flags.match(/^[0-9a-f]{1,2}$/i) ? parseInt(flags, 16) & 1 : 1; + const traceFlags = flags.match(/^[0-9a-f]{1,2}$/i) + ? parseInt(flags, 16) & 1 + : 1; return { traceId, spanId, isRemote: true, traceFlags }; } diff --git a/packages/opentelemetry-propagator-jaeger/src/types.ts b/packages/opentelemetry-propagator-jaeger/src/types.ts index c6bb9879e1..180b49cdc3 100644 --- a/packages/opentelemetry-propagator-jaeger/src/types.ts +++ b/packages/opentelemetry-propagator-jaeger/src/types.ts @@ -15,6 +15,6 @@ */ export interface JaegerPropagatorConfig { - customTraceHeader?: string, - customBaggageHeaderPrefix?: string, + customTraceHeader?: string; + customBaggageHeaderPrefix?: string; } diff --git a/packages/opentelemetry-propagator-jaeger/test/JaegerPropagator.test.ts b/packages/opentelemetry-propagator-jaeger/test/JaegerPropagator.test.ts index 1e21e6a218..7c6a5e885c 100644 --- a/packages/opentelemetry-propagator-jaeger/test/JaegerPropagator.test.ts +++ b/packages/opentelemetry-propagator-jaeger/test/JaegerPropagator.test.ts @@ -16,7 +16,8 @@ import { defaultTextMapGetter, - defaultTextMapSetter, propagation, + defaultTextMapSetter, + propagation, ROOT_CONTEXT, SpanContext, TextMapGetter, @@ -261,7 +262,11 @@ describe('JaegerPropagator', () => { carrier[`${customBaggageHeaderPrefix}-test`] = 'value'; carrier[`${customBaggageHeaderPrefix}-myuser`] = '%25id%25'; const extractedBaggage = propagation.getBaggage( - customJaegerPropagatorWithConfig.extract(ROOT_CONTEXT, carrier, defaultTextMapGetter) + customJaegerPropagatorWithConfig.extract( + ROOT_CONTEXT, + carrier, + defaultTextMapGetter + ) ); const firstEntry = extractedBaggage?.getEntry('test'); @@ -277,7 +282,10 @@ describe('JaegerPropagator', () => { carrier[`${UBER_BAGGAGE_HEADER_PREFIX}-myuser`] = '%25id%25'; const extractedBaggage = propagation.getBaggage( jaegerPropagator.extract( - propagation.setBaggage(ROOT_CONTEXT, propagation.createBaggage({ one: { value: 'two' } })), + propagation.setBaggage( + ROOT_CONTEXT, + propagation.createBaggage({ one: { value: 'two' } }) + ), carrier, defaultTextMapGetter ) @@ -316,18 +324,18 @@ describe('JaegerPropagator', () => { }); it('should 0-pad span and trace id from header', () => { - carrier[UBER_TRACE_ID_HEADER] = '4cda95b652f4a1592b449d5929fda1b:e0c63257de34c92:0:01'; + carrier[UBER_TRACE_ID_HEADER] = + '4cda95b652f4a1592b449d5929fda1b:e0c63257de34c92:0:01'; const extractedSpanContext = trace.getSpanContext( - jaegerPropagator.extract( - ROOT_CONTEXT, - carrier, - defaultTextMapGetter - ) + jaegerPropagator.extract(ROOT_CONTEXT, carrier, defaultTextMapGetter) ); assert.ok(extractedSpanContext); assert.equal(extractedSpanContext.spanId, '0e0c63257de34c92'); - assert.equal(extractedSpanContext.traceId, '04cda95b652f4a1592b449d5929fda1b'); + 
assert.equal( + extractedSpanContext.traceId, + '04cda95b652f4a1592b449d5929fda1b' + ); }); }); @@ -339,7 +347,9 @@ describe('JaegerPropagator', () => { assert.deepStrictEqual(customJaegerPropagator.fields(), [customHeader]); }); it('returns the customized header if customized with config', () => { - assert.deepStrictEqual(customJaegerPropagatorWithConfig.fields(), [customHeader]); + assert.deepStrictEqual(customJaegerPropagatorWithConfig.fields(), [ + customHeader, + ]); }); }); diff --git a/packages/opentelemetry-resources/src/detectors/BrowserDetector.ts b/packages/opentelemetry-resources/src/detectors/BrowserDetector.ts index 1f16e126e8..b9271d7e56 100644 --- a/packages/opentelemetry-resources/src/detectors/BrowserDetector.ts +++ b/packages/opentelemetry-resources/src/detectors/BrowserDetector.ts @@ -31,7 +31,7 @@ class BrowserDetector implements Detector { const browserResource: ResourceAttributes = { [SemanticResourceAttributes.PROCESS_RUNTIME_NAME]: 'browser', [SemanticResourceAttributes.PROCESS_RUNTIME_DESCRIPTION]: 'Web Browser', - [SemanticResourceAttributes.PROCESS_RUNTIME_VERSION]: navigator.userAgent + [SemanticResourceAttributes.PROCESS_RUNTIME_VERSION]: navigator.userAgent, }; return this._getResourceAttributes(browserResource, config); } @@ -61,5 +61,4 @@ class BrowserDetector implements Detector { } } - export const browserDetector = new BrowserDetector(); diff --git a/packages/opentelemetry-resources/src/detectors/EnvDetector.ts b/packages/opentelemetry-resources/src/detectors/EnvDetector.ts index b7a98ff4e1..0f5cd65209 100644 --- a/packages/opentelemetry-resources/src/detectors/EnvDetector.ts +++ b/packages/opentelemetry-resources/src/detectors/EnvDetector.ts @@ -137,7 +137,7 @@ class EnvDetector implements Detector { private _isBaggageOctetString(str: string): boolean { for (let i = 0; i < str.length; i++) { const ch = str.charCodeAt(i); - if (ch < 0x21 || ch === 0x2C || ch === 0x3B || ch === 0x5C || ch > 0x7E) { + if (ch < 0x21 || ch === 0x2c || ch === 0x3b || ch === 0x5c || ch > 0x7e) { return false; } } diff --git a/packages/opentelemetry-resources/src/detectors/ProcessDetector.ts b/packages/opentelemetry-resources/src/detectors/ProcessDetector.ts index 3c6e03d92c..304a67f358 100644 --- a/packages/opentelemetry-resources/src/detectors/ProcessDetector.ts +++ b/packages/opentelemetry-resources/src/detectors/ProcessDetector.ts @@ -36,7 +36,8 @@ class ProcessDetector implements Detector { [SemanticResourceAttributes.PROCESS_COMMAND]: process.argv[1] || '', [SemanticResourceAttributes.PROCESS_COMMAND_LINE]: process.argv.join(' ') || '', - [SemanticResourceAttributes.PROCESS_RUNTIME_VERSION]: process.versions.node, + [SemanticResourceAttributes.PROCESS_RUNTIME_VERSION]: + process.versions.node, [SemanticResourceAttributes.PROCESS_RUNTIME_NAME]: 'nodejs', [SemanticResourceAttributes.PROCESS_RUNTIME_DESCRIPTION]: 'Node.js', }; diff --git a/packages/opentelemetry-resources/src/platform/node/detect-resources.ts b/packages/opentelemetry-resources/src/platform/node/detect-resources.ts index 35bf32b4cd..abe1584dc7 100644 --- a/packages/opentelemetry-resources/src/platform/node/detect-resources.ts +++ b/packages/opentelemetry-resources/src/platform/node/detect-resources.ts @@ -52,7 +52,6 @@ export const detectResources = async ( ); }; - /** * Writes debug information about the detected resources to the logger defined in the resource detection config, if one is provided. 
* diff --git a/packages/opentelemetry-resources/test/Resource.test.ts b/packages/opentelemetry-resources/test/Resource.test.ts index c3780591d6..7d4bcae248 100644 --- a/packages/opentelemetry-resources/test/Resource.test.ts +++ b/packages/opentelemetry-resources/test/Resource.test.ts @@ -104,20 +104,44 @@ describe('Resource', () => { describeNode('.default()', () => { it('should return a default resource', () => { const resource = Resource.default(); - assert.strictEqual(resource.attributes[SemanticResourceAttributes.TELEMETRY_SDK_NAME], SDK_INFO[SemanticResourceAttributes.TELEMETRY_SDK_NAME]); - assert.strictEqual(resource.attributes[SemanticResourceAttributes.TELEMETRY_SDK_LANGUAGE], SDK_INFO[SemanticResourceAttributes.TELEMETRY_SDK_LANGUAGE]); - assert.strictEqual(resource.attributes[SemanticResourceAttributes.TELEMETRY_SDK_VERSION], SDK_INFO[SemanticResourceAttributes.TELEMETRY_SDK_VERSION]); - assert.strictEqual(resource.attributes[SemanticResourceAttributes.SERVICE_NAME], `unknown_service:${process.argv0}`); + assert.strictEqual( + resource.attributes[SemanticResourceAttributes.TELEMETRY_SDK_NAME], + SDK_INFO[SemanticResourceAttributes.TELEMETRY_SDK_NAME] + ); + assert.strictEqual( + resource.attributes[SemanticResourceAttributes.TELEMETRY_SDK_LANGUAGE], + SDK_INFO[SemanticResourceAttributes.TELEMETRY_SDK_LANGUAGE] + ); + assert.strictEqual( + resource.attributes[SemanticResourceAttributes.TELEMETRY_SDK_VERSION], + SDK_INFO[SemanticResourceAttributes.TELEMETRY_SDK_VERSION] + ); + assert.strictEqual( + resource.attributes[SemanticResourceAttributes.SERVICE_NAME], + `unknown_service:${process.argv0}` + ); }); }); describeBrowser('.default()', () => { it('should return a default resource', () => { const resource = Resource.default(); - assert.strictEqual(resource.attributes[SemanticResourceAttributes.TELEMETRY_SDK_NAME], SDK_INFO[SemanticResourceAttributes.TELEMETRY_SDK_NAME]); - assert.strictEqual(resource.attributes[SemanticResourceAttributes.TELEMETRY_SDK_LANGUAGE], SDK_INFO[SemanticResourceAttributes.TELEMETRY_SDK_LANGUAGE]); - assert.strictEqual(resource.attributes[SemanticResourceAttributes.TELEMETRY_SDK_VERSION], SDK_INFO[SemanticResourceAttributes.TELEMETRY_SDK_VERSION]); - assert.strictEqual(resource.attributes[SemanticResourceAttributes.SERVICE_NAME], 'unknown_service'); + assert.strictEqual( + resource.attributes[SemanticResourceAttributes.TELEMETRY_SDK_NAME], + SDK_INFO[SemanticResourceAttributes.TELEMETRY_SDK_NAME] + ); + assert.strictEqual( + resource.attributes[SemanticResourceAttributes.TELEMETRY_SDK_LANGUAGE], + SDK_INFO[SemanticResourceAttributes.TELEMETRY_SDK_LANGUAGE] + ); + assert.strictEqual( + resource.attributes[SemanticResourceAttributes.TELEMETRY_SDK_VERSION], + SDK_INFO[SemanticResourceAttributes.TELEMETRY_SDK_VERSION] + ); + assert.strictEqual( + resource.attributes[SemanticResourceAttributes.SERVICE_NAME], + 'unknown_service' + ); }); }); }); diff --git a/packages/opentelemetry-resources/test/detectors/browser/EnvDetector.test.ts b/packages/opentelemetry-resources/test/detectors/browser/EnvDetector.test.ts index da4354f89e..0e8894ef93 100644 --- a/packages/opentelemetry-resources/test/detectors/browser/EnvDetector.test.ts +++ b/packages/opentelemetry-resources/test/detectors/browser/EnvDetector.test.ts @@ -27,12 +27,15 @@ import { describeBrowser } from '../../util'; describeBrowser('envDetector() on web browser', () => { describe('with valid env', () => { before(() => { - (globalThis as typeof globalThis & RAW_ENVIRONMENT).OTEL_RESOURCE_ATTRIBUTES = + ( + 
globalThis as typeof globalThis & RAW_ENVIRONMENT + ).OTEL_RESOURCE_ATTRIBUTES = 'webengine.name="chromium",webengine.version="99",webengine.description="Chromium",custom.key="custom%20value"'; }); after(() => { - delete (globalThis as typeof globalThis & RAW_ENVIRONMENT).OTEL_RESOURCE_ATTRIBUTES; + delete (globalThis as typeof globalThis & RAW_ENVIRONMENT) + .OTEL_RESOURCE_ATTRIBUTES; }); it('should return resource information from environment variable', async () => { @@ -46,20 +49,20 @@ describeBrowser('envDetector() on web browser', () => { }); }); - describe('with invalid env', () => { - const values = [ - 'webengine.description="with spaces"', - ]; + const values = ['webengine.description="with spaces"']; for (const value of values) { describe(`value: '${value}'`, () => { before(() => { - (globalThis as typeof globalThis & RAW_ENVIRONMENT).OTEL_RESOURCE_ATTRIBUTES = value; + ( + globalThis as typeof globalThis & RAW_ENVIRONMENT + ).OTEL_RESOURCE_ATTRIBUTES = value; }); after(() => { - delete (globalThis as typeof globalThis & RAW_ENVIRONMENT).OTEL_RESOURCE_ATTRIBUTES; + delete (globalThis as typeof globalThis & RAW_ENVIRONMENT) + .OTEL_RESOURCE_ATTRIBUTES; }); it('should return empty resource', async () => { diff --git a/packages/opentelemetry-resources/test/detectors/browser/HostDetector.test.ts b/packages/opentelemetry-resources/test/detectors/browser/HostDetector.test.ts index 8bebadb7c4..a1299541d9 100644 --- a/packages/opentelemetry-resources/test/detectors/browser/HostDetector.test.ts +++ b/packages/opentelemetry-resources/test/detectors/browser/HostDetector.test.ts @@ -15,9 +15,7 @@ */ import * as sinon from 'sinon'; import { hostDetector, Resource } from '../../../src'; -import { - assertEmptyResource, -} from '../../util/resource-assertions'; +import { assertEmptyResource } from '../../util/resource-assertions'; import { describeBrowser } from '../../util'; describeBrowser('hostDetector() on web browser', () => { diff --git a/packages/opentelemetry-resources/test/detectors/browser/OSDetector.test.ts b/packages/opentelemetry-resources/test/detectors/browser/OSDetector.test.ts index 72f0dc6fa6..991aa05271 100644 --- a/packages/opentelemetry-resources/test/detectors/browser/OSDetector.test.ts +++ b/packages/opentelemetry-resources/test/detectors/browser/OSDetector.test.ts @@ -15,9 +15,7 @@ */ import * as sinon from 'sinon'; import { osDetector, Resource } from '../../../src'; -import { - assertEmptyResource, -} from '../../util/resource-assertions'; +import { assertEmptyResource } from '../../util/resource-assertions'; import { describeBrowser } from '../../util'; describeBrowser('osDetector() on web browser', () => { diff --git a/packages/opentelemetry-resources/test/detectors/browser/ProcessDetector.test.ts b/packages/opentelemetry-resources/test/detectors/browser/ProcessDetector.test.ts index 4b69b479eb..ae4c824ce0 100644 --- a/packages/opentelemetry-resources/test/detectors/browser/ProcessDetector.test.ts +++ b/packages/opentelemetry-resources/test/detectors/browser/ProcessDetector.test.ts @@ -15,9 +15,7 @@ */ import * as sinon from 'sinon'; import { processDetector, Resource } from '../../../src'; -import { - assertEmptyResource, -} from '../../util/resource-assertions'; +import { assertEmptyResource } from '../../util/resource-assertions'; import { describeBrowser } from '../../util'; describeBrowser('processDetector() on web browser', () => { diff --git a/packages/opentelemetry-resources/test/detectors/node/BrowserDetector.test.ts 
b/packages/opentelemetry-resources/test/detectors/node/BrowserDetector.test.ts index 27d7bdfeff..73873b1e1a 100644 --- a/packages/opentelemetry-resources/test/detectors/node/BrowserDetector.test.ts +++ b/packages/opentelemetry-resources/test/detectors/node/BrowserDetector.test.ts @@ -16,10 +16,7 @@ import { Resource } from '../../../src'; import { browserDetector } from '../../../src/detectors/BrowserDetector'; import { describeNode } from '../../util'; -import { - assertEmptyResource, -} from '../../util/resource-assertions'; - +import { assertEmptyResource } from '../../util/resource-assertions'; describeNode('browserDetector()', () => { it('should return empty resources if window.document is missing', async () => { @@ -27,4 +24,3 @@ describeNode('browserDetector()', () => { assertEmptyResource(resource); }); }); - diff --git a/packages/opentelemetry-resources/test/detectors/node/EnvDetector.test.ts b/packages/opentelemetry-resources/test/detectors/node/EnvDetector.test.ts index 3f6b63a1f1..1397978377 100644 --- a/packages/opentelemetry-resources/test/detectors/node/EnvDetector.test.ts +++ b/packages/opentelemetry-resources/test/detectors/node/EnvDetector.test.ts @@ -38,15 +38,13 @@ describeNode('envDetector() on Node.js', () => { podName: 'pod-xyz-123', clusterName: 'c1', namespaceName: 'default', - deploymentName: 'deployment name' + deploymentName: 'deployment name', }); }); }); describe('with invalid env', () => { - const values = [ - 'k8s.deployment.name="with spaces"', - ]; + const values = ['k8s.deployment.name="with spaces"']; for (const value of values) { describe(`value: '${value}'`, () => { diff --git a/packages/opentelemetry-resources/test/detectors/node/ProcessDetector.test.ts b/packages/opentelemetry-resources/test/detectors/node/ProcessDetector.test.ts index 2357c13479..dc1a3473a4 100644 --- a/packages/opentelemetry-resources/test/detectors/node/ProcessDetector.test.ts +++ b/packages/opentelemetry-resources/test/detectors/node/ProcessDetector.test.ts @@ -32,9 +32,7 @@ describeNode('processDetector() on Node.js', () => { sinon .stub(process, 'argv') .value(['/tmp/node', '/home/ot/test.js', 'arg1', 'arg2']); - sinon - .stub(process, 'versions') - .value({'node': '1.4.1'}); + sinon.stub(process, 'versions').value({ node: '1.4.1' }); const resource: Resource = await processDetector.detect(); assertResource(resource, { diff --git a/packages/opentelemetry-resources/test/util/resource-assertions.ts b/packages/opentelemetry-resources/test/util/resource-assertions.ts index 32f37e4486..70ab2c8c33 100644 --- a/packages/opentelemetry-resources/test/util/resource-assertions.ts +++ b/packages/opentelemetry-resources/test/util/resource-assertions.ts @@ -311,17 +311,22 @@ export const assertResource = ( } if (validations.runtimeDescription) { assert.strictEqual( - resource.attributes[SemanticResourceAttributes.PROCESS_RUNTIME_DESCRIPTION], + resource.attributes[ + SemanticResourceAttributes.PROCESS_RUNTIME_DESCRIPTION + ], validations.runtimeDescription ); } }; -export const assertWebEngineResource = (resource: Resource, validations: { - name?: string; - version?: string; - description?: string; -}) => { +export const assertWebEngineResource = ( + resource: Resource, + validations: { + name?: string; + version?: string; + description?: string; + } +) => { if (validations.name) { assert.strictEqual( resource.attributes[SemanticResourceAttributes.WEBENGINE_NAME], @@ -352,12 +357,14 @@ export const assertEmptyResource = (resource: Resource) => { }; const assertHasOneLabel = (prefix: string, 
resource: Resource): void => { - const hasOne = Object.entries(SemanticResourceAttributes).find(([key, value]) => { - return ( - key.startsWith(prefix) && - Object.prototype.hasOwnProperty.call(resource.attributes, value) - ); - }); + const hasOne = Object.entries(SemanticResourceAttributes).find( + ([key, value]) => { + return ( + key.startsWith(prefix) && + Object.prototype.hasOwnProperty.call(resource.attributes, value) + ); + } + ); assert.ok( hasOne, diff --git a/packages/opentelemetry-sdk-trace-base/src/BasicTracerProvider.ts b/packages/opentelemetry-sdk-trace-base/src/BasicTracerProvider.ts index aae02ad1ad..5a0889271b 100644 --- a/packages/opentelemetry-sdk-trace-base/src/BasicTracerProvider.ts +++ b/packages/opentelemetry-sdk-trace-base/src/BasicTracerProvider.ts @@ -74,7 +74,11 @@ export class BasicTracerProvider implements TracerProvider { readonly resource: Resource; constructor(config: TracerConfig = {}) { - const mergedConfig = merge({}, loadDefaultConfig(), reconfigureLimits(config)); + const mergedConfig = merge( + {}, + loadDefaultConfig(), + reconfigureLimits(config) + ); this.resource = mergedConfig.resource ?? Resource.empty(); this.resource = Resource.default().merge(this.resource); this._config = Object.assign({}, mergedConfig, { @@ -90,10 +94,21 @@ export class BasicTracerProvider implements TracerProvider { } } - getTracer(name: string, version?: string, options?: { schemaUrl?: string }): Tracer { + getTracer( + name: string, + version?: string, + options?: { schemaUrl?: string } + ): Tracer { const key = `${name}@${version || ''}:${options?.schemaUrl || ''}`; if (!this._tracers.has(key)) { - this._tracers.set(key, new Tracer({ name, version, schemaUrl: options?.schemaUrl }, this._config, this)); + this._tracers.set( + key, + new Tracer( + { name, version, schemaUrl: options?.schemaUrl }, + this._config, + this + ) + ); } // eslint-disable-next-line @typescript-eslint/no-non-null-assertion @@ -211,14 +226,14 @@ export class BasicTracerProvider implements TracerProvider { */ protected _getPropagator(name: string): TextMapPropagator | undefined { return ( - (this.constructor as typeof BasicTracerProvider)._registeredPropagators - ).get(name)?.(); + this.constructor as typeof BasicTracerProvider + )._registeredPropagators.get(name)?.(); } protected _getSpanExporter(name: string): SpanExporter | undefined { return ( - (this.constructor as typeof BasicTracerProvider)._registeredExporters - ).get(name)?.(); + this.constructor as typeof BasicTracerProvider + )._registeredExporters.get(name)?.(); } protected _buildPropagatorFromEnv(): TextMapPropagator | undefined { diff --git a/packages/opentelemetry-sdk-trace-base/src/Span.ts b/packages/opentelemetry-sdk-trace-base/src/Span.ts index 058507b7f7..87ee47e28a 100644 --- a/packages/opentelemetry-sdk-trace-base/src/Span.ts +++ b/packages/opentelemetry-sdk-trace-base/src/Span.ts @@ -24,7 +24,7 @@ import { isTimeInput, otperformance, sanitizeAttributes, - timeInputToHrTime + timeInputToHrTime, } from '@opentelemetry/core'; import { Resource } from '@opentelemetry/resources'; import { SemanticAttributes } from '@opentelemetry/semantic-conventions'; @@ -76,7 +76,7 @@ export class Span implements api.Span, ReadableSpan { parentSpanId?: string, links: api.Link[] = [], startTime?: api.TimeInput, - clock: Clock = otperformance, + clock: Clock = otperformance ) { this._clock = clock; this.name = spanName; @@ -90,7 +90,8 @@ export class Span implements api.Span, ReadableSpan { this._spanLimits = parentTracer.getSpanLimits(); 
this._spanProcessor = parentTracer.getActiveSpanProcessor(); this._spanProcessor.onStart(this, context); - this._attributeValueLengthLimit = this._spanLimits.attributeValueLengthLimit || 0; + this._attributeValueLengthLimit = + this._spanLimits.attributeValueLengthLimit || 0; } spanContext(): api.SpanContext { @@ -111,7 +112,7 @@ export class Span implements api.Span, ReadableSpan { if ( Object.keys(this.attributes).length >= - this._spanLimits.attributeCountLimit! && + this._spanLimits.attributeCountLimit! && !Object.prototype.hasOwnProperty.call(this.attributes, key) ) { return this; @@ -206,15 +207,17 @@ export class Span implements api.Span, ReadableSpan { return this._ended === false; } - recordException(exception: api.Exception, time: api.TimeInput = this._clock.now()): void { + recordException( + exception: api.Exception, + time: api.TimeInput = this._clock.now() + ): void { const attributes: api.SpanAttributes = {}; if (typeof exception === 'string') { attributes[SemanticAttributes.EXCEPTION_MESSAGE] = exception; } else if (exception) { if (exception.code) { - attributes[ - SemanticAttributes.EXCEPTION_TYPE - ] = exception.code.toString(); + attributes[SemanticAttributes.EXCEPTION_TYPE] = + exception.code.toString(); } else if (exception.name) { attributes[SemanticAttributes.EXCEPTION_TYPE] = exception.name; } @@ -247,7 +250,9 @@ export class Span implements api.Span, ReadableSpan { private _isSpanEnded(): boolean { if (this._ended) { - api.diag.warn(`Can not execute the operation on ended Span {traceId: ${this._spanContext.traceId}, spanId: ${this._spanContext.spanId}}`); + api.diag.warn( + `Can not execute the operation on ended Span {traceId: ${this._spanContext.traceId}, spanId: ${this._spanContext.spanId}}` + ); } return this._ended; } @@ -290,7 +295,9 @@ export class Span implements api.Span, ReadableSpan { // Array of strings if (Array.isArray(value)) { - return (value as []).map(val => typeof val === 'string' ? this._truncateToLimitUtil(val, limit) : val); + return (value as []).map(val => + typeof val === 'string' ? this._truncateToLimitUtil(val, limit) : val + ); } // Other types, no need to apply value length limit diff --git a/packages/opentelemetry-sdk-trace-base/src/Tracer.ts b/packages/opentelemetry-sdk-trace-base/src/Tracer.ts index 23d0f47895..6fc0102359 100644 --- a/packages/opentelemetry-sdk-trace-base/src/Tracer.ts +++ b/packages/opentelemetry-sdk-trace-base/src/Tracer.ts @@ -75,7 +75,9 @@ export class Tracer implements api.Tracer { if (isTracingSuppressed(context)) { api.diag.debug('Instrumentation suppressed, returning Noop Span'); - const nonRecordingSpan = api.trace.wrapSpanContext(api.INVALID_SPAN_CONTEXT); + const nonRecordingSpan = api.trace.wrapSpanContext( + api.INVALID_SPAN_CONTEXT + ); return nonRecordingSpan; } @@ -84,7 +86,10 @@ export class Tracer implements api.Tracer { let traceId; let traceState; let parentSpanId; - if (!parentSpanContext || !api.trace.isSpanContextValid(parentSpanContext)) { + if ( + !parentSpanContext || + !api.trace.isSpanContextValid(parentSpanContext) + ) { // New root span. 
traceId = this._idGenerator.generateTraceId(); } else { @@ -118,7 +123,9 @@ export class Tracer implements api.Tracer { : api.TraceFlags.NONE; const spanContext = { traceId, spanId, traceFlags, traceState }; if (samplingResult.decision === api.SamplingDecision.NOT_RECORD) { - api.diag.debug('Recording is off, propagating context in a non-recording span'); + api.diag.debug( + 'Recording is off, propagating context in a non-recording span' + ); const nonRecordingSpan = api.trace.wrapSpanContext(spanContext); return nonRecordingSpan; } @@ -131,11 +138,13 @@ export class Tracer implements api.Tracer { spanKind, parentSpanId, links, - options.startTime, + options.startTime ); // Set initial span attributes. The attributes object may have been mutated // by the sampler, so we sanitize the merged attributes before setting them. - const initAttributes = sanitizeAttributes(Object.assign(attributes, samplingResult.attributes)); + const initAttributes = sanitizeAttributes( + Object.assign(attributes, samplingResult.attributes) + ); span.setAttributes(initAttributes); return span; } diff --git a/packages/opentelemetry-sdk-trace-base/src/config.ts b/packages/opentelemetry-sdk-trace-base/src/config.ts index 47e32f5b35..18753ea70e 100644 --- a/packages/opentelemetry-sdk-trace-base/src/config.ts +++ b/packages/opentelemetry-sdk-trace-base/src/config.ts @@ -15,11 +15,7 @@ */ import { diag } from '@opentelemetry/api'; -import { - getEnv, - TracesSamplerValues, - ENVIRONMENT, -} from '@opentelemetry/core'; +import { getEnv, TracesSamplerValues, ENVIRONMENT } from '@opentelemetry/core'; import { Sampler } from './Sampler'; import { AlwaysOffSampler } from './sampler/AlwaysOffSampler'; import { AlwaysOnSampler } from './sampler/AlwaysOnSampler'; @@ -48,7 +44,8 @@ export function loadDefaultConfig() { attributeCountLimit: getEnv().OTEL_ATTRIBUTE_COUNT_LIMIT, }, spanLimits: { - attributeValueLengthLimit: getEnv().OTEL_SPAN_ATTRIBUTE_VALUE_LENGTH_LIMIT, + attributeValueLengthLimit: + getEnv().OTEL_SPAN_ATTRIBUTE_VALUE_LENGTH_LIMIT, attributeCountLimit: getEnv().OTEL_SPAN_ATTRIBUTE_COUNT_LIMIT, linkCountLimit: getEnv().OTEL_SPAN_LINK_COUNT_LIMIT, eventCountLimit: getEnv().OTEL_SPAN_EVENT_COUNT_LIMIT, @@ -77,10 +74,14 @@ export function buildSamplerFromEnv( root: new AlwaysOffSampler(), }); case TracesSamplerValues.TraceIdRatio: - return new TraceIdRatioBasedSampler(getSamplerProbabilityFromEnv(environment)); + return new TraceIdRatioBasedSampler( + getSamplerProbabilityFromEnv(environment) + ); case TracesSamplerValues.ParentBasedTraceIdRatio: return new ParentBasedSampler({ - root: new TraceIdRatioBasedSampler(getSamplerProbabilityFromEnv(environment)), + root: new TraceIdRatioBasedSampler( + getSamplerProbabilityFromEnv(environment) + ), }); default: diag.error( diff --git a/packages/opentelemetry-sdk-trace-base/src/export/BatchSpanProcessorBase.ts b/packages/opentelemetry-sdk-trace-base/src/export/BatchSpanProcessorBase.ts index 8978fc7d79..1a0968642d 100644 --- a/packages/opentelemetry-sdk-trace-base/src/export/BatchSpanProcessorBase.ts +++ b/packages/opentelemetry-sdk-trace-base/src/export/BatchSpanProcessorBase.ts @@ -14,14 +14,14 @@ * limitations under the License. 
*/ -import {context, Context, diag, TraceFlags} from '@opentelemetry/api'; +import { context, Context, diag, TraceFlags } from '@opentelemetry/api'; import { BindOnceFuture, ExportResultCode, getEnv, globalErrorHandler, suppressTracing, - unrefTimer + unrefTimer, } from '@opentelemetry/core'; import { Span } from '../Span'; import { SpanProcessor } from '../SpanProcessor'; @@ -33,7 +33,9 @@ import { SpanExporter } from './SpanExporter'; * Implementation of the {@link SpanProcessor} that batches spans exported by * the SDK then pushes them to the exporter pipeline. */ -export abstract class BatchSpanProcessorBase<T extends BufferConfig> implements SpanProcessor { +export abstract class BatchSpanProcessorBase<T extends BufferConfig> + implements SpanProcessor +{ private readonly _maxExportBatchSize: number; private readonly _maxQueueSize: number; private readonly _scheduledDelayMillis: number; @@ -65,7 +67,9 @@ export abstract class BatchSpanProcessorBase<T extends BufferConfig> implements this._shutdownOnce = new BindOnceFuture(this._shutdown, this); if (this._maxExportBatchSize > this._maxQueueSize) { - diag.warn('BatchSpanProcessor: maxExportBatchSize must be smaller or equal to maxQueueSize, setting maxExportBatchSize to match maxQueueSize'); + diag.warn( + 'BatchSpanProcessor: maxExportBatchSize must be smaller or equal to maxQueueSize, setting maxExportBatchSize to match maxQueueSize' + ); this._maxExportBatchSize = this._maxQueueSize; } } diff --git a/packages/opentelemetry-sdk-trace-base/src/export/ConsoleSpanExporter.ts b/packages/opentelemetry-sdk-trace-base/src/export/ConsoleSpanExporter.ts index 57baa62d3a..8f1b8e1978 100644 --- a/packages/opentelemetry-sdk-trace-base/src/export/ConsoleSpanExporter.ts +++ b/packages/opentelemetry-sdk-trace-base/src/export/ConsoleSpanExporter.ts @@ -65,7 +65,7 @@ export class ConsoleSpanExporter implements SpanExporter { attributes: span.attributes, status: span.status, events: span.events, - links: span.links + links: span.links, }; } diff --git a/packages/opentelemetry-sdk-trace-base/src/export/SimpleSpanProcessor.ts b/packages/opentelemetry-sdk-trace-base/src/export/SimpleSpanProcessor.ts index a510ad02a6..5e6064d03e 100644 --- a/packages/opentelemetry-sdk-trace-base/src/export/SimpleSpanProcessor.ts +++ b/packages/opentelemetry-sdk-trace-base/src/export/SimpleSpanProcessor.ts @@ -20,7 +20,7 @@ import { ExportResultCode, globalErrorHandler, BindOnceFuture, - ExportResult + ExportResult, } from '@opentelemetry/core'; import { Span } from '../Span'; import { SpanProcessor } from '../SpanProcessor'; @@ -46,7 +46,7 @@ export class SimpleSpanProcessor implements SpanProcessor { } // does nothing. - onStart(_span: Span, _parentContext: Context): void { } + onStart(_span: Span, _parentContext: Context): void {} onEnd(span: ReadableSpan): void { if (this._shutdownOnce.isCalled) { @@ -57,18 +57,21 @@ export class SimpleSpanProcessor implements SpanProcessor { return; } - internal._export(this._exporter, [span]).then((result: ExportResult) => { - if (result.code !== ExportResultCode.SUCCESS) { - globalErrorHandler( - result.error ?? - new Error( - `SimpleSpanProcessor: span export failed (status ${result})` - ) - ); - } - }).catch(error => { - globalErrorHandler(error); - }); + internal + ._export(this._exporter, [span]) + .then((result: ExportResult) => { + if (result.code !== ExportResultCode.SUCCESS) { + globalErrorHandler( + result.error ??
+ new Error( + `SimpleSpanProcessor: span export failed (status ${result})` + ) + ); + } + }) + .catch(error => { + globalErrorHandler(error); + }); } shutdown(): Promise { diff --git a/packages/opentelemetry-sdk-trace-base/src/platform/browser/export/BatchSpanProcessor.ts b/packages/opentelemetry-sdk-trace-base/src/platform/browser/export/BatchSpanProcessor.ts index 12b6eb24a4..385c883e70 100644 --- a/packages/opentelemetry-sdk-trace-base/src/platform/browser/export/BatchSpanProcessor.ts +++ b/packages/opentelemetry-sdk-trace-base/src/platform/browser/export/BatchSpanProcessor.ts @@ -22,13 +22,19 @@ export class BatchSpanProcessor extends BatchSpanProcessorBase void; private _pageHideListener?: () => void; - constructor(_exporter: SpanExporter, config?: BatchSpanProcessorBrowserConfig) { + constructor( + _exporter: SpanExporter, + config?: BatchSpanProcessorBrowserConfig + ) { super(_exporter, config); this.onInit(config); } private onInit(config?: BatchSpanProcessorBrowserConfig): void { - if (config?.disableAutoFlushOnDocumentHide !== true && typeof document !== 'undefined') { + if ( + config?.disableAutoFlushOnDocumentHide !== true && + typeof document !== 'undefined' + ) { this._visibilityChangeListener = () => { if (document.visibilityState === 'hidden') { void this.forceFlush(); @@ -37,7 +43,10 @@ export class BatchSpanProcessor extends BatchSpanProcessorBase { void this.forceFlush(); }; - document.addEventListener('visibilitychange', this._visibilityChangeListener); + document.addEventListener( + 'visibilitychange', + this._visibilityChangeListener + ); // use 'pagehide' event as a fallback for Safari; see https://bugs.webkit.org/show_bug.cgi?id=116769 document.addEventListener('pagehide', this._pageHideListener); @@ -47,7 +56,10 @@ export class BatchSpanProcessor extends BatchSpanProcessorBase { let visibilityState: VisibilityState = 'visible'; @@ -63,7 +64,9 @@ describeDocument('BatchSpanProcessor - web main context', () => { describe('AND disableAutoFlushOnDocumentHide configuration option', () => { it('set to false should force flush spans', () => { - processor = new BatchSpanProcessor(exporter, { disableAutoFlushOnDocumentHide: false }); + processor = new BatchSpanProcessor(exporter, { + disableAutoFlushOnDocumentHide: false, + }); forceFlushSpy = sinon.stub(processor, 'forceFlush'); assert.strictEqual(forceFlushSpy.callCount, 0); hideDocument(); @@ -71,7 +74,9 @@ describeDocument('BatchSpanProcessor - web main context', () => { }); it('set to true should NOT force flush spans', () => { - processor = new BatchSpanProcessor(exporter, { disableAutoFlushOnDocumentHide: true }); + processor = new BatchSpanProcessor(exporter, { + disableAutoFlushOnDocumentHide: true, + }); forceFlushSpy = sinon.stub(processor, 'forceFlush'); assert.strictEqual(forceFlushSpy.callCount, 0); hideDocument(); diff --git a/packages/opentelemetry-sdk-trace-base/test/common/BasicTracerProvider.test.ts b/packages/opentelemetry-sdk-trace-base/test/common/BasicTracerProvider.test.ts index 05ec1152de..8dd49a9c14 100644 --- a/packages/opentelemetry-sdk-trace-base/test/common/BasicTracerProvider.test.ts +++ b/packages/opentelemetry-sdk-trace-base/test/common/BasicTracerProvider.test.ts @@ -28,10 +28,7 @@ import { diag, } from '@opentelemetry/api'; import { CompositePropagator } from '@opentelemetry/core'; -import { - TraceState, - W3CTraceContextPropagator, -} from '@opentelemetry/core'; +import { TraceState, W3CTraceContextPropagator } from '@opentelemetry/core'; import { Resource } from 
'@opentelemetry/resources'; import * as assert from 'assert'; import * as sinon from 'sinon'; @@ -47,18 +44,10 @@ import { } from '../../src'; class DummyPropagator implements TextMapPropagator { - inject( - context: Context, - carrier: any, - setter: TextMapSetter - ): void { + inject(context: Context, carrier: any, setter: TextMapSetter): void { throw new Error('Method not implemented.'); } - extract( - context: Context, - carrier: any, - getter: TextMapGetter - ): Context { + extract(context: Context, carrier: any, getter: TextMapGetter): Context { throw new Error('Method not implemented.'); } fields(): string[] { @@ -70,13 +59,10 @@ class DummyExporter extends InMemorySpanExporter {} describe('BasicTracerProvider', () => { let envSource: Record; - let setGlobalPropagatorStub: sinon.SinonSpy< - [TextMapPropagator], - boolean - >; + let setGlobalPropagatorStub: sinon.SinonSpy<[TextMapPropagator], boolean>; if (typeof process === 'undefined') { - envSource = (globalThis as unknown) as Record; + envSource = globalThis as unknown as Record; } else { envSource = process.env as Record; } @@ -385,22 +371,25 @@ describe('BasicTracerProvider', () => { protected static override readonly _registeredPropagators = new Map< string, () => TextMapPropagator - >([ - ...BasicTracerProvider._registeredPropagators, - ['custom-propagator', () => new DummyPropagator()], - ]); + >([ + ...BasicTracerProvider._registeredPropagators, + ['custom-propagator', () => new DummyPropagator()], + ]); protected static override readonly _registeredExporters = new Map< string, () => SpanExporter - >([ - ...BasicTracerProvider._registeredExporters, - ['custom-exporter', () => new DummyExporter()], - ]); + >([ + ...BasicTracerProvider._registeredExporters, + ['custom-exporter', () => new DummyExporter()], + ]); } const provider = new CustomTracerProvider({}); - assert(provider['_getPropagator']('tracecontext') instanceof W3CTraceContextPropagator); + assert( + provider['_getPropagator']('tracecontext') instanceof + W3CTraceContextPropagator + ); /* BasicTracerProvider has no exporters by default, so skipping testing the exporter getter */ provider.register(); @@ -410,7 +399,10 @@ describe('BasicTracerProvider', () => { const exporter = processor._exporter; assert(exporter instanceof DummyExporter); - sinon.assert.calledOnceWithExactly(setGlobalPropagatorStub, sinon.match.instanceOf(DummyPropagator)); + sinon.assert.calledOnceWithExactly( + setGlobalPropagatorStub, + sinon.match.instanceOf(DummyPropagator) + ); }); it('the old way of extending still works', () => { @@ -419,25 +411,25 @@ describe('BasicTracerProvider', () => { protected static override readonly _registeredPropagators = new Map< string, () => TextMapPropagator - >([ - ['custom-propagator', () => new DummyPropagator()], - ]); + >([['custom-propagator', () => new DummyPropagator()]]); protected static override readonly _registeredExporters = new Map< string, () => SpanExporter - >([ - ['custom-exporter', () => new DummyExporter()], - ]); + >([['custom-exporter', () => new DummyExporter()]]); - protected override _getPropagator(name: string): TextMapPropagator | undefined { + protected override _getPropagator( + name: string + ): TextMapPropagator | undefined { return ( super._getPropagator(name) || CustomTracerProvider._registeredPropagators.get(name)?.() ); } - protected override _getSpanExporter(name: string): SpanExporter | undefined { + protected override _getSpanExporter( + name: string + ): SpanExporter | undefined { return ( super._getSpanExporter(name) || 
CustomTracerProvider._registeredExporters.get(name)?.() @@ -453,7 +445,10 @@ describe('BasicTracerProvider', () => { const exporter = processor._exporter; assert(exporter instanceof DummyExporter); - sinon.assert.calledOnceWithExactly(setGlobalPropagatorStub, sinon.match.instanceOf(DummyPropagator)); + sinon.assert.calledOnceWithExactly( + setGlobalPropagatorStub, + sinon.match.instanceOf(DummyPropagator) + ); }); }); @@ -518,7 +513,9 @@ describe('BasicTracerProvider', () => { describe('exporter', () => { class CustomTracerProvider extends BasicTracerProvider { - protected override _getSpanExporter(name: string): SpanExporter | undefined { + protected override _getSpanExporter( + name: string + ): SpanExporter | undefined { return name === 'memory' ? new InMemorySpanExporter() : BasicTracerProvider._registeredExporters.get(name)?.(); @@ -537,7 +534,7 @@ describe('BasicTracerProvider', () => { provider.register(); assert.ok( errorStub.getCall(0).args[0] === - 'Exporter "missing-exporter" requested through environment variable is unavailable.' + 'Exporter "missing-exporter" requested through environment variable is unavailable.' ); errorStub.restore(); }); @@ -649,7 +646,10 @@ describe('BasicTracerProvider', () => { trace.setSpan(ROOT_CONTEXT, span) ); const context = rootSpan.spanContext(); - assert.notStrictEqual(context.traceId, overrideParent.spanContext().traceId); + assert.notStrictEqual( + context.traceId, + overrideParent.spanContext().traceId + ); span.end(); rootSpan.end(); }); @@ -663,7 +663,7 @@ describe('BasicTracerProvider', () => { {}, trace.setSpanContext( ROOT_CONTEXT, - ('invalid-parent' as unknown) as SpanContext + 'invalid-parent' as unknown as SpanContext ) ); assert.ok(span instanceof Span); diff --git a/packages/opentelemetry-sdk-trace-base/test/common/Sampler.test.ts b/packages/opentelemetry-sdk-trace-base/test/common/Sampler.test.ts index 1e4682e356..3819754967 100644 --- a/packages/opentelemetry-sdk-trace-base/test/common/Sampler.test.ts +++ b/packages/opentelemetry-sdk-trace-base/test/common/Sampler.test.ts @@ -42,7 +42,14 @@ describe('Sampler', () => { it('Sampler return values should fit SamplerResult', () => { function assertResult(sampler: T) { - const result = sampler.shouldSample(context.active(), 'trace-id', 'span-name', SpanKind.INTERNAL, {}, []); + const result = sampler.shouldSample( + context.active(), + 'trace-id', + 'span-name', + SpanKind.INTERNAL, + {}, + [] + ); assertAssignable(result); assertAssignable(result.decision); } diff --git a/packages/opentelemetry-sdk-trace-base/test/common/Span.test.ts b/packages/opentelemetry-sdk-trace-base/test/common/Span.test.ts index 792c1f88df..df19ace156 100644 --- a/packages/opentelemetry-sdk-trace-base/test/common/Span.test.ts +++ b/packages/opentelemetry-sdk-trace-base/test/common/Span.test.ts @@ -43,7 +43,8 @@ const performanceTimeOrigin: HrTime = [1, 1]; describe('Span', () => { beforeEach(() => { - sinon.stub(performance, 'timeOrigin') + sinon + .stub(performance, 'timeOrigin') .value(hrTimeToMilliseconds(performanceTimeOrigin)); }); afterEach(() => { @@ -66,7 +67,7 @@ describe('Span', () => { const linkContext: SpanContext = { traceId: 'e4cda95b652f4a1592b449d5929fda1b', spanId: '7e0c63257de34c92', - traceFlags: TraceFlags.SAMPLED + traceFlags: TraceFlags.SAMPLED, }; it('should create a Span instance', () => { @@ -331,10 +332,23 @@ describe('Span', () => { }); it('should truncate value of arrays which exceeds this limit', () => { - span.setAttribute('attr-array-of-strings', ['abcdefgh', 'abc', 'abcde', 
'']); + span.setAttribute('attr-array-of-strings', [ + 'abcdefgh', + 'abc', + 'abcde', + '', + ]); span.setAttribute('attr-array-of-bool', [true, false]); - assert.deepStrictEqual(span.attributes['attr-array-of-strings'], ['abcde', 'abc', 'abcde', '']); - assert.deepStrictEqual(span.attributes['attr-array-of-bool'], [true, false]); + assert.deepStrictEqual(span.attributes['attr-array-of-strings'], [ + 'abcde', + 'abc', + 'abcde', + '', + ]); + assert.deepStrictEqual(span.attributes['attr-array-of-bool'], [ + true, + false, + ]); }); it('should not truncate value which length not exceeds this limit', () => { @@ -366,9 +380,20 @@ describe('Span', () => { it('should not truncate any value', () => { span.setAttribute('attr-not-truncate', 'abcdefgh'); - span.setAttribute('attr-array-of-strings', ['abcdefgh', 'abc', 'abcde']); - assert.deepStrictEqual(span.attributes['attr-not-truncate'], 'abcdefgh'); - assert.deepStrictEqual(span.attributes['attr-array-of-strings'], ['abcdefgh', 'abc', 'abcde']); + span.setAttribute('attr-array-of-strings', [ + 'abcdefgh', + 'abc', + 'abcde', + ]); + assert.deepStrictEqual( + span.attributes['attr-not-truncate'], + 'abcdefgh' + ); + assert.deepStrictEqual(span.attributes['attr-array-of-strings'], [ + 'abcdefgh', + 'abc', + 'abcde', + ]); }); }); }); @@ -424,10 +449,23 @@ describe('Span', () => { }); it('should truncate value of arrays which exceeds this limit', () => { - span.setAttribute('attr-array-of-strings', ['abcdefgh', 'abc', 'abcde', '']); + span.setAttribute('attr-array-of-strings', [ + 'abcdefgh', + 'abc', + 'abcde', + '', + ]); span.setAttribute('attr-array-of-bool', [true, false]); - assert.deepStrictEqual(span.attributes['attr-array-of-strings'], ['abcde', 'abc', 'abcde', '']); - assert.deepStrictEqual(span.attributes['attr-array-of-bool'], [true, false]); + assert.deepStrictEqual(span.attributes['attr-array-of-strings'], [ + 'abcde', + 'abc', + 'abcde', + '', + ]); + assert.deepStrictEqual(span.attributes['attr-array-of-bool'], [ + true, + false, + ]); }); it('should not truncate value which length not exceeds this limit', () => { @@ -459,9 +497,20 @@ describe('Span', () => { it('should not truncate any value', () => { span.setAttribute('attr-not-truncate', 'abcdefgh'); - span.setAttribute('attr-array-of-strings', ['abcdefgh', 'abc', 'abcde']); - assert.deepStrictEqual(span.attributes['attr-not-truncate'], 'abcdefgh'); - assert.deepStrictEqual(span.attributes['attr-array-of-strings'], ['abcdefgh', 'abc', 'abcde']); + span.setAttribute('attr-array-of-strings', [ + 'abcdefgh', + 'abc', + 'abcde', + ]); + assert.deepStrictEqual( + span.attributes['attr-not-truncate'], + 'abcdefgh' + ); + assert.deepStrictEqual(span.attributes['attr-array-of-strings'], [ + 'abcdefgh', + 'abc', + 'abcde', + ]); }); }); }); @@ -475,7 +524,7 @@ describe('Span', () => { }, spanLimits: { attributeCountLimit: 5, - } + }, }).getTracer('default'); const span = new Span( @@ -507,7 +556,7 @@ describe('Span', () => { }, spanLimits: { attributeCountLimit: DEFAULT_ATTRIBUTE_COUNT_LIMIT, - } + }, }).getTracer('default'); const span = new Span( @@ -523,7 +572,10 @@ describe('Span', () => { span.end(); it('should remove / drop all remaining values after the number of values exceeds the span limit', () => { - assert.strictEqual(Object.keys(span.attributes).length, DEFAULT_ATTRIBUTE_COUNT_LIMIT); + assert.strictEqual( + Object.keys(span.attributes).length, + DEFAULT_ATTRIBUTE_COUNT_LIMIT + ); assert.strictEqual(span.attributes['foo0'], 'bar0'); 
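// --- Illustrative sketch, not part of the patch ---
// The truncation assertions reformatted in Span.test.ts exercise the span
// attribute limits. A minimal setup that reproduces the behaviour they expect,
// assuming the sdk-trace-base version targeted by this patch (limit values
// mirror the tests above):
import { BasicTracerProvider } from '@opentelemetry/sdk-trace-base';

const provider = new BasicTracerProvider({
  spanLimits: {
    attributeValueLengthLimit: 5, // string values longer than 5 chars are truncated
    attributeCountLimit: 128, // attributes beyond the 128th are dropped
  },
});

const span = provider.getTracer('example').startSpan('demo');
span.setAttribute('attr-string', 'abcdefgh'); // stored as 'abcde'
span.setAttribute('attr-array', ['abcdefgh', 'abc']); // stored as ['abcde', 'abc']
span.setAttribute('attr-bool-array', [true, false]); // non-string values are left untouched
span.end();
// --- end sketch ---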
assert.strictEqual(span.attributes['foo10'], 'bar10'); assert.strictEqual(span.attributes['foo127'], 'bar127'); @@ -557,10 +609,23 @@ describe('Span', () => { }); it('should truncate value of arrays which exceeds span limit', () => { - span.setAttribute('attr-array-of-strings', ['abcdefgh', 'abc', 'abcde', '']); + span.setAttribute('attr-array-of-strings', [ + 'abcdefgh', + 'abc', + 'abcde', + '', + ]); span.setAttribute('attr-array-of-bool', [true, false]); - assert.deepStrictEqual(span.attributes['attr-array-of-strings'], ['abcde', 'abc', 'abcde', '']); - assert.deepStrictEqual(span.attributes['attr-array-of-bool'], [true, false]); + assert.deepStrictEqual(span.attributes['attr-array-of-strings'], [ + 'abcde', + 'abc', + 'abcde', + '', + ]); + assert.deepStrictEqual(span.attributes['attr-array-of-bool'], [ + true, + false, + ]); }); it('should not truncate value which length not exceeds span limit', () => { @@ -596,14 +661,30 @@ describe('Span', () => { it('should not truncate value', () => { span.setAttribute('attr-with-more-length', 'abcdefghijklmn'); - assert.strictEqual(span.attributes['attr-with-more-length'], 'abcdefghijklmn'); + assert.strictEqual( + span.attributes['attr-with-more-length'], + 'abcdefghijklmn' + ); }); it('should not truncate value of arrays', () => { - span.setAttribute('attr-array-of-strings', ['abcdefghijklmn', 'abc', 'abcde', '']); + span.setAttribute('attr-array-of-strings', [ + 'abcdefghijklmn', + 'abc', + 'abcde', + '', + ]); span.setAttribute('attr-array-of-bool', [true, false]); - assert.deepStrictEqual(span.attributes['attr-array-of-strings'], ['abcdefghijklmn', 'abc', 'abcde', '']); - assert.deepStrictEqual(span.attributes['attr-array-of-bool'], [true, false]); + assert.deepStrictEqual(span.attributes['attr-array-of-strings'], [ + 'abcdefghijklmn', + 'abc', + 'abcde', + '', + ]); + assert.deepStrictEqual(span.attributes['attr-array-of-bool'], [ + true, + false, + ]); }); it('should return same value for non-string values', () => { @@ -653,7 +734,10 @@ describe('Span', () => { spanContext, SpanKind.CLIENT ); - span.addEvent('rev', { ...validAttributes, ...invalidAttributes } as unknown as SpanAttributes); + span.addEvent('rev', { + ...validAttributes, + ...invalidAttributes, + } as unknown as SpanAttributes); span.end(); assert.strictEqual(span.events.length, 1); @@ -671,7 +755,7 @@ describe('Span', () => { const linkContext: SpanContext = { traceId: 'b3cda95b652f4a1592b449d5929fda1b', spanId: '6e0c63257de34c92', - traceFlags: TraceFlags.SAMPLED + traceFlags: TraceFlags.SAMPLED, }; const attributes = { attr1: 'value', attr2: 123, attr3: true }; const span = new Span( diff --git a/packages/opentelemetry-sdk-trace-base/test/common/Tracer.test.ts b/packages/opentelemetry-sdk-trace-base/test/common/Tracer.test.ts index d55374514c..1dcbffff58 100644 --- a/packages/opentelemetry-sdk-trace-base/test/common/Tracer.test.ts +++ b/packages/opentelemetry-sdk-trace-base/test/common/Tracer.test.ts @@ -24,13 +24,13 @@ import { SpanContext, SpanKind, trace, - TraceFlags + TraceFlags, } from '@opentelemetry/api'; import { getSpan } from '@opentelemetry/api/build/src/trace/context-utils'; import { InstrumentationLibrary, sanitizeAttributes, - suppressTracing + suppressTracing, } from '@opentelemetry/core'; import * as assert from 'assert'; import { @@ -51,17 +51,27 @@ describe('Tracer', () => { const tracerProvider = new BasicTracerProvider(); let envSource: Record; if (typeof process === 'undefined') { - envSource = (globalThis as unknown) as Record; + envSource = 
globalThis as unknown as Record; } else { envSource = process.env as Record; } class TestSampler implements Sampler { - shouldSample(_context: Context, _traceId: string, _spanName: string, _spanKind: SpanKind, attributes: SpanAttributes, links: Link[]) { + shouldSample( + _context: Context, + _traceId: string, + _spanName: string, + _spanKind: SpanKind, + attributes: SpanAttributes, + links: Link[] + ) { // The attributes object should be valid. assert.deepStrictEqual(sanitizeAttributes(attributes), attributes); links.forEach(link => { - assert.deepStrictEqual(sanitizeAttributes(link.attributes), link.attributes); + assert.deepStrictEqual( + sanitizeAttributes(link.attributes), + link.attributes + ); }); return { decision: SamplingDecision.RECORD_AND_SAMPLED, @@ -75,7 +85,7 @@ describe('Tracer', () => { } class DummySpanProcessor implements SpanProcessor { - forceFlush () { + forceFlush() { return Promise.resolve(); } onStart() {} @@ -239,7 +249,15 @@ describe('Tracer', () => { const tracer = new Tracer({ name: 'default' }, { sampler }, tp); const span = tracer.startSpan('a', {}, context) as Span; assert.strictEqual(span.parentSpanId, parent.spanId); - sinon.assert.calledOnceWithExactly(shouldSampleSpy, context, parent.traceId, 'a', SpanKind.INTERNAL, {}, []); + sinon.assert.calledOnceWithExactly( + shouldSampleSpy, + context, + parent.traceId, + 'a', + SpanKind.INTERNAL, + {}, + [] + ); sinon.assert.calledOnceWithExactly(onStartSpy, span, context); }); @@ -320,15 +338,18 @@ describe('Tracer', () => { const spy = sinon.spy(tracer, 'startSpan'); - assert.strictEqual(tracer.startActiveSpan('my-span', span => { - try { - assert(spy.calledWith('my-span')); - assert.strictEqual(getSpan(context.active()), span); - return 1; - } finally { - span.end(); - } - }), 1); + assert.strictEqual( + tracer.startActiveSpan('my-span', span => { + try { + assert(spy.calledWith('my-span')); + assert.strictEqual(getSpan(context.active()), span); + return 1; + } finally { + span.end(); + } + }), + 1 + ); }); it('should start an active span with name, options and function args', () => { @@ -340,15 +361,22 @@ describe('Tracer', () => { const spy = sinon.spy(tracer, 'startSpan'); - assert.strictEqual(tracer.startActiveSpan('my-span', {attributes: {foo: 'bar'}}, span => { - try { - assert(spy.calledWith('my-span', {attributes: {foo: 'bar'}})); - assert.strictEqual(getSpan(context.active()), span); - return 1; - } finally { - span.end(); - } - }), 1); + assert.strictEqual( + tracer.startActiveSpan( + 'my-span', + { attributes: { foo: 'bar' } }, + span => { + try { + assert(spy.calledWith('my-span', { attributes: { foo: 'bar' } })); + assert.strictEqual(getSpan(context.active()), span); + return 1; + } finally { + span.end(); + } + } + ), + 1 + ); }); it('should start an active span with name, options, context and function args', () => { @@ -364,16 +392,26 @@ describe('Tracer', () => { const spy = sinon.spy(tracer, 'startSpan'); - assert.strictEqual(tracer.startActiveSpan('my-span', {attributes: {foo: 'bar'}}, ctx, span => { - try { - assert(spy.calledWith('my-span', {attributes: {foo: 'bar'}}, ctx)); - assert.strictEqual(getSpan(context.active()), span); - assert.strictEqual(ctx.getValue(ctxKey), 'bar'); - return 1; - } finally { - span.end(); - } - }), 1); + assert.strictEqual( + tracer.startActiveSpan( + 'my-span', + { attributes: { foo: 'bar' } }, + ctx, + span => { + try { + assert( + spy.calledWith('my-span', { attributes: { foo: 'bar' } }, ctx) + ); + assert.strictEqual(getSpan(context.active()), span); + 
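// --- Illustrative sketch, not part of the patch ---
// The startActiveSpan assertions reformatted here rely on two behaviours of the
// API: the callback's return value is passed through, and the span passed to
// the callback is the active span for its duration. A minimal usage sketch
// (doWork is a made-up placeholder for illustration):
import { trace } from '@opentelemetry/api';

const tracer = trace.getTracer('example');
const doWork = () => 1; // hypothetical unit of work

const result = tracer.startActiveSpan(
  'my-span',
  { attributes: { foo: 'bar' } },
  span => {
    try {
      // `span` is the active span here, so nested spans become its children
      return doWork();
    } finally {
      span.end(); // startActiveSpan does not end the span for you
    }
  }
);
// result === 1
// --- end sketch ---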
assert.strictEqual(ctx.getValue(ctxKey), 'bar'); + return 1; + } finally { + span.end(); + } + } + ), + 1 + ); }); it('should sample with valid attributes', () => { @@ -383,20 +421,28 @@ describe('Tracer', () => { tracerProvider ); - const attributes = { ...validAttributes, ...invalidAttributes } as unknown as SpanAttributes; - const links = [{ - context: { - traceId: 'b3cda95b652f4a1592b449d5929fda1b', - spanId: '6e0c63257de34c92', - traceFlags: TraceFlags.SAMPLED + const attributes = { + ...validAttributes, + ...invalidAttributes, + } as unknown as SpanAttributes; + const links = [ + { + context: { + traceId: 'b3cda95b652f4a1592b449d5929fda1b', + spanId: '6e0c63257de34c92', + traceFlags: TraceFlags.SAMPLED, + }, + attributes: { ...attributes }, }, - attributes: { ...attributes }, - }]; + ]; // TestSampler should validate the attributes and links. const span = tracer.startSpan('my-span', { attributes, links }) as Span; span.end(); - assert.deepStrictEqual(span.attributes, { ...validAttributes, testAttribute: 'foobar' }); + assert.deepStrictEqual(span.attributes, { + ...validAttributes, + testAttribute: 'foobar', + }); assert.strictEqual(span.links.length, 1); assert.deepStrictEqual(span.links[0].attributes, validAttributes); }); diff --git a/packages/opentelemetry-sdk-trace-base/test/common/config.test.ts b/packages/opentelemetry-sdk-trace-base/test/common/config.test.ts index c3c929fad2..7f727bddf2 100644 --- a/packages/opentelemetry-sdk-trace-base/test/common/config.test.ts +++ b/packages/opentelemetry-sdk-trace-base/test/common/config.test.ts @@ -26,7 +26,7 @@ import { buildSamplerFromEnv } from '../../src/config'; describe('config', () => { let envSource: Record; if (typeof process === 'undefined') { - envSource = (globalThis as unknown) as Record; + envSource = globalThis as unknown as Record; } else { envSource = process.env as Record; } diff --git a/packages/opentelemetry-sdk-trace-base/test/common/export/BatchSpanProcessorBase.test.ts b/packages/opentelemetry-sdk-trace-base/test/common/export/BatchSpanProcessorBase.test.ts index 229e2bdb3c..9149e6cbf8 100644 --- a/packages/opentelemetry-sdk-trace-base/test/common/export/BatchSpanProcessorBase.test.ts +++ b/packages/opentelemetry-sdk-trace-base/test/common/export/BatchSpanProcessorBase.test.ts @@ -18,11 +18,17 @@ import { diag, ROOT_CONTEXT } from '@opentelemetry/api'; import { ExportResultCode, loggingErrorHandler, - setGlobalErrorHandler + setGlobalErrorHandler, } from '@opentelemetry/core'; import * as assert from 'assert'; import * as sinon from 'sinon'; -import { AlwaysOnSampler, BasicTracerProvider, BufferConfig, InMemorySpanExporter, Span } from '../../../src'; +import { + AlwaysOnSampler, + BasicTracerProvider, + BufferConfig, + InMemorySpanExporter, + Span, +} from '../../../src'; import { context } from '@opentelemetry/api'; import { TestRecordOnlySampler } from './TestRecordOnlySampler'; import { TestTracingSpanExporter } from './TestTracingSpanExporter'; @@ -96,7 +102,7 @@ describe('BatchSpanProcessorBase', () => { let env: Record; if (typeof process === 'undefined') { - env = (globalThis as unknown) as Record; + env = globalThis as unknown as Record; } else { env = process.env as Record; } @@ -405,7 +411,8 @@ describe('BatchSpanProcessorBase', () => { processor.onEnd(span); processor.forceFlush().then(() => { - const exporterCreatedSpans = testTracingExporter.getExporterCreatedSpans(); + const exporterCreatedSpans = + testTracingExporter.getExporterCreatedSpans(); assert.equal(exporterCreatedSpans.length, 0); done(); 
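// --- Illustrative sketch, not part of the patch ---
// The BatchSpanProcessorBase tests reformatted around here (together with the
// diag.warn clamp hunk in the processor source earlier in this patch) exercise
// the queue/batch sizing rules: a maxExportBatchSize larger than maxQueueSize
// is lowered to match, with a warning. A minimal wiring sketch under that
// assumption:
import {
  BasicTracerProvider,
  BatchSpanProcessor,
  InMemorySpanExporter,
} from '@opentelemetry/sdk-trace-base';

const exporter = new InMemorySpanExporter();
const processor = new BatchSpanProcessor(exporter, {
  maxQueueSize: 6,
  maxExportBatchSize: 7, // effectively 6: clamped to maxQueueSize, warns via diag
  scheduledDelayMillis: 5000,
});

const provider = new BasicTracerProvider();
provider.addSpanProcessor(processor);
provider.getTracer('example').startSpan('demo').end();

// Ended spans are queued and exported in batches; forceFlush() drains the
// queue immediately, e.g. before shutdown.
void processor.forceFlush();
// --- end sketch ---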
@@ -441,14 +448,16 @@ describe('BatchSpanProcessorBase', () => { describe('when "maxExportBatchSize" is greater than "maxQueueSize"', () => { beforeEach(() => { - processor = new BatchSpanProcessor( - exporter,{ - maxExportBatchSize: 7, - maxQueueSize: 6, - }); + processor = new BatchSpanProcessor(exporter, { + maxExportBatchSize: 7, + maxQueueSize: 6, + }); }); it('should match maxQueueSize', () => { - assert.equal(processor['_maxExportBatchSize'], processor['_maxQueueSize']); + assert.equal( + processor['_maxExportBatchSize'], + processor['_maxQueueSize'] + ); }); }); }); diff --git a/packages/opentelemetry-sdk-trace-base/test/common/export/ConsoleSpanExporter.test.ts b/packages/opentelemetry-sdk-trace-base/test/common/export/ConsoleSpanExporter.test.ts index e6712adb95..633cac3ec7 100644 --- a/packages/opentelemetry-sdk-trace-base/test/common/export/ConsoleSpanExporter.test.ts +++ b/packages/opentelemetry-sdk-trace-base/test/common/export/ConsoleSpanExporter.test.ts @@ -14,10 +14,7 @@ * limitations under the License. */ -import { - SpanContext, - TraceFlags, -} from '@opentelemetry/api'; +import { SpanContext, TraceFlags } from '@opentelemetry/api'; import * as assert from 'assert'; import * as sinon from 'sinon'; import { @@ -64,7 +61,7 @@ describe('ConsoleSpanExporter', () => { traceFlags: TraceFlags.SAMPLED, }; const span = tracer.startSpan('foo', { - links: [ { context, attributes: { anAttr: 'aValue' } } ] + links: [{ context, attributes: { anAttr: 'aValue' } }], }); span.addEvent('foobar'); span.end(); diff --git a/packages/opentelemetry-sdk-trace-base/test/common/export/InMemorySpanExporter.test.ts b/packages/opentelemetry-sdk-trace-base/test/common/export/InMemorySpanExporter.test.ts index 66514155f0..df9105d97b 100644 --- a/packages/opentelemetry-sdk-trace-base/test/common/export/InMemorySpanExporter.test.ts +++ b/packages/opentelemetry-sdk-trace-base/test/common/export/InMemorySpanExporter.test.ts @@ -54,8 +54,14 @@ describe('InMemorySpanExporter', () => { assert.strictEqual(span1.name, 'grand-child'); assert.strictEqual(span2.name, 'child'); assert.strictEqual(span3.name, 'root'); - assert.strictEqual(span1.spanContext().traceId, span2.spanContext().traceId); - assert.strictEqual(span2.spanContext().traceId, span3.spanContext().traceId); + assert.strictEqual( + span1.spanContext().traceId, + span2.spanContext().traceId + ); + assert.strictEqual( + span2.spanContext().traceId, + span3.spanContext().traceId + ); assert.strictEqual(span1.parentSpanId, span2.spanContext().spanId); assert.strictEqual(span2.parentSpanId, span3.spanContext().spanId); }); diff --git a/packages/opentelemetry-sdk-trace-base/test/common/export/SimpleSpanProcessor.test.ts b/packages/opentelemetry-sdk-trace-base/test/common/export/SimpleSpanProcessor.test.ts index b127502ad8..fda60f500e 100644 --- a/packages/opentelemetry-sdk-trace-base/test/common/export/SimpleSpanProcessor.test.ts +++ b/packages/opentelemetry-sdk-trace-base/test/common/export/SimpleSpanProcessor.test.ts @@ -198,7 +198,8 @@ describe('SimpleSpanProcessor', () => { processor.onStart(span, ROOT_CONTEXT); processor.onEnd(span); - const exporterCreatedSpans = testTracingExporter.getExporterCreatedSpans(); + const exporterCreatedSpans = + testTracingExporter.getExporterCreatedSpans(); assert.equal(exporterCreatedSpans.length, 0); }); }); diff --git a/packages/opentelemetry-sdk-trace-node/test/NodeTracerProvider.test.ts b/packages/opentelemetry-sdk-trace-node/test/NodeTracerProvider.test.ts index 3ae6011759..1b1dbbd6d8 100644 --- 
a/packages/opentelemetry-sdk-trace-node/test/NodeTracerProvider.test.ts +++ b/packages/opentelemetry-sdk-trace-node/test/NodeTracerProvider.test.ts @@ -136,7 +136,11 @@ describe('NodeTracerProvider', () => { const span = provider .getTracer('default') - .startSpan('child-span', {}, trace.setSpan(ROOT_CONTEXT, sampledParent)); + .startSpan( + 'child-span', + {}, + trace.setSpan(ROOT_CONTEXT, sampledParent) + ); assert.ok(span instanceof Span); assert.strictEqual(span.spanContext().traceFlags, TraceFlags.SAMPLED); assert.strictEqual(span.isRecording(), true); @@ -148,7 +152,9 @@ describe('NodeTracerProvider', () => { assert.ok(span); assert.ok(span.resource instanceof Resource); assert.equal( - span.resource.attributes[SemanticResourceAttributes.TELEMETRY_SDK_LANGUAGE], + span.resource.attributes[ + SemanticResourceAttributes.TELEMETRY_SDK_LANGUAGE + ], 'nodejs' ); }); @@ -247,14 +253,9 @@ describe('NodeTracerProvider', () => { }); }); - describe('Custom TracerProvider through inheritance', () => { class DummyPropagator implements TextMapPropagator { - inject( - context: Context, - carrier: any, - setter: TextMapSetter - ): void { + inject(context: Context, carrier: any, setter: TextMapSetter): void { throw new Error('Method not implemented.'); } extract( @@ -296,16 +297,12 @@ describe('NodeTracerProvider', () => { protected static override readonly _registeredPropagators = new Map< string, () => TextMapPropagator - >([ - ['custom-propagator', () => propagator], - ]); + >([['custom-propagator', () => propagator]]); protected static override readonly _registeredExporters = new Map< string, () => SpanExporter - >([ - ['custom-exporter', () => new DummyExporter()], - ]); + >([['custom-exporter', () => new DummyExporter()]]); } const provider = new CustomTracerProvider({}); @@ -327,25 +324,25 @@ describe('NodeTracerProvider', () => { protected static override readonly _registeredPropagators = new Map< string, () => TextMapPropagator - >([ - ['custom-propagator', () => propagator], - ]); + >([['custom-propagator', () => propagator]]); protected static override readonly _registeredExporters = new Map< string, () => SpanExporter - >([ - ['custom-exporter', () => new DummyExporter()], - ]); + >([['custom-exporter', () => new DummyExporter()]]); - protected override _getPropagator(name: string): TextMapPropagator | undefined { + protected override _getPropagator( + name: string + ): TextMapPropagator | undefined { return ( super._getPropagator(name) || CustomTracerProvider._registeredPropagators.get(name)?.() ); } - protected override _getSpanExporter(name: string): SpanExporter | undefined { + protected override _getSpanExporter( + name: string + ): SpanExporter | undefined { return ( super._getSpanExporter(name) || CustomTracerProvider._registeredExporters.get(name)?.() diff --git a/packages/opentelemetry-sdk-trace-node/test/registration.test.ts b/packages/opentelemetry-sdk-trace-node/test/registration.test.ts index 09cc24b327..5c35f8de97 100644 --- a/packages/opentelemetry-sdk-trace-node/test/registration.test.ts +++ b/packages/opentelemetry-sdk-trace-node/test/registration.test.ts @@ -55,7 +55,8 @@ describe('API registration', () => { assertInstanceOf(context['_getContextManager'](), DefaultContextManager); assertInstanceOf( - propagation['_getGlobalPropagator'](), CompositePropagator + propagation['_getGlobalPropagator'](), + CompositePropagator ); const apiTracerProvider = trace.getTracerProvider() as ProxyTracerProvider; @@ -87,10 +88,15 @@ describe('API registration', () => { contextManager: 
null, }); - assert.strictEqual(context['_getContextManager'](), ctxManager, 'context manager should not change'); + assert.strictEqual( + context['_getContextManager'](), + ctxManager, + 'context manager should not change' + ); assertInstanceOf( - propagation['_getGlobalPropagator'](), CompositePropagator + propagation['_getGlobalPropagator'](), + CompositePropagator ); const apiTracerProvider = trace.getTracerProvider() as ProxyTracerProvider; diff --git a/packages/opentelemetry-sdk-trace-web/src/StackContextManager.ts b/packages/opentelemetry-sdk-trace-web/src/StackContextManager.ts index fa4e92ad1a..b15b0b03ca 100644 --- a/packages/opentelemetry-sdk-trace-web/src/StackContextManager.ts +++ b/packages/opentelemetry-sdk-trace-web/src/StackContextManager.ts @@ -51,7 +51,7 @@ export class StackContextManager implements ContextManager { writable: false, value: target.length, }); - return (contextWrapper as unknown) as T; + return contextWrapper as unknown as T; } /** diff --git a/packages/opentelemetry-sdk-trace-web/src/utils.ts b/packages/opentelemetry-sdk-trace-web/src/utils.ts index 664bfceccb..30031e0623 100644 --- a/packages/opentelemetry-sdk-trace-web/src/utils.ts +++ b/packages/opentelemetry-sdk-trace-web/src/utils.ts @@ -108,7 +108,9 @@ export function addSpanNetworkEvents( * sort resources by startTime * @param filteredResources */ -export function sortResources(filteredResources: PerformanceResourceTiming[]): PerformanceResourceTiming[] { +export function sortResources( + filteredResources: PerformanceResourceTiming[] +): PerformanceResourceTiming[] { return filteredResources.slice().sort((a, b) => { const valueA = a[PTN.FETCH_START]; const valueB = b[PTN.FETCH_START]; diff --git a/packages/opentelemetry-sdk-trace-web/test/StackContextManager.test.ts b/packages/opentelemetry-sdk-trace-web/test/StackContextManager.test.ts index 8fb1e34d13..bcad24015d 100644 --- a/packages/opentelemetry-sdk-trace-web/test/StackContextManager.test.ts +++ b/packages/opentelemetry-sdk-trace-web/test/StackContextManager.test.ts @@ -182,13 +182,19 @@ describe('StackContextManager', () => { it('should return the same target (when enabled)', () => { const test = ROOT_CONTEXT.setValue(key1, 1); - assert.deepStrictEqual(contextManager.bind(contextManager.active(), test), test); + assert.deepStrictEqual( + contextManager.bind(contextManager.active(), test), + test + ); }); it('should return the same target (when disabled)', () => { contextManager.disable(); const test = ROOT_CONTEXT.setValue(key1, 1); - assert.deepStrictEqual(contextManager.bind(contextManager.active(), test), test); + assert.deepStrictEqual( + contextManager.bind(contextManager.active(), test), + test + ); contextManager.enable(); }); diff --git a/packages/opentelemetry-sdk-trace-web/test/WebTracerProvider.test.ts b/packages/opentelemetry-sdk-trace-web/test/WebTracerProvider.test.ts index e9e83fc654..de63292a6a 100644 --- a/packages/opentelemetry-sdk-trace-web/test/WebTracerProvider.test.ts +++ b/packages/opentelemetry-sdk-trace-web/test/WebTracerProvider.test.ts @@ -96,12 +96,10 @@ describe('WebTracerProvider', () => { trace.getSpan(context.active()) === rootSpan, 'Current span is rootSpan' ); - const concurrentSpan1 = webTracerWithZone.startSpan( - 'concurrentSpan1' - ); - const concurrentSpan2 = webTracerWithZone.startSpan( - 'concurrentSpan2' - ); + const concurrentSpan1 = + webTracerWithZone.startSpan('concurrentSpan1'); + const concurrentSpan2 = + webTracerWithZone.startSpan('concurrentSpan2'); 
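// --- Illustrative sketch, not part of the patch ---
// The WebTracerProvider test reformatted here starts sibling spans and switches
// between them with context.with(); the configured context manager
// (ZoneContextManager in that test) keeps the right span active across async
// callbacks. The underlying API pattern, sketched:
import { context, trace } from '@opentelemetry/api';

const tracer = trace.getTracer('web-example');
const parent = tracer.startSpan('parent');

context.with(trace.setSpan(context.active(), parent), () => {
  // Inside this callback, trace.getSpan(context.active()) === parent,
  // so spans started here become its children.
  const child = tracer.startSpan('child');
  child.end();
});

parent.end();
// --- end sketch ---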
context.with(trace.setSpan(context.active(), concurrentSpan1), () => { setTimeout(() => { @@ -132,7 +130,9 @@ describe('WebTracerProvider', () => { assert.ok(span); assert.ok(span.resource instanceof Resource); assert.equal( - span.resource.attributes[SemanticResourceAttributes.TELEMETRY_SDK_LANGUAGE], + span.resource.attributes[ + SemanticResourceAttributes.TELEMETRY_SDK_LANGUAGE + ], 'webjs' ); }); diff --git a/packages/opentelemetry-sdk-trace-web/test/registration.test.ts b/packages/opentelemetry-sdk-trace-web/test/registration.test.ts index e001b2b742..0de022a65f 100644 --- a/packages/opentelemetry-sdk-trace-web/test/registration.test.ts +++ b/packages/opentelemetry-sdk-trace-web/test/registration.test.ts @@ -46,7 +46,7 @@ describe('API registration', () => { it('should register configured implementations', () => { const tracerProvider = new WebTracerProvider(); - const contextManager = { disable() { }, enable() { } } as any; + const contextManager = { disable() {}, enable() {} } as any; const propagator = {} as any; tracerProvider.register({ @@ -68,7 +68,11 @@ describe('API registration', () => { contextManager: null, }); - assert.strictEqual(context['_getContextManager'](), ctxManager, 'context manager should not change'); + assert.strictEqual( + context['_getContextManager'](), + ctxManager, + 'context manager should not change' + ); assert.ok( propagation['_getGlobalPropagator']() instanceof CompositePropagator @@ -84,7 +88,11 @@ describe('API registration', () => { propagator: null, }); - assert.strictEqual(propagation['_getGlobalPropagator'](), propagator, 'propagator should not change'); + assert.strictEqual( + propagation['_getGlobalPropagator'](), + propagator, + 'propagator should not change' + ); assert.ok(context['_getContextManager']() instanceof StackContextManager); const apiTracerProvider = trace.getTracerProvider() as ProxyTracerProvider; diff --git a/packages/opentelemetry-sdk-trace-web/test/utils.test.ts b/packages/opentelemetry-sdk-trace-web/test/utils.test.ts index 144a773c7f..06e550c1f0 100644 --- a/packages/opentelemetry-sdk-trace-web/test/utils.test.ts +++ b/packages/opentelemetry-sdk-trace-web/test/utils.test.ts @@ -95,10 +95,10 @@ describe('utils', () => { it('should add all network events to span', () => { const addEventSpy = sinon.spy(); const setAttributeSpy = sinon.spy(); - const span = ({ + const span = { addEvent: addEventSpy, setAttribute: setAttributeSpy, - } as unknown) as tracing.Span; + } as unknown as tracing.Span; const entries = { [PTN.FETCH_START]: 123, [PTN.DOMAIN_LOOKUP_START]: 123, @@ -123,10 +123,10 @@ describe('utils', () => { it('should only include encoded size when content encoding is being used', () => { const addEventSpy = sinon.spy(); const setAttributeSpy = sinon.spy(); - const span = ({ + const span = { addEvent: addEventSpy, setAttribute: setAttributeSpy, - } as unknown) as tracing.Span; + } as unknown as tracing.Span; const entries = { [PTN.DECODED_BODY_SIZE]: 123, [PTN.ENCODED_BODY_SIZE]: 123, @@ -145,9 +145,9 @@ describe('utils', () => { describe(`when entry is ${value}`, () => { it('should add event to span', () => { const addEventSpy = sinon.spy(); - const span = ({ + const span = { addEvent: addEventSpy, - } as unknown) as tracing.Span; + } as unknown as tracing.Span; const entries = { [PTN.FETCH_START]: value, } as PerformanceEntries; @@ -167,9 +167,9 @@ describe('utils', () => { describe('when entry is not numeric', () => { it('should NOT add event to span', () => { const addEventSpy = sinon.spy(); - const span = ({ + const 
span = { addEvent: addEventSpy, - } as unknown) as tracing.Span; + } as unknown as tracing.Span; const entries = { [PTN.FETCH_START]: 'non-numeric', } as unknown; @@ -188,9 +188,9 @@ describe('utils', () => { describe('when entries does NOT contain the performance', () => { it('should NOT add event to span', () => { const addEventSpy = sinon.spy(); - const span = ({ + const span = { addEvent: addEventSpy, - } as unknown) as tracing.Span; + } as unknown as tracing.Span; const entries = { [PTN.FETCH_START]: 123, } as PerformanceEntries; diff --git a/packages/opentelemetry-semantic-conventions/src/resource/SemanticResourceAttributes.ts b/packages/opentelemetry-semantic-conventions/src/resource/SemanticResourceAttributes.ts index ead775e80e..e57e4d959c 100644 --- a/packages/opentelemetry-semantic-conventions/src/resource/SemanticResourceAttributes.ts +++ b/packages/opentelemetry-semantic-conventions/src/resource/SemanticResourceAttributes.ts @@ -16,153 +16,152 @@ // DO NOT EDIT, this is an Auto-generated file from scripts/semconv/templates//templates/SemanticAttributes.ts.j2 export const SemanticResourceAttributes = { - /** - * Name of the cloud provider. - */ + * Name of the cloud provider. + */ CLOUD_PROVIDER: 'cloud.provider', /** - * The cloud account ID the resource is assigned to. - */ + * The cloud account ID the resource is assigned to. + */ CLOUD_ACCOUNT_ID: 'cloud.account.id', /** - * The geographical region the resource is running. Refer to your provider's docs to see the available regions, for example [Alibaba Cloud regions](https://www.alibabacloud.com/help/doc-detail/40654.htm), [AWS regions](https://aws.amazon.com/about-aws/global-infrastructure/regions_az/), [Azure regions](https://azure.microsoft.com/en-us/global-infrastructure/geographies/), or [Google Cloud regions](https://cloud.google.com/about/locations). - */ + * The geographical region the resource is running. Refer to your provider's docs to see the available regions, for example [Alibaba Cloud regions](https://www.alibabacloud.com/help/doc-detail/40654.htm), [AWS regions](https://aws.amazon.com/about-aws/global-infrastructure/regions_az/), [Azure regions](https://azure.microsoft.com/en-us/global-infrastructure/geographies/), or [Google Cloud regions](https://cloud.google.com/about/locations). + */ CLOUD_REGION: 'cloud.region', /** - * Cloud regions often have multiple, isolated locations known as zones to increase availability. Availability zone represents the zone where the resource is running. - * - * Note: Availability zones are called "zones" on Alibaba Cloud and Google Cloud. - */ + * Cloud regions often have multiple, isolated locations known as zones to increase availability. Availability zone represents the zone where the resource is running. + * + * Note: Availability zones are called "zones" on Alibaba Cloud and Google Cloud. + */ CLOUD_AVAILABILITY_ZONE: 'cloud.availability_zone', /** - * The cloud platform in use. - * - * Note: The prefix of the service SHOULD match the one specified in `cloud.provider`. - */ + * The cloud platform in use. + * + * Note: The prefix of the service SHOULD match the one specified in `cloud.provider`. + */ CLOUD_PLATFORM: 'cloud.platform', /** - * The Amazon Resource Name (ARN) of an [ECS container instance](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/ECS_instances.html). - */ + * The Amazon Resource Name (ARN) of an [ECS container instance](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/ECS_instances.html). 
+ */ AWS_ECS_CONTAINER_ARN: 'aws.ecs.container.arn', /** - * The ARN of an [ECS cluster](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/clusters.html). - */ + * The ARN of an [ECS cluster](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/clusters.html). + */ AWS_ECS_CLUSTER_ARN: 'aws.ecs.cluster.arn', /** - * The [launch type](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/launch_types.html) for an ECS task. - */ + * The [launch type](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/launch_types.html) for an ECS task. + */ AWS_ECS_LAUNCHTYPE: 'aws.ecs.launchtype', /** - * The ARN of an [ECS task definition](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/task_definitions.html). - */ + * The ARN of an [ECS task definition](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/task_definitions.html). + */ AWS_ECS_TASK_ARN: 'aws.ecs.task.arn', /** - * The task definition family this task definition is a member of. - */ + * The task definition family this task definition is a member of. + */ AWS_ECS_TASK_FAMILY: 'aws.ecs.task.family', /** - * The revision for this task definition. - */ + * The revision for this task definition. + */ AWS_ECS_TASK_REVISION: 'aws.ecs.task.revision', /** - * The ARN of an EKS cluster. - */ + * The ARN of an EKS cluster. + */ AWS_EKS_CLUSTER_ARN: 'aws.eks.cluster.arn', /** - * The name(s) of the AWS log group(s) an application is writing to. - * - * Note: Multiple log groups must be supported for cases like multi-container applications, where a single application has sidecar containers, and each write to their own log group. - */ + * The name(s) of the AWS log group(s) an application is writing to. + * + * Note: Multiple log groups must be supported for cases like multi-container applications, where a single application has sidecar containers, and each write to their own log group. + */ AWS_LOG_GROUP_NAMES: 'aws.log.group.names', /** - * The Amazon Resource Name(s) (ARN) of the AWS log group(s). - * - * Note: See the [log group ARN format documentation](https://docs.aws.amazon.com/AmazonCloudWatch/latest/logs/iam-access-control-overview-cwl.html#CWL_ARN_Format). - */ + * The Amazon Resource Name(s) (ARN) of the AWS log group(s). + * + * Note: See the [log group ARN format documentation](https://docs.aws.amazon.com/AmazonCloudWatch/latest/logs/iam-access-control-overview-cwl.html#CWL_ARN_Format). + */ AWS_LOG_GROUP_ARNS: 'aws.log.group.arns', /** - * The name(s) of the AWS log stream(s) an application is writing to. - */ + * The name(s) of the AWS log stream(s) an application is writing to. + */ AWS_LOG_STREAM_NAMES: 'aws.log.stream.names', /** - * The ARN(s) of the AWS log stream(s). - * - * Note: See the [log stream ARN format documentation](https://docs.aws.amazon.com/AmazonCloudWatch/latest/logs/iam-access-control-overview-cwl.html#CWL_ARN_Format). One log group can contain several log streams, so these ARNs necessarily identify both a log group and a log stream. - */ + * The ARN(s) of the AWS log stream(s). + * + * Note: See the [log stream ARN format documentation](https://docs.aws.amazon.com/AmazonCloudWatch/latest/logs/iam-access-control-overview-cwl.html#CWL_ARN_Format). One log group can contain several log streams, so these ARNs necessarily identify both a log group and a log stream. + */ AWS_LOG_STREAM_ARNS: 'aws.log.stream.arns', /** - * Container name. - */ + * Container name. + */ CONTAINER_NAME: 'container.name', /** - * Container ID. 
Usually a UUID, as for example used to [identify Docker containers](https://docs.docker.com/engine/reference/run/#container-identification). The UUID might be abbreviated. - */ + * Container ID. Usually a UUID, as for example used to [identify Docker containers](https://docs.docker.com/engine/reference/run/#container-identification). The UUID might be abbreviated. + */ CONTAINER_ID: 'container.id', /** - * The container runtime managing this container. - */ + * The container runtime managing this container. + */ CONTAINER_RUNTIME: 'container.runtime', /** - * Name of the image the container was built on. - */ + * Name of the image the container was built on. + */ CONTAINER_IMAGE_NAME: 'container.image.name', /** - * Container image tag. - */ + * Container image tag. + */ CONTAINER_IMAGE_TAG: 'container.image.tag', /** - * Name of the [deployment environment](https://en.wikipedia.org/wiki/Deployment_environment) (aka deployment tier). - */ + * Name of the [deployment environment](https://en.wikipedia.org/wiki/Deployment_environment) (aka deployment tier). + */ DEPLOYMENT_ENVIRONMENT: 'deployment.environment', /** - * A unique identifier representing the device. - * - * Note: The device identifier MUST only be defined using the values outlined below. This value is not an advertising identifier and MUST NOT be used as such. On iOS (Swift or Objective-C), this value MUST be equal to the [vendor identifier](https://developer.apple.com/documentation/uikit/uidevice/1620059-identifierforvendor). On Android (Java or Kotlin), this value MUST be equal to the Firebase Installation ID or a globally unique UUID which is persisted across sessions in your application. More information can be found [here](https://developer.android.com/training/articles/user-data-ids) on best practices and exact implementation details. Caution should be taken when storing personal data or anything which can identify a user. GDPR and data protection laws may apply, ensure you do your own due diligence. - */ + * A unique identifier representing the device. + * + * Note: The device identifier MUST only be defined using the values outlined below. This value is not an advertising identifier and MUST NOT be used as such. On iOS (Swift or Objective-C), this value MUST be equal to the [vendor identifier](https://developer.apple.com/documentation/uikit/uidevice/1620059-identifierforvendor). On Android (Java or Kotlin), this value MUST be equal to the Firebase Installation ID or a globally unique UUID which is persisted across sessions in your application. More information can be found [here](https://developer.android.com/training/articles/user-data-ids) on best practices and exact implementation details. Caution should be taken when storing personal data or anything which can identify a user. GDPR and data protection laws may apply, ensure you do your own due diligence. + */ DEVICE_ID: 'device.id', /** - * The model identifier for the device. - * - * Note: It's recommended this value represents a machine readable version of the model identifier rather than the market or consumer-friendly name of the device. - */ + * The model identifier for the device. + * + * Note: It's recommended this value represents a machine readable version of the model identifier rather than the market or consumer-friendly name of the device. + */ DEVICE_MODEL_IDENTIFIER: 'device.model.identifier', /** - * The marketing name for the device model. 
- * - * Note: It's recommended this value represents a human readable version of the device model rather than a machine readable alternative. - */ + * The marketing name for the device model. + * + * Note: It's recommended this value represents a human readable version of the device model rather than a machine readable alternative. + */ DEVICE_MODEL_NAME: 'device.model.name', /** - * The name of the single function that this runtime instance executes. - * - * Note: This is the name of the function as configured/deployed on the FaaS platform and is usually different from the name of the callback function (which may be stored in the [`code.namespace`/`code.function`](../../trace/semantic_conventions/span-general.md#source-code-attributes) span attributes). - */ + * The name of the single function that this runtime instance executes. + * + * Note: This is the name of the function as configured/deployed on the FaaS platform and is usually different from the name of the callback function (which may be stored in the [`code.namespace`/`code.function`](../../trace/semantic_conventions/span-general.md#source-code-attributes) span attributes). + */ FAAS_NAME: 'faas.name', /** @@ -201,282 +200,281 @@ As an alternative, consider setting `faas.id` as a span attribute instead. FAAS_VERSION: 'faas.version', /** - * The execution environment ID as a string, that will be potentially reused for other invocations to the same function/function version. - * - * Note: * **AWS Lambda:** Use the (full) log stream name. - */ + * The execution environment ID as a string, that will be potentially reused for other invocations to the same function/function version. + * + * Note: * **AWS Lambda:** Use the (full) log stream name. + */ FAAS_INSTANCE: 'faas.instance', /** - * The amount of memory available to the serverless function in MiB. - * - * Note: It's recommended to set this attribute since e.g. too little memory can easily stop a Java AWS Lambda function from working correctly. On AWS Lambda, the environment variable `AWS_LAMBDA_FUNCTION_MEMORY_SIZE` provides this information. - */ + * The amount of memory available to the serverless function in MiB. + * + * Note: It's recommended to set this attribute since e.g. too little memory can easily stop a Java AWS Lambda function from working correctly. On AWS Lambda, the environment variable `AWS_LAMBDA_FUNCTION_MEMORY_SIZE` provides this information. + */ FAAS_MAX_MEMORY: 'faas.max_memory', /** - * Unique host ID. For Cloud, this must be the instance_id assigned by the cloud provider. - */ + * Unique host ID. For Cloud, this must be the instance_id assigned by the cloud provider. + */ HOST_ID: 'host.id', /** - * Name of the host. On Unix systems, it may contain what the hostname command returns, or the fully qualified hostname, or another name specified by the user. - */ + * Name of the host. On Unix systems, it may contain what the hostname command returns, or the fully qualified hostname, or another name specified by the user. + */ HOST_NAME: 'host.name', /** - * Type of host. For Cloud, this must be the machine type. - */ + * Type of host. For Cloud, this must be the machine type. + */ HOST_TYPE: 'host.type', /** - * The CPU architecture the host system is running on. - */ + * The CPU architecture the host system is running on. + */ HOST_ARCH: 'host.arch', /** - * Name of the VM image or OS install the host was instantiated from. - */ + * Name of the VM image or OS install the host was instantiated from. + */ HOST_IMAGE_NAME: 'host.image.name', /** - * VM image ID. 
For Cloud, this value is from the provider. - */ + * VM image ID. For Cloud, this value is from the provider. + */ HOST_IMAGE_ID: 'host.image.id', /** - * The version string of the VM image as defined in [Version SpanAttributes](README.md#version-attributes). - */ + * The version string of the VM image as defined in [Version SpanAttributes](README.md#version-attributes). + */ HOST_IMAGE_VERSION: 'host.image.version', /** - * The name of the cluster. - */ + * The name of the cluster. + */ K8S_CLUSTER_NAME: 'k8s.cluster.name', /** - * The name of the Node. - */ + * The name of the Node. + */ K8S_NODE_NAME: 'k8s.node.name', /** - * The UID of the Node. - */ + * The UID of the Node. + */ K8S_NODE_UID: 'k8s.node.uid', /** - * The name of the namespace that the pod is running in. - */ + * The name of the namespace that the pod is running in. + */ K8S_NAMESPACE_NAME: 'k8s.namespace.name', /** - * The UID of the Pod. - */ + * The UID of the Pod. + */ K8S_POD_UID: 'k8s.pod.uid', /** - * The name of the Pod. - */ + * The name of the Pod. + */ K8S_POD_NAME: 'k8s.pod.name', /** - * The name of the Container in a Pod template. - */ + * The name of the Container in a Pod template. + */ K8S_CONTAINER_NAME: 'k8s.container.name', /** - * The UID of the ReplicaSet. - */ + * The UID of the ReplicaSet. + */ K8S_REPLICASET_UID: 'k8s.replicaset.uid', /** - * The name of the ReplicaSet. - */ + * The name of the ReplicaSet. + */ K8S_REPLICASET_NAME: 'k8s.replicaset.name', /** - * The UID of the Deployment. - */ + * The UID of the Deployment. + */ K8S_DEPLOYMENT_UID: 'k8s.deployment.uid', /** - * The name of the Deployment. - */ + * The name of the Deployment. + */ K8S_DEPLOYMENT_NAME: 'k8s.deployment.name', /** - * The UID of the StatefulSet. - */ + * The UID of the StatefulSet. + */ K8S_STATEFULSET_UID: 'k8s.statefulset.uid', /** - * The name of the StatefulSet. - */ + * The name of the StatefulSet. + */ K8S_STATEFULSET_NAME: 'k8s.statefulset.name', /** - * The UID of the DaemonSet. - */ + * The UID of the DaemonSet. + */ K8S_DAEMONSET_UID: 'k8s.daemonset.uid', /** - * The name of the DaemonSet. - */ + * The name of the DaemonSet. + */ K8S_DAEMONSET_NAME: 'k8s.daemonset.name', /** - * The UID of the Job. - */ + * The UID of the Job. + */ K8S_JOB_UID: 'k8s.job.uid', /** - * The name of the Job. - */ + * The name of the Job. + */ K8S_JOB_NAME: 'k8s.job.name', /** - * The UID of the CronJob. - */ + * The UID of the CronJob. + */ K8S_CRONJOB_UID: 'k8s.cronjob.uid', /** - * The name of the CronJob. - */ + * The name of the CronJob. + */ K8S_CRONJOB_NAME: 'k8s.cronjob.name', /** - * The operating system type. - */ + * The operating system type. + */ OS_TYPE: 'os.type', /** - * Human readable (not intended to be parsed) OS version information, like e.g. reported by `ver` or `lsb_release -a` commands. - */ + * Human readable (not intended to be parsed) OS version information, like e.g. reported by `ver` or `lsb_release -a` commands. + */ OS_DESCRIPTION: 'os.description', /** - * Human readable operating system name. - */ + * Human readable operating system name. + */ OS_NAME: 'os.name', /** - * The version string of the operating system as defined in [Version SpanAttributes](../../resource/semantic_conventions/README.md#version-attributes). - */ + * The version string of the operating system as defined in [Version SpanAttributes](../../resource/semantic_conventions/README.md#version-attributes). + */ OS_VERSION: 'os.version', /** - * Process identifier (PID). - */ + * Process identifier (PID). 
+ */ PROCESS_PID: 'process.pid', /** - * The name of the process executable. On Linux based systems, can be set to the `Name` in `proc/[pid]/status`. On Windows, can be set to the base name of `GetProcessImageFileNameW`. - */ + * The name of the process executable. On Linux based systems, can be set to the `Name` in `proc/[pid]/status`. On Windows, can be set to the base name of `GetProcessImageFileNameW`. + */ PROCESS_EXECUTABLE_NAME: 'process.executable.name', /** - * The full path to the process executable. On Linux based systems, can be set to the target of `proc/[pid]/exe`. On Windows, can be set to the result of `GetProcessImageFileNameW`. - */ + * The full path to the process executable. On Linux based systems, can be set to the target of `proc/[pid]/exe`. On Windows, can be set to the result of `GetProcessImageFileNameW`. + */ PROCESS_EXECUTABLE_PATH: 'process.executable.path', /** - * The command used to launch the process (i.e. the command name). On Linux based systems, can be set to the zeroth string in `proc/[pid]/cmdline`. On Windows, can be set to the first parameter extracted from `GetCommandLineW`. - */ + * The command used to launch the process (i.e. the command name). On Linux based systems, can be set to the zeroth string in `proc/[pid]/cmdline`. On Windows, can be set to the first parameter extracted from `GetCommandLineW`. + */ PROCESS_COMMAND: 'process.command', /** - * The full command used to launch the process as a single string representing the full command. On Windows, can be set to the result of `GetCommandLineW`. Do not set this if you have to assemble it just for monitoring; use `process.command_args` instead. - */ + * The full command used to launch the process as a single string representing the full command. On Windows, can be set to the result of `GetCommandLineW`. Do not set this if you have to assemble it just for monitoring; use `process.command_args` instead. + */ PROCESS_COMMAND_LINE: 'process.command_line', /** - * All the command arguments (including the command/executable itself) as received by the process. On Linux-based systems (and some other Unixoid systems supporting procfs), can be set according to the list of null-delimited strings extracted from `proc/[pid]/cmdline`. For libc-based executables, this would be the full argv vector passed to `main`. - */ + * All the command arguments (including the command/executable itself) as received by the process. On Linux-based systems (and some other Unixoid systems supporting procfs), can be set according to the list of null-delimited strings extracted from `proc/[pid]/cmdline`. For libc-based executables, this would be the full argv vector passed to `main`. + */ PROCESS_COMMAND_ARGS: 'process.command_args', /** - * The username of the user that owns the process. - */ + * The username of the user that owns the process. + */ PROCESS_OWNER: 'process.owner', /** - * The name of the runtime of this process. For compiled native binaries, this SHOULD be the name of the compiler. - */ + * The name of the runtime of this process. For compiled native binaries, this SHOULD be the name of the compiler. + */ PROCESS_RUNTIME_NAME: 'process.runtime.name', /** - * The version of the runtime of this process, as returned by the runtime without modification. - */ + * The version of the runtime of this process, as returned by the runtime without modification. 
+ */ PROCESS_RUNTIME_VERSION: 'process.runtime.version', /** - * An additional description about the runtime of the process, for example a specific vendor customization of the runtime environment. - */ + * An additional description about the runtime of the process, for example a specific vendor customization of the runtime environment. + */ PROCESS_RUNTIME_DESCRIPTION: 'process.runtime.description', /** - * Logical name of the service. - * - * Note: MUST be the same for all instances of horizontally scaled services. If the value was not specified, SDKs MUST fallback to `unknown_service:` concatenated with [`process.executable.name`](process.md#process), e.g. `unknown_service:bash`. If `process.executable.name` is not available, the value MUST be set to `unknown_service`. - */ + * Logical name of the service. + * + * Note: MUST be the same for all instances of horizontally scaled services. If the value was not specified, SDKs MUST fallback to `unknown_service:` concatenated with [`process.executable.name`](process.md#process), e.g. `unknown_service:bash`. If `process.executable.name` is not available, the value MUST be set to `unknown_service`. + */ SERVICE_NAME: 'service.name', /** - * A namespace for `service.name`. - * - * Note: A string value having a meaning that helps to distinguish a group of services, for example the team name that owns a group of services. `service.name` is expected to be unique within the same namespace. If `service.namespace` is not specified in the Resource then `service.name` is expected to be unique for all services that have no explicit namespace defined (so the empty/unspecified namespace is simply one more valid namespace). Zero-length namespace string is assumed equal to unspecified namespace. - */ + * A namespace for `service.name`. + * + * Note: A string value having a meaning that helps to distinguish a group of services, for example the team name that owns a group of services. `service.name` is expected to be unique within the same namespace. If `service.namespace` is not specified in the Resource then `service.name` is expected to be unique for all services that have no explicit namespace defined (so the empty/unspecified namespace is simply one more valid namespace). Zero-length namespace string is assumed equal to unspecified namespace. + */ SERVICE_NAMESPACE: 'service.namespace', /** - * The string ID of the service instance. - * - * Note: MUST be unique for each instance of the same `service.namespace,service.name` pair (in other words `service.namespace,service.name,service.instance.id` triplet MUST be globally unique). The ID helps to distinguish instances of the same service that exist at the same time (e.g. instances of a horizontally scaled service). It is preferable for the ID to be persistent and stay the same for the lifetime of the service instance, however it is acceptable that the ID is ephemeral and changes during important lifetime events for the service (e.g. service restarts). If the service has no inherent unique ID that can be used as the value of this attribute it is recommended to generate a random Version 1 or Version 4 RFC 4122 UUID (services aiming for reproducible UUIDs may also use Version 5, see RFC 4122 for more recommendations). - */ + * The string ID of the service instance. + * + * Note: MUST be unique for each instance of the same `service.namespace,service.name` pair (in other words `service.namespace,service.name,service.instance.id` triplet MUST be globally unique). 
The ID helps to distinguish instances of the same service that exist at the same time (e.g. instances of a horizontally scaled service). It is preferable for the ID to be persistent and stay the same for the lifetime of the service instance, however it is acceptable that the ID is ephemeral and changes during important lifetime events for the service (e.g. service restarts). If the service has no inherent unique ID that can be used as the value of this attribute it is recommended to generate a random Version 1 or Version 4 RFC 4122 UUID (services aiming for reproducible UUIDs may also use Version 5, see RFC 4122 for more recommendations). + */ SERVICE_INSTANCE_ID: 'service.instance.id', /** - * The version string of the service API or implementation. - */ + * The version string of the service API or implementation. + */ SERVICE_VERSION: 'service.version', /** - * The name of the telemetry SDK as defined above. - */ + * The name of the telemetry SDK as defined above. + */ TELEMETRY_SDK_NAME: 'telemetry.sdk.name', /** - * The language of the telemetry SDK. - */ + * The language of the telemetry SDK. + */ TELEMETRY_SDK_LANGUAGE: 'telemetry.sdk.language', /** - * The version string of the telemetry SDK. - */ + * The version string of the telemetry SDK. + */ TELEMETRY_SDK_VERSION: 'telemetry.sdk.version', /** - * The version string of the auto instrumentation agent, if used. - */ + * The version string of the auto instrumentation agent, if used. + */ TELEMETRY_AUTO_VERSION: 'telemetry.auto.version', /** - * The name of the web engine. - */ + * The name of the web engine. + */ WEBENGINE_NAME: 'webengine.name', /** - * The version of the web engine. - */ + * The version of the web engine. + */ WEBENGINE_VERSION: 'webengine.version', /** - * Additional description of the web engine (e.g. detailed version and edition information). - */ + * Additional description of the web engine (e.g. detailed version and edition information). + */ WEBENGINE_DESCRIPTION: 'webengine.description', }; - export const CloudProviderValues = { /** Alibaba Cloud. */ ALIBABA_CLOUD: 'alibaba_cloud', @@ -487,10 +485,8 @@ export const CloudProviderValues = { /** Google Cloud Platform. */ GCP: 'gcp', } as const; -export type CloudProviderValues = typeof CloudProviderValues[keyof typeof CloudProviderValues]; - - - +export type CloudProviderValues = + typeof CloudProviderValues[keyof typeof CloudProviderValues]; export const CloudPlatformValues = { /** Alibaba Cloud Elastic Compute Service. */ @@ -528,10 +524,8 @@ export const CloudPlatformValues = { /** Google Cloud App Engine (GAE). */ GCP_APP_ENGINE: 'gcp_app_engine', } as const; -export type CloudPlatformValues = typeof CloudPlatformValues[keyof typeof CloudPlatformValues]; - - - +export type CloudPlatformValues = + typeof CloudPlatformValues[keyof typeof CloudPlatformValues]; export const AwsEcsLaunchtypeValues = { /** ec2. */ @@ -539,10 +533,8 @@ export const AwsEcsLaunchtypeValues = { /** fargate. */ FARGATE: 'fargate', } as const; -export type AwsEcsLaunchtypeValues = typeof AwsEcsLaunchtypeValues[keyof typeof AwsEcsLaunchtypeValues]; - - - +export type AwsEcsLaunchtypeValues = + typeof AwsEcsLaunchtypeValues[keyof typeof AwsEcsLaunchtypeValues]; export const HostArchValues = { /** AMD64. */ @@ -562,9 +554,6 @@ export const HostArchValues = { } as const; export type HostArchValues = typeof HostArchValues[keyof typeof HostArchValues]; - - - export const OsTypeValues = { /** Microsoft Windows. 
*/ WINDOWS: 'windows', @@ -591,9 +580,6 @@ export const OsTypeValues = { } as const; export type OsTypeValues = typeof OsTypeValues[keyof typeof OsTypeValues]; - - - export const TelemetrySdkLanguageValues = { /** cpp. */ CPP: 'cpp', @@ -616,5 +602,5 @@ export const TelemetrySdkLanguageValues = { /** webjs. */ WEBJS: 'webjs', } as const; -export type TelemetrySdkLanguageValues = typeof TelemetrySdkLanguageValues[keyof typeof TelemetrySdkLanguageValues]; - +export type TelemetrySdkLanguageValues = + typeof TelemetrySdkLanguageValues[keyof typeof TelemetrySdkLanguageValues]; diff --git a/packages/opentelemetry-semantic-conventions/src/trace/SemanticAttributes.ts b/packages/opentelemetry-semantic-conventions/src/trace/SemanticAttributes.ts index 4f653555ed..8bfc4a141d 100644 --- a/packages/opentelemetry-semantic-conventions/src/trace/SemanticAttributes.ts +++ b/packages/opentelemetry-semantic-conventions/src/trace/SemanticAttributes.ts @@ -16,139 +16,139 @@ // DO NOT EDIT, this is an Auto-generated file from scripts/semconv/templates//templates/SemanticAttributes.ts.j2 export const SemanticAttributes = { - /** - * The full invoked ARN as provided on the `Context` passed to the function (`Lambda-Runtime-Invoked-Function-Arn` header on the `/runtime/invocation/next` applicable). - * - * Note: This may be different from `faas.id` if an alias is involved. - */ + * The full invoked ARN as provided on the `Context` passed to the function (`Lambda-Runtime-Invoked-Function-Arn` header on the `/runtime/invocation/next` applicable). + * + * Note: This may be different from `faas.id` if an alias is involved. + */ AWS_LAMBDA_INVOKED_ARN: 'aws.lambda.invoked_arn', /** - * An identifier for the database management system (DBMS) product being used. See below for a list of well-known identifiers. - */ + * An identifier for the database management system (DBMS) product being used. See below for a list of well-known identifiers. + */ DB_SYSTEM: 'db.system', /** - * The connection string used to connect to the database. It is recommended to remove embedded credentials. - */ + * The connection string used to connect to the database. It is recommended to remove embedded credentials. + */ DB_CONNECTION_STRING: 'db.connection_string', /** - * Username for accessing the database. - */ + * Username for accessing the database. + */ DB_USER: 'db.user', /** - * The fully-qualified class name of the [Java Database Connectivity (JDBC)](https://docs.oracle.com/javase/8/docs/technotes/guides/jdbc/) driver used to connect. - */ + * The fully-qualified class name of the [Java Database Connectivity (JDBC)](https://docs.oracle.com/javase/8/docs/technotes/guides/jdbc/) driver used to connect. + */ DB_JDBC_DRIVER_CLASSNAME: 'db.jdbc.driver_classname', /** - * If no [tech-specific attribute](#call-level-attributes-for-specific-technologies) is defined, this attribute is used to report the name of the database being accessed. For commands that switch the database, this should be set to the target database (even if the command fails). - * - * Note: In some SQL databases, the database name to be used is called "schema name". - */ + * If no [tech-specific attribute](#call-level-attributes-for-specific-technologies) is defined, this attribute is used to report the name of the database being accessed. For commands that switch the database, this should be set to the target database (even if the command fails). + * + * Note: In some SQL databases, the database name to be used is called "schema name". 
+ */ DB_NAME: 'db.name', /** - * The database statement being executed. - * - * Note: The value may be sanitized to exclude sensitive information. - */ + * The database statement being executed. + * + * Note: The value may be sanitized to exclude sensitive information. + */ DB_STATEMENT: 'db.statement', /** - * The name of the operation being executed, e.g. the [MongoDB command name](https://docs.mongodb.com/manual/reference/command/#database-operations) such as `findAndModify`, or the SQL keyword. - * - * Note: When setting this to an SQL keyword, it is not recommended to attempt any client-side parsing of `db.statement` just to get this property, but it should be set if the operation name is provided by the library being instrumented. If the SQL statement has an ambiguous operation, or performs more than one operation, this value may be omitted. - */ + * The name of the operation being executed, e.g. the [MongoDB command name](https://docs.mongodb.com/manual/reference/command/#database-operations) such as `findAndModify`, or the SQL keyword. + * + * Note: When setting this to an SQL keyword, it is not recommended to attempt any client-side parsing of `db.statement` just to get this property, but it should be set if the operation name is provided by the library being instrumented. If the SQL statement has an ambiguous operation, or performs more than one operation, this value may be omitted. + */ DB_OPERATION: 'db.operation', /** - * The Microsoft SQL Server [instance name](https://docs.microsoft.com/en-us/sql/connect/jdbc/building-the-connection-url?view=sql-server-ver15) connecting to. This name is used to determine the port of a named instance. - * - * Note: If setting a `db.mssql.instance_name`, `net.peer.port` is no longer required (but still recommended if non-standard). - */ + * The Microsoft SQL Server [instance name](https://docs.microsoft.com/en-us/sql/connect/jdbc/building-the-connection-url?view=sql-server-ver15) connecting to. This name is used to determine the port of a named instance. + * + * Note: If setting a `db.mssql.instance_name`, `net.peer.port` is no longer required (but still recommended if non-standard). + */ DB_MSSQL_INSTANCE_NAME: 'db.mssql.instance_name', /** - * The name of the keyspace being accessed. To be used instead of the generic `db.name` attribute. - */ + * The name of the keyspace being accessed. To be used instead of the generic `db.name` attribute. + */ DB_CASSANDRA_KEYSPACE: 'db.cassandra.keyspace', /** - * The fetch size used for paging, i.e. how many rows will be returned at once. - */ + * The fetch size used for paging, i.e. how many rows will be returned at once. + */ DB_CASSANDRA_PAGE_SIZE: 'db.cassandra.page_size', /** - * The consistency level of the query. Based on consistency values from [CQL](https://docs.datastax.com/en/cassandra-oss/3.0/cassandra/dml/dmlConfigConsistency.html). - */ + * The consistency level of the query. Based on consistency values from [CQL](https://docs.datastax.com/en/cassandra-oss/3.0/cassandra/dml/dmlConfigConsistency.html). + */ DB_CASSANDRA_CONSISTENCY_LEVEL: 'db.cassandra.consistency_level', /** - * The name of the primary table that the operation is acting upon, including the schema name (if applicable). - * - * Note: This mirrors the db.sql.table attribute but references cassandra rather than sql. It is not recommended to attempt any client-side parsing of `db.statement` just to get this property, but it should be set if it is provided by the library being instrumented. 
If the operation is acting upon an anonymous table, or more than one table, this value MUST NOT be set. - */ + * The name of the primary table that the operation is acting upon, including the schema name (if applicable). + * + * Note: This mirrors the db.sql.table attribute but references cassandra rather than sql. It is not recommended to attempt any client-side parsing of `db.statement` just to get this property, but it should be set if it is provided by the library being instrumented. If the operation is acting upon an anonymous table, or more than one table, this value MUST NOT be set. + */ DB_CASSANDRA_TABLE: 'db.cassandra.table', /** - * Whether or not the query is idempotent. - */ + * Whether or not the query is idempotent. + */ DB_CASSANDRA_IDEMPOTENCE: 'db.cassandra.idempotence', /** - * The number of times a query was speculatively executed. Not set or `0` if the query was not executed speculatively. - */ - DB_CASSANDRA_SPECULATIVE_EXECUTION_COUNT: 'db.cassandra.speculative_execution_count', + * The number of times a query was speculatively executed. Not set or `0` if the query was not executed speculatively. + */ + DB_CASSANDRA_SPECULATIVE_EXECUTION_COUNT: + 'db.cassandra.speculative_execution_count', /** - * The ID of the coordinating node for a query. - */ + * The ID of the coordinating node for a query. + */ DB_CASSANDRA_COORDINATOR_ID: 'db.cassandra.coordinator.id', /** - * The data center of the coordinating node for a query. - */ + * The data center of the coordinating node for a query. + */ DB_CASSANDRA_COORDINATOR_DC: 'db.cassandra.coordinator.dc', /** - * The [HBase namespace](https://hbase.apache.org/book.html#_namespace) being accessed. To be used instead of the generic `db.name` attribute. - */ + * The [HBase namespace](https://hbase.apache.org/book.html#_namespace) being accessed. To be used instead of the generic `db.name` attribute. + */ DB_HBASE_NAMESPACE: 'db.hbase.namespace', /** - * The index of the database being accessed as used in the [`SELECT` command](https://redis.io/commands/select), provided as an integer. To be used instead of the generic `db.name` attribute. - */ + * The index of the database being accessed as used in the [`SELECT` command](https://redis.io/commands/select), provided as an integer. To be used instead of the generic `db.name` attribute. + */ DB_REDIS_DATABASE_INDEX: 'db.redis.database_index', /** - * The collection being accessed within the database stated in `db.name`. - */ + * The collection being accessed within the database stated in `db.name`. + */ DB_MONGODB_COLLECTION: 'db.mongodb.collection', /** - * The name of the primary table that the operation is acting upon, including the schema name (if applicable). - * - * Note: It is not recommended to attempt any client-side parsing of `db.statement` just to get this property, but it should be set if it is provided by the library being instrumented. If the operation is acting upon an anonymous table, or more than one table, this value MUST NOT be set. - */ + * The name of the primary table that the operation is acting upon, including the schema name (if applicable). + * + * Note: It is not recommended to attempt any client-side parsing of `db.statement` just to get this property, but it should be set if it is provided by the library being instrumented. If the operation is acting upon an anonymous table, or more than one table, this value MUST NOT be set. + */ DB_SQL_TABLE: 'db.sql.table', /** - * The type of the exception (its fully-qualified class name, if applicable). 
The dynamic type of the exception should be preferred over the static type in languages that support it. - */ + * The type of the exception (its fully-qualified class name, if applicable). The dynamic type of the exception should be preferred over the static type in languages that support it. + */ EXCEPTION_TYPE: 'exception.type', /** - * The exception message. - */ + * The exception message. + */ EXCEPTION_MESSAGE: 'exception.message', /** - * A stacktrace as a string in the natural representation for the language runtime. The representation is to be determined and documented by each language SIG. - */ + * A stacktrace as a string in the natural representation for the language runtime. The representation is to be determined and documented by each language SIG. + */ EXCEPTION_STACKTRACE: 'exception.stacktrace', /** @@ -174,262 +174,264 @@ clear whether the exception will escape. EXCEPTION_ESCAPED: 'exception.escaped', /** - * Type of the trigger on which the function is executed. - */ + * Type of the trigger on which the function is executed. + */ FAAS_TRIGGER: 'faas.trigger', /** - * The execution ID of the current function execution. - */ + * The execution ID of the current function execution. + */ FAAS_EXECUTION: 'faas.execution', /** - * The name of the source on which the triggering operation was performed. For example, in Cloud Storage or S3 corresponds to the bucket name, and in Cosmos DB to the database name. - */ + * The name of the source on which the triggering operation was performed. For example, in Cloud Storage or S3 corresponds to the bucket name, and in Cosmos DB to the database name. + */ FAAS_DOCUMENT_COLLECTION: 'faas.document.collection', /** - * Describes the type of the operation that was performed on the data. - */ + * Describes the type of the operation that was performed on the data. + */ FAAS_DOCUMENT_OPERATION: 'faas.document.operation', /** - * A string containing the time when the data was accessed in the [ISO 8601](https://www.iso.org/iso-8601-date-and-time-format.html) format expressed in [UTC](https://www.w3.org/TR/NOTE-datetime). - */ + * A string containing the time when the data was accessed in the [ISO 8601](https://www.iso.org/iso-8601-date-and-time-format.html) format expressed in [UTC](https://www.w3.org/TR/NOTE-datetime). + */ FAAS_DOCUMENT_TIME: 'faas.document.time', /** - * The document name/table subjected to the operation. For example, in Cloud Storage or S3 is the name of the file, and in Cosmos DB the table name. - */ + * The document name/table subjected to the operation. For example, in Cloud Storage or S3 is the name of the file, and in Cosmos DB the table name. + */ FAAS_DOCUMENT_NAME: 'faas.document.name', /** - * A string containing the function invocation time in the [ISO 8601](https://www.iso.org/iso-8601-date-and-time-format.html) format expressed in [UTC](https://www.w3.org/TR/NOTE-datetime). - */ + * A string containing the function invocation time in the [ISO 8601](https://www.iso.org/iso-8601-date-and-time-format.html) format expressed in [UTC](https://www.w3.org/TR/NOTE-datetime). + */ FAAS_TIME: 'faas.time', /** - * A string containing the schedule period as [Cron Expression](https://docs.oracle.com/cd/E12058_01/doc/doc.1014/e12030/cron_expressions.htm). - */ + * A string containing the schedule period as [Cron Expression](https://docs.oracle.com/cd/E12058_01/doc/doc.1014/e12030/cron_expressions.htm). 
+ */ FAAS_CRON: 'faas.cron', /** - * A boolean that is true if the serverless function is executed for the first time (aka cold-start). - */ + * A boolean that is true if the serverless function is executed for the first time (aka cold-start). + */ FAAS_COLDSTART: 'faas.coldstart', /** - * The name of the invoked function. - * - * Note: SHOULD be equal to the `faas.name` resource attribute of the invoked function. - */ + * The name of the invoked function. + * + * Note: SHOULD be equal to the `faas.name` resource attribute of the invoked function. + */ FAAS_INVOKED_NAME: 'faas.invoked_name', /** - * The cloud provider of the invoked function. - * - * Note: SHOULD be equal to the `cloud.provider` resource attribute of the invoked function. - */ + * The cloud provider of the invoked function. + * + * Note: SHOULD be equal to the `cloud.provider` resource attribute of the invoked function. + */ FAAS_INVOKED_PROVIDER: 'faas.invoked_provider', /** - * The cloud region of the invoked function. - * - * Note: SHOULD be equal to the `cloud.region` resource attribute of the invoked function. - */ + * The cloud region of the invoked function. + * + * Note: SHOULD be equal to the `cloud.region` resource attribute of the invoked function. + */ FAAS_INVOKED_REGION: 'faas.invoked_region', /** - * Transport protocol used. See note below. - */ + * Transport protocol used. See note below. + */ NET_TRANSPORT: 'net.transport', /** - * Remote address of the peer (dotted decimal for IPv4 or [RFC5952](https://tools.ietf.org/html/rfc5952) for IPv6). - */ + * Remote address of the peer (dotted decimal for IPv4 or [RFC5952](https://tools.ietf.org/html/rfc5952) for IPv6). + */ NET_PEER_IP: 'net.peer.ip', /** - * Remote port number. - */ + * Remote port number. + */ NET_PEER_PORT: 'net.peer.port', /** - * Remote hostname or similar, see note below. - */ + * Remote hostname or similar, see note below. + */ NET_PEER_NAME: 'net.peer.name', /** - * Like `net.peer.ip` but for the host IP. Useful in case of a multi-IP host. - */ + * Like `net.peer.ip` but for the host IP. Useful in case of a multi-IP host. + */ NET_HOST_IP: 'net.host.ip', /** - * Like `net.peer.port` but for the host port. - */ + * Like `net.peer.port` but for the host port. + */ NET_HOST_PORT: 'net.host.port', /** - * Local hostname or similar, see note below. - */ + * Local hostname or similar, see note below. + */ NET_HOST_NAME: 'net.host.name', /** - * The internet connection type currently being used by the host. - */ + * The internet connection type currently being used by the host. + */ NET_HOST_CONNECTION_TYPE: 'net.host.connection.type', /** - * This describes more details regarding the connection.type. It may be the type of cell technology connection, but it could be used for describing details about a wifi connection. - */ + * This describes more details regarding the connection.type. It may be the type of cell technology connection, but it could be used for describing details about a wifi connection. + */ NET_HOST_CONNECTION_SUBTYPE: 'net.host.connection.subtype', /** - * The name of the mobile carrier. - */ + * The name of the mobile carrier. + */ NET_HOST_CARRIER_NAME: 'net.host.carrier.name', /** - * The mobile carrier country code. - */ + * The mobile carrier country code. + */ NET_HOST_CARRIER_MCC: 'net.host.carrier.mcc', /** - * The mobile carrier network code. - */ + * The mobile carrier network code. 
+ */ NET_HOST_CARRIER_MNC: 'net.host.carrier.mnc', /** - * The ISO 3166-1 alpha-2 2-character country code associated with the mobile carrier network. - */ + * The ISO 3166-1 alpha-2 2-character country code associated with the mobile carrier network. + */ NET_HOST_CARRIER_ICC: 'net.host.carrier.icc', /** - * The [`service.name`](../../resource/semantic_conventions/README.md#service) of the remote service. SHOULD be equal to the actual `service.name` resource attribute of the remote service if any. - */ + * The [`service.name`](../../resource/semantic_conventions/README.md#service) of the remote service. SHOULD be equal to the actual `service.name` resource attribute of the remote service if any. + */ PEER_SERVICE: 'peer.service', /** - * Username or client_id extracted from the access token or [Authorization](https://tools.ietf.org/html/rfc7235#section-4.2) header in the inbound request from outside the system. - */ + * Username or client_id extracted from the access token or [Authorization](https://tools.ietf.org/html/rfc7235#section-4.2) header in the inbound request from outside the system. + */ ENDUSER_ID: 'enduser.id', /** - * Actual/assumed role the client is making the request under extracted from token or application security context. - */ + * Actual/assumed role the client is making the request under extracted from token or application security context. + */ ENDUSER_ROLE: 'enduser.role', /** - * Scopes or granted authorities the client currently possesses extracted from token or application security context. The value would come from the scope associated with an [OAuth 2.0 Access Token](https://tools.ietf.org/html/rfc6749#section-3.3) or an attribute value in a [SAML 2.0 Assertion](http://docs.oasis-open.org/security/saml/Post2.0/sstc-saml-tech-overview-2.0.html). - */ + * Scopes or granted authorities the client currently possesses extracted from token or application security context. The value would come from the scope associated with an [OAuth 2.0 Access Token](https://tools.ietf.org/html/rfc6749#section-3.3) or an attribute value in a [SAML 2.0 Assertion](http://docs.oasis-open.org/security/saml/Post2.0/sstc-saml-tech-overview-2.0.html). + */ ENDUSER_SCOPE: 'enduser.scope', /** - * Current "managed" thread ID (as opposed to OS thread ID). - */ + * Current "managed" thread ID (as opposed to OS thread ID). + */ THREAD_ID: 'thread.id', /** - * Current thread name. - */ + * Current thread name. + */ THREAD_NAME: 'thread.name', /** - * The method or function name, or equivalent (usually rightmost part of the code unit's name). - */ + * The method or function name, or equivalent (usually rightmost part of the code unit's name). + */ CODE_FUNCTION: 'code.function', /** - * The "namespace" within which `code.function` is defined. Usually the qualified class or module name, such that `code.namespace` + some separator + `code.function` form a unique identifier for the code unit. - */ + * The "namespace" within which `code.function` is defined. Usually the qualified class or module name, such that `code.namespace` + some separator + `code.function` form a unique identifier for the code unit. + */ CODE_NAMESPACE: 'code.namespace', /** - * The source code file name that identifies the code unit as uniquely as possible (preferably an absolute file path). - */ + * The source code file name that identifies the code unit as uniquely as possible (preferably an absolute file path). + */ CODE_FILEPATH: 'code.filepath', /** - * The line number in `code.filepath` best representing the operation. 
It SHOULD point within the code unit named in `code.function`. - */ + * The line number in `code.filepath` best representing the operation. It SHOULD point within the code unit named in `code.function`. + */ CODE_LINENO: 'code.lineno', /** - * HTTP request method. - */ + * HTTP request method. + */ HTTP_METHOD: 'http.method', /** - * Full HTTP request URL in the form `scheme://host[:port]/path?query[#fragment]`. Usually the fragment is not transmitted over HTTP, but if it is known, it should be included nevertheless. - * - * Note: `http.url` MUST NOT contain credentials passed via URL in form of `https://username:password@www.example.com/`. In such case the attribute's value should be `https://www.example.com/`. - */ + * Full HTTP request URL in the form `scheme://host[:port]/path?query[#fragment]`. Usually the fragment is not transmitted over HTTP, but if it is known, it should be included nevertheless. + * + * Note: `http.url` MUST NOT contain credentials passed via URL in form of `https://username:password@www.example.com/`. In such case the attribute's value should be `https://www.example.com/`. + */ HTTP_URL: 'http.url', /** - * The full request target as passed in a HTTP request line or equivalent. - */ + * The full request target as passed in a HTTP request line or equivalent. + */ HTTP_TARGET: 'http.target', /** - * The value of the [HTTP host header](https://tools.ietf.org/html/rfc7230#section-5.4). An empty Host header should also be reported, see note. - * - * Note: When the header is present but empty the attribute SHOULD be set to the empty string. Note that this is a valid situation that is expected in certain cases, according the aforementioned [section of RFC 7230](https://tools.ietf.org/html/rfc7230#section-5.4). When the header is not set the attribute MUST NOT be set. - */ + * The value of the [HTTP host header](https://tools.ietf.org/html/rfc7230#section-5.4). An empty Host header should also be reported, see note. + * + * Note: When the header is present but empty the attribute SHOULD be set to the empty string. Note that this is a valid situation that is expected in certain cases, according the aforementioned [section of RFC 7230](https://tools.ietf.org/html/rfc7230#section-5.4). When the header is not set the attribute MUST NOT be set. + */ HTTP_HOST: 'http.host', /** - * The URI scheme identifying the used protocol. - */ + * The URI scheme identifying the used protocol. + */ HTTP_SCHEME: 'http.scheme', /** - * [HTTP response status code](https://tools.ietf.org/html/rfc7231#section-6). - */ + * [HTTP response status code](https://tools.ietf.org/html/rfc7231#section-6). + */ HTTP_STATUS_CODE: 'http.status_code', /** - * Kind of HTTP protocol used. - * - * Note: If `net.transport` is not specified, it can be assumed to be `IP.TCP` except if `http.flavor` is `QUIC`, in which case `IP.UDP` is assumed. - */ + * Kind of HTTP protocol used. + * + * Note: If `net.transport` is not specified, it can be assumed to be `IP.TCP` except if `http.flavor` is `QUIC`, in which case `IP.UDP` is assumed. + */ HTTP_FLAVOR: 'http.flavor', /** - * Value of the [HTTP User-Agent](https://tools.ietf.org/html/rfc7231#section-5.5.3) header sent by the client. - */ + * Value of the [HTTP User-Agent](https://tools.ietf.org/html/rfc7231#section-5.5.3) header sent by the client. + */ HTTP_USER_AGENT: 'http.user_agent', /** - * The size of the request payload body in bytes. 
This is the number of bytes transferred excluding headers and is often, but not always, present as the [Content-Length](https://tools.ietf.org/html/rfc7230#section-3.3.2) header. For requests using transport encoding, this should be the compressed size. - */ + * The size of the request payload body in bytes. This is the number of bytes transferred excluding headers and is often, but not always, present as the [Content-Length](https://tools.ietf.org/html/rfc7230#section-3.3.2) header. For requests using transport encoding, this should be the compressed size. + */ HTTP_REQUEST_CONTENT_LENGTH: 'http.request_content_length', /** - * The size of the uncompressed request payload body after transport decoding. Not set if transport encoding not used. - */ - HTTP_REQUEST_CONTENT_LENGTH_UNCOMPRESSED: 'http.request_content_length_uncompressed', + * The size of the uncompressed request payload body after transport decoding. Not set if transport encoding not used. + */ + HTTP_REQUEST_CONTENT_LENGTH_UNCOMPRESSED: + 'http.request_content_length_uncompressed', /** - * The size of the response payload body in bytes. This is the number of bytes transferred excluding headers and is often, but not always, present as the [Content-Length](https://tools.ietf.org/html/rfc7230#section-3.3.2) header. For requests using transport encoding, this should be the compressed size. - */ + * The size of the response payload body in bytes. This is the number of bytes transferred excluding headers and is often, but not always, present as the [Content-Length](https://tools.ietf.org/html/rfc7230#section-3.3.2) header. For requests using transport encoding, this should be the compressed size. + */ HTTP_RESPONSE_CONTENT_LENGTH: 'http.response_content_length', /** - * The size of the uncompressed response payload body after transport decoding. Not set if transport encoding not used. - */ - HTTP_RESPONSE_CONTENT_LENGTH_UNCOMPRESSED: 'http.response_content_length_uncompressed', + * The size of the uncompressed response payload body after transport decoding. Not set if transport encoding not used. + */ + HTTP_RESPONSE_CONTENT_LENGTH_UNCOMPRESSED: + 'http.response_content_length_uncompressed', /** - * The primary server name of the matched virtual host. This should be obtained via configuration. If no such configuration can be obtained, this attribute MUST NOT be set ( `net.host.name` should be used instead). - * - * Note: `http.url` is usually not readily available on the server side but would have to be assembled in a cumbersome and sometimes lossy process from other information (see e.g. open-telemetry/opentelemetry-python/pull/148). It is thus preferred to supply the raw data that is available. - */ + * The primary server name of the matched virtual host. This should be obtained via configuration. If no such configuration can be obtained, this attribute MUST NOT be set ( `net.host.name` should be used instead). + * + * Note: `http.url` is usually not readily available on the server side but would have to be assembled in a cumbersome and sometimes lossy process from other information (see e.g. open-telemetry/opentelemetry-python/pull/148). It is thus preferred to supply the raw data that is available. + */ HTTP_SERVER_NAME: 'http.server_name', /** - * The matched route (path template). - */ + * The matched route (path template). + */ HTTP_ROUTE: 'http.route', /** @@ -450,280 +452,284 @@ the closest proxy. HTTP_CLIENT_IP: 'http.client_ip', /** - * The keys in the `RequestItems` object field. 
- */ + * The keys in the `RequestItems` object field. + */ AWS_DYNAMODB_TABLE_NAMES: 'aws.dynamodb.table_names', /** - * The JSON-serialized value of each item in the `ConsumedCapacity` response field. - */ + * The JSON-serialized value of each item in the `ConsumedCapacity` response field. + */ AWS_DYNAMODB_CONSUMED_CAPACITY: 'aws.dynamodb.consumed_capacity', /** - * The JSON-serialized value of the `ItemCollectionMetrics` response field. - */ + * The JSON-serialized value of the `ItemCollectionMetrics` response field. + */ AWS_DYNAMODB_ITEM_COLLECTION_METRICS: 'aws.dynamodb.item_collection_metrics', /** - * The value of the `ProvisionedThroughput.ReadCapacityUnits` request parameter. - */ - AWS_DYNAMODB_PROVISIONED_READ_CAPACITY: 'aws.dynamodb.provisioned_read_capacity', + * The value of the `ProvisionedThroughput.ReadCapacityUnits` request parameter. + */ + AWS_DYNAMODB_PROVISIONED_READ_CAPACITY: + 'aws.dynamodb.provisioned_read_capacity', /** - * The value of the `ProvisionedThroughput.WriteCapacityUnits` request parameter. - */ - AWS_DYNAMODB_PROVISIONED_WRITE_CAPACITY: 'aws.dynamodb.provisioned_write_capacity', + * The value of the `ProvisionedThroughput.WriteCapacityUnits` request parameter. + */ + AWS_DYNAMODB_PROVISIONED_WRITE_CAPACITY: + 'aws.dynamodb.provisioned_write_capacity', /** - * The value of the `ConsistentRead` request parameter. - */ + * The value of the `ConsistentRead` request parameter. + */ AWS_DYNAMODB_CONSISTENT_READ: 'aws.dynamodb.consistent_read', /** - * The value of the `ProjectionExpression` request parameter. - */ + * The value of the `ProjectionExpression` request parameter. + */ AWS_DYNAMODB_PROJECTION: 'aws.dynamodb.projection', /** - * The value of the `Limit` request parameter. - */ + * The value of the `Limit` request parameter. + */ AWS_DYNAMODB_LIMIT: 'aws.dynamodb.limit', /** - * The value of the `AttributesToGet` request parameter. - */ + * The value of the `AttributesToGet` request parameter. + */ AWS_DYNAMODB_ATTRIBUTES_TO_GET: 'aws.dynamodb.attributes_to_get', /** - * The value of the `IndexName` request parameter. - */ + * The value of the `IndexName` request parameter. + */ AWS_DYNAMODB_INDEX_NAME: 'aws.dynamodb.index_name', /** - * The value of the `Select` request parameter. - */ + * The value of the `Select` request parameter. + */ AWS_DYNAMODB_SELECT: 'aws.dynamodb.select', /** - * The JSON-serialized value of each item of the `GlobalSecondaryIndexes` request field. - */ - AWS_DYNAMODB_GLOBAL_SECONDARY_INDEXES: 'aws.dynamodb.global_secondary_indexes', + * The JSON-serialized value of each item of the `GlobalSecondaryIndexes` request field. + */ + AWS_DYNAMODB_GLOBAL_SECONDARY_INDEXES: + 'aws.dynamodb.global_secondary_indexes', /** - * The JSON-serialized value of each item of the `LocalSecondaryIndexes` request field. - */ + * The JSON-serialized value of each item of the `LocalSecondaryIndexes` request field. + */ AWS_DYNAMODB_LOCAL_SECONDARY_INDEXES: 'aws.dynamodb.local_secondary_indexes', /** - * The value of the `ExclusiveStartTableName` request parameter. - */ + * The value of the `ExclusiveStartTableName` request parameter. + */ AWS_DYNAMODB_EXCLUSIVE_START_TABLE: 'aws.dynamodb.exclusive_start_table', /** - * The the number of items in the `TableNames` response parameter. - */ + * The the number of items in the `TableNames` response parameter. + */ AWS_DYNAMODB_TABLE_COUNT: 'aws.dynamodb.table_count', /** - * The value of the `ScanIndexForward` request parameter. - */ + * The value of the `ScanIndexForward` request parameter. 
+ */ AWS_DYNAMODB_SCAN_FORWARD: 'aws.dynamodb.scan_forward', /** - * The value of the `Segment` request parameter. - */ + * The value of the `Segment` request parameter. + */ AWS_DYNAMODB_SEGMENT: 'aws.dynamodb.segment', /** - * The value of the `TotalSegments` request parameter. - */ + * The value of the `TotalSegments` request parameter. + */ AWS_DYNAMODB_TOTAL_SEGMENTS: 'aws.dynamodb.total_segments', /** - * The value of the `Count` response parameter. - */ + * The value of the `Count` response parameter. + */ AWS_DYNAMODB_COUNT: 'aws.dynamodb.count', /** - * The value of the `ScannedCount` response parameter. - */ + * The value of the `ScannedCount` response parameter. + */ AWS_DYNAMODB_SCANNED_COUNT: 'aws.dynamodb.scanned_count', /** - * The JSON-serialized value of each item in the `AttributeDefinitions` request field. - */ + * The JSON-serialized value of each item in the `AttributeDefinitions` request field. + */ AWS_DYNAMODB_ATTRIBUTE_DEFINITIONS: 'aws.dynamodb.attribute_definitions', /** - * The JSON-serialized value of each item in the the `GlobalSecondaryIndexUpdates` request field. - */ - AWS_DYNAMODB_GLOBAL_SECONDARY_INDEX_UPDATES: 'aws.dynamodb.global_secondary_index_updates', + * The JSON-serialized value of each item in the the `GlobalSecondaryIndexUpdates` request field. + */ + AWS_DYNAMODB_GLOBAL_SECONDARY_INDEX_UPDATES: + 'aws.dynamodb.global_secondary_index_updates', /** - * A string identifying the messaging system. - */ + * A string identifying the messaging system. + */ MESSAGING_SYSTEM: 'messaging.system', /** - * The message destination name. This might be equal to the span name but is required nevertheless. - */ + * The message destination name. This might be equal to the span name but is required nevertheless. + */ MESSAGING_DESTINATION: 'messaging.destination', /** - * The kind of message destination. - */ + * The kind of message destination. + */ MESSAGING_DESTINATION_KIND: 'messaging.destination_kind', /** - * A boolean that is true if the message destination is temporary. - */ + * A boolean that is true if the message destination is temporary. + */ MESSAGING_TEMP_DESTINATION: 'messaging.temp_destination', /** - * The name of the transport protocol. - */ + * The name of the transport protocol. + */ MESSAGING_PROTOCOL: 'messaging.protocol', /** - * The version of the transport protocol. - */ + * The version of the transport protocol. + */ MESSAGING_PROTOCOL_VERSION: 'messaging.protocol_version', /** - * Connection string. - */ + * Connection string. + */ MESSAGING_URL: 'messaging.url', /** - * A value used by the messaging system as an identifier for the message, represented as a string. - */ + * A value used by the messaging system as an identifier for the message, represented as a string. + */ MESSAGING_MESSAGE_ID: 'messaging.message_id', /** - * The [conversation ID](#conversations) identifying the conversation to which the message belongs, represented as a string. Sometimes called "Correlation ID". - */ + * The [conversation ID](#conversations) identifying the conversation to which the message belongs, represented as a string. Sometimes called "Correlation ID". + */ MESSAGING_CONVERSATION_ID: 'messaging.conversation_id', /** - * The (uncompressed) size of the message payload in bytes. Also use this attribute if it is unknown whether the compressed or uncompressed payload size is reported. - */ + * The (uncompressed) size of the message payload in bytes. 
Also use this attribute if it is unknown whether the compressed or uncompressed payload size is reported. + */ MESSAGING_MESSAGE_PAYLOAD_SIZE_BYTES: 'messaging.message_payload_size_bytes', /** - * The compressed size of the message payload in bytes. - */ - MESSAGING_MESSAGE_PAYLOAD_COMPRESSED_SIZE_BYTES: 'messaging.message_payload_compressed_size_bytes', + * The compressed size of the message payload in bytes. + */ + MESSAGING_MESSAGE_PAYLOAD_COMPRESSED_SIZE_BYTES: + 'messaging.message_payload_compressed_size_bytes', /** - * A string identifying the kind of message consumption as defined in the [Operation names](#operation-names) section above. If the operation is "send", this attribute MUST NOT be set, since the operation can be inferred from the span kind in that case. - */ + * A string identifying the kind of message consumption as defined in the [Operation names](#operation-names) section above. If the operation is "send", this attribute MUST NOT be set, since the operation can be inferred from the span kind in that case. + */ MESSAGING_OPERATION: 'messaging.operation', /** - * The identifier for the consumer receiving a message. For Kafka, set it to `{messaging.kafka.consumer_group} - {messaging.kafka.client_id}`, if both are present, or only `messaging.kafka.consumer_group`. For brokers, such as RabbitMQ and Artemis, set it to the `client_id` of the client consuming the message. - */ + * The identifier for the consumer receiving a message. For Kafka, set it to `{messaging.kafka.consumer_group} - {messaging.kafka.client_id}`, if both are present, or only `messaging.kafka.consumer_group`. For brokers, such as RabbitMQ and Artemis, set it to the `client_id` of the client consuming the message. + */ MESSAGING_CONSUMER_ID: 'messaging.consumer_id', /** - * RabbitMQ message routing key. - */ + * RabbitMQ message routing key. + */ MESSAGING_RABBITMQ_ROUTING_KEY: 'messaging.rabbitmq.routing_key', /** - * Message keys in Kafka are used for grouping alike messages to ensure they're processed on the same partition. They differ from `messaging.message_id` in that they're not unique. If the key is `null`, the attribute MUST NOT be set. - * - * Note: If the key type is not string, it's string representation has to be supplied for the attribute. If the key has no unambiguous, canonical string form, don't include its value. - */ + * Message keys in Kafka are used for grouping alike messages to ensure they're processed on the same partition. They differ from `messaging.message_id` in that they're not unique. If the key is `null`, the attribute MUST NOT be set. + * + * Note: If the key type is not string, it's string representation has to be supplied for the attribute. If the key has no unambiguous, canonical string form, don't include its value. + */ MESSAGING_KAFKA_MESSAGE_KEY: 'messaging.kafka.message_key', /** - * Name of the Kafka Consumer Group that is handling the message. Only applies to consumers, not producers. - */ + * Name of the Kafka Consumer Group that is handling the message. Only applies to consumers, not producers. + */ MESSAGING_KAFKA_CONSUMER_GROUP: 'messaging.kafka.consumer_group', /** - * Client Id for the Consumer or Producer that is handling the message. - */ + * Client Id for the Consumer or Producer that is handling the message. + */ MESSAGING_KAFKA_CLIENT_ID: 'messaging.kafka.client_id', /** - * Partition the message is sent to. - */ + * Partition the message is sent to. 
+ */ MESSAGING_KAFKA_PARTITION: 'messaging.kafka.partition', /** - * A boolean that is true if the message is a tombstone. - */ + * A boolean that is true if the message is a tombstone. + */ MESSAGING_KAFKA_TOMBSTONE: 'messaging.kafka.tombstone', /** - * A string identifying the remoting system. - */ + * A string identifying the remoting system. + */ RPC_SYSTEM: 'rpc.system', /** - * The full (logical) name of the service being called, including its package name, if applicable. - * - * Note: This is the logical name of the service from the RPC interface perspective, which can be different from the name of any implementing class. The `code.namespace` attribute may be used to store the latter (despite the attribute name, it may include a class name; e.g., class with method actually executing the call on the server side, RPC client stub class on the client side). - */ + * The full (logical) name of the service being called, including its package name, if applicable. + * + * Note: This is the logical name of the service from the RPC interface perspective, which can be different from the name of any implementing class. The `code.namespace` attribute may be used to store the latter (despite the attribute name, it may include a class name; e.g., class with method actually executing the call on the server side, RPC client stub class on the client side). + */ RPC_SERVICE: 'rpc.service', /** - * The name of the (logical) method being called, must be equal to the $method part in the span name. - * - * Note: This is the logical name of the method from the RPC interface perspective, which can be different from the name of any implementing method/function. The `code.function` attribute may be used to store the latter (e.g., method actually executing the call on the server side, RPC client stub method on the client side). - */ + * The name of the (logical) method being called, must be equal to the $method part in the span name. + * + * Note: This is the logical name of the method from the RPC interface perspective, which can be different from the name of any implementing method/function. The `code.function` attribute may be used to store the latter (e.g., method actually executing the call on the server side, RPC client stub method on the client side). + */ RPC_METHOD: 'rpc.method', /** - * The [numeric status code](https://github.com/grpc/grpc/blob/v1.33.2/doc/statuscodes.md) of the gRPC request. - */ + * The [numeric status code](https://github.com/grpc/grpc/blob/v1.33.2/doc/statuscodes.md) of the gRPC request. + */ RPC_GRPC_STATUS_CODE: 'rpc.grpc.status_code', /** - * Protocol version as in `jsonrpc` property of request/response. Since JSON-RPC 1.0 does not specify this, the value can be omitted. - */ + * Protocol version as in `jsonrpc` property of request/response. Since JSON-RPC 1.0 does not specify this, the value can be omitted. + */ RPC_JSONRPC_VERSION: 'rpc.jsonrpc.version', /** - * `id` property of request or response. Since protocol allows id to be int, string, `null` or missing (for notifications), value is expected to be cast to string for simplicity. Use empty string in case of `null` value. Omit entirely if this is a notification. - */ + * `id` property of request or response. Since protocol allows id to be int, string, `null` or missing (for notifications), value is expected to be cast to string for simplicity. Use empty string in case of `null` value. Omit entirely if this is a notification. 
+ */ RPC_JSONRPC_REQUEST_ID: 'rpc.jsonrpc.request_id', /** - * `error.code` property of response if it is an error response. - */ + * `error.code` property of response if it is an error response. + */ RPC_JSONRPC_ERROR_CODE: 'rpc.jsonrpc.error_code', /** - * `error.message` property of response if it is an error response. - */ + * `error.message` property of response if it is an error response. + */ RPC_JSONRPC_ERROR_MESSAGE: 'rpc.jsonrpc.error_message', /** - * Whether this is a received or sent message. - */ + * Whether this is a received or sent message. + */ MESSAGE_TYPE: 'message.type', /** - * MUST be calculated as two different counters starting from `1` one for sent messages and one for received message. - * - * Note: This way we guarantee that the values will be consistent between different implementations. - */ + * MUST be calculated as two different counters starting from `1` one for sent messages and one for received message. + * + * Note: This way we guarantee that the values will be consistent between different implementations. + */ MESSAGE_ID: 'message.id', /** - * Compressed size of the message in bytes. - */ + * Compressed size of the message in bytes. + */ MESSAGE_COMPRESSED_SIZE: 'message.compressed_size', /** - * Uncompressed size of the message in bytes. - */ + * Uncompressed size of the message in bytes. + */ MESSAGE_UNCOMPRESSED_SIZE: 'message.uncompressed_size', }; - export const DbSystemValues = { /** Some other SQL database. Fallback only. See notes. */ OTHER_SQL: 'other_sql', @@ -822,9 +828,6 @@ export const DbSystemValues = { } as const; export type DbSystemValues = typeof DbSystemValues[keyof typeof DbSystemValues]; - - - export const DbCassandraConsistencyLevelValues = { /** all. */ ALL: 'all', @@ -849,10 +852,8 @@ export const DbCassandraConsistencyLevelValues = { /** local_serial. */ LOCAL_SERIAL: 'local_serial', } as const; -export type DbCassandraConsistencyLevelValues = typeof DbCassandraConsistencyLevelValues[keyof typeof DbCassandraConsistencyLevelValues]; - - - +export type DbCassandraConsistencyLevelValues = + typeof DbCassandraConsistencyLevelValues[keyof typeof DbCassandraConsistencyLevelValues]; export const FaasTriggerValues = { /** A response to some data source operation such as a database or filesystem read/write. */ @@ -866,10 +867,8 @@ export const FaasTriggerValues = { /** If none of the others apply. */ OTHER: 'other', } as const; -export type FaasTriggerValues = typeof FaasTriggerValues[keyof typeof FaasTriggerValues]; - - - +export type FaasTriggerValues = + typeof FaasTriggerValues[keyof typeof FaasTriggerValues]; export const FaasDocumentOperationValues = { /** When a new object is created. */ @@ -879,10 +878,8 @@ export const FaasDocumentOperationValues = { /** When an object is deleted. */ DELETE: 'delete', } as const; -export type FaasDocumentOperationValues = typeof FaasDocumentOperationValues[keyof typeof FaasDocumentOperationValues]; - - - +export type FaasDocumentOperationValues = + typeof FaasDocumentOperationValues[keyof typeof FaasDocumentOperationValues]; export const FaasInvokedProviderValues = { /** Alibaba Cloud. */ @@ -894,10 +891,8 @@ export const FaasInvokedProviderValues = { /** Google Cloud Platform. */ GCP: 'gcp', } as const; -export type FaasInvokedProviderValues = typeof FaasInvokedProviderValues[keyof typeof FaasInvokedProviderValues]; - - - +export type FaasInvokedProviderValues = + typeof FaasInvokedProviderValues[keyof typeof FaasInvokedProviderValues]; export const NetTransportValues = { /** ip_tcp. 
*/ @@ -915,10 +910,8 @@ export const NetTransportValues = { /** Something else (non IP-based). */ OTHER: 'other', } as const; -export type NetTransportValues = typeof NetTransportValues[keyof typeof NetTransportValues]; - - - +export type NetTransportValues = + typeof NetTransportValues[keyof typeof NetTransportValues]; export const NetHostConnectionTypeValues = { /** wifi. */ @@ -932,10 +925,8 @@ export const NetHostConnectionTypeValues = { /** unknown. */ UNKNOWN: 'unknown', } as const; -export type NetHostConnectionTypeValues = typeof NetHostConnectionTypeValues[keyof typeof NetHostConnectionTypeValues]; - - - +export type NetHostConnectionTypeValues = + typeof NetHostConnectionTypeValues[keyof typeof NetHostConnectionTypeValues]; export const NetHostConnectionSubtypeValues = { /** GPRS. */ @@ -981,10 +972,8 @@ export const NetHostConnectionSubtypeValues = { /** LTE CA. */ LTE_CA: 'lte_ca', } as const; -export type NetHostConnectionSubtypeValues = typeof NetHostConnectionSubtypeValues[keyof typeof NetHostConnectionSubtypeValues]; - - - +export type NetHostConnectionSubtypeValues = + typeof NetHostConnectionSubtypeValues[keyof typeof NetHostConnectionSubtypeValues]; export const HttpFlavorValues = { /** HTTP 1.0. */ @@ -998,10 +987,8 @@ export const HttpFlavorValues = { /** QUIC protocol. */ QUIC: 'QUIC', } as const; -export type HttpFlavorValues = typeof HttpFlavorValues[keyof typeof HttpFlavorValues]; - - - +export type HttpFlavorValues = + typeof HttpFlavorValues[keyof typeof HttpFlavorValues]; export const MessagingDestinationKindValues = { /** A message sent to a queue. */ @@ -1009,10 +996,8 @@ export const MessagingDestinationKindValues = { /** A message sent to a topic. */ TOPIC: 'topic', } as const; -export type MessagingDestinationKindValues = typeof MessagingDestinationKindValues[keyof typeof MessagingDestinationKindValues]; - - - +export type MessagingDestinationKindValues = + typeof MessagingDestinationKindValues[keyof typeof MessagingDestinationKindValues]; export const MessagingOperationValues = { /** receive. */ @@ -1020,10 +1005,8 @@ export const MessagingOperationValues = { /** process. */ PROCESS: 'process', } as const; -export type MessagingOperationValues = typeof MessagingOperationValues[keyof typeof MessagingOperationValues]; - - - +export type MessagingOperationValues = + typeof MessagingOperationValues[keyof typeof MessagingOperationValues]; export const RpcGrpcStatusCodeValues = { /** OK. */ @@ -1061,10 +1044,8 @@ export const RpcGrpcStatusCodeValues = { /** UNAUTHENTICATED. */ UNAUTHENTICATED: 16, } as const; -export type RpcGrpcStatusCodeValues = typeof RpcGrpcStatusCodeValues[keyof typeof RpcGrpcStatusCodeValues]; - - - +export type RpcGrpcStatusCodeValues = + typeof RpcGrpcStatusCodeValues[keyof typeof RpcGrpcStatusCodeValues]; export const MessageTypeValues = { /** sent. */ @@ -1072,5 +1053,5 @@ export const MessageTypeValues = { /** received. 
*/ RECEIVED: 'RECEIVED', } as const; -export type MessageTypeValues = typeof MessageTypeValues[keyof typeof MessageTypeValues]; - +export type MessageTypeValues = + typeof MessageTypeValues[keyof typeof MessageTypeValues]; diff --git a/packages/opentelemetry-shim-opentracing/src/shim.ts b/packages/opentelemetry-shim-opentracing/src/shim.ts index b9fee53adc..8b4ecd1bf6 100644 --- a/packages/opentelemetry-shim-opentracing/src/shim.ts +++ b/packages/opentelemetry-shim-opentracing/src/shim.ts @@ -15,7 +15,12 @@ */ import * as api from '@opentelemetry/api'; -import { SpanAttributes, SpanAttributeValue, SpanStatusCode, TextMapPropagator } from '@opentelemetry/api'; +import { + SpanAttributes, + SpanAttributeValue, + SpanStatusCode, + TextMapPropagator, +} from '@opentelemetry/api'; import * as opentracing from 'opentracing'; import { SemanticAttributes } from '@opentelemetry/semantic-conventions'; @@ -179,7 +184,10 @@ export class TracerShim extends opentracing.Tracer { } } - override _extract(format: string, carrier: unknown): opentracing.SpanContext | null { + override _extract( + format: string, + carrier: unknown + ): opentracing.SpanContext | null { if (format === opentracing.FORMAT_BINARY) { api.diag.warn('OpentracingShim.extract() does not support FORMAT_BINARY'); // @todo: Implement binary format @@ -199,7 +207,10 @@ export class TracerShim extends opentracing.Tracer { if (!spanContext) { return null; } - return new SpanContextShim(spanContext, baggage || api.propagation.createBaggage()); + return new SpanContextShim( + spanContext, + baggage || api.propagation.createBaggage() + ); } return null; } @@ -296,7 +307,11 @@ export class SpanShim extends opentracing.Span { return this; } - private _logInternal(eventName: string, attributes: SpanAttributes | undefined, timestamp?: number): void { + private _logInternal( + eventName: string, + attributes: SpanAttributes | undefined, + timestamp?: number + ): void { if (attributes && eventName === 'error') { const entries = Object.entries(attributes); const errorEntry = entries.find(([key]) => key === 'error.object'); diff --git a/packages/opentelemetry-shim-opentracing/test/Shim.test.ts b/packages/opentelemetry-shim-opentracing/test/Shim.test.ts index 3ed62d4aad..c2ff3c8e1a 100644 --- a/packages/opentelemetry-shim-opentracing/test/Shim.test.ts +++ b/packages/opentelemetry-shim-opentracing/test/Shim.test.ts @@ -40,10 +40,7 @@ import { SemanticAttributes } from '@opentelemetry/semantic-conventions'; describe('OpenTracing Shim', () => { const compositePropagator = new CompositePropagator({ - propagators: [ - new W3CTraceContextPropagator(), - new W3CBaggagePropagator(), - ], + propagators: [new W3CTraceContextPropagator(), new W3CBaggagePropagator()], }); propagation.setGlobalPropagator(compositePropagator); @@ -268,7 +265,10 @@ describe('OpenTracing Shim', () => { describe('SpanContextShim', () => { it('returns the correct context', () => { - const shim = new SpanContextShim(INVALID_SPAN_CONTEXT, propagation.createBaggage()); + const shim = new SpanContextShim( + INVALID_SPAN_CONTEXT, + propagation.createBaggage() + ); assert.strictEqual(shim.getSpanContext(), INVALID_SPAN_CONTEXT); assert.strictEqual(shim.toTraceId(), INVALID_SPAN_CONTEXT.traceId); assert.strictEqual(shim.toSpanId(), INVALID_SPAN_CONTEXT.spanId); @@ -367,19 +367,26 @@ describe('OpenTracing Shim', () => { it('records an exception', () => { const payload = { - 'error.object': 'boom', fault: 'meow' + 'error.object': 'boom', + fault: 'meow', }; span.logEvent('error', payload); 
assert.strictEqual(otSpan.events[0].name, 'exception'); const expectedAttributes = { [SemanticAttributes.EXCEPTION_MESSAGE]: 'boom', }; - assert.deepStrictEqual(otSpan.events[0].attributes, expectedAttributes); + assert.deepStrictEqual( + otSpan.events[0].attributes, + expectedAttributes + ); }); it('maps to exception semantic conventions', () => { const payload = { - fault: 'meow', 'error.kind': 'boom', message: 'oh no!', stack: 'pancakes' + fault: 'meow', + 'error.kind': 'boom', + message: 'oh no!', + stack: 'pancakes', }; span.logEvent('error', payload); assert.strictEqual(otSpan.events[0].name, 'exception'); @@ -387,9 +394,12 @@ describe('OpenTracing Shim', () => { fault: 'meow', [SemanticAttributes.EXCEPTION_TYPE]: 'boom', [SemanticAttributes.EXCEPTION_MESSAGE]: 'oh no!', - [SemanticAttributes.EXCEPTION_STACKTRACE]: 'pancakes' + [SemanticAttributes.EXCEPTION_STACKTRACE]: 'pancakes', }; - assert.deepStrictEqual(otSpan.events[0].attributes, expectedAttributes); + assert.deepStrictEqual( + otSpan.events[0].attributes, + expectedAttributes + ); }); }); @@ -400,7 +410,10 @@ describe('OpenTracing Shim', () => { const kvLogs = { event: 'fun-time', user: 'meow', value: 123 }; span.log(kvLogs, tomorrow); assert.strictEqual(otSpan.events[0].name, 'fun-time'); - assert.strictEqual(otSpan.events[0].time[0], Math.trunc(tomorrow / 1000)); + assert.strictEqual( + otSpan.events[0].time[0], + Math.trunc(tomorrow / 1000) + ); assert.deepStrictEqual(otSpan.events[0].attributes, kvLogs); }); @@ -408,38 +421,59 @@ describe('OpenTracing Shim', () => { const kvLogs = { user: 'meow', value: 123 }; span.log(kvLogs, tomorrow); assert.strictEqual(otSpan.events[0].name, 'log'); - assert.strictEqual(otSpan.events[0].time[0], Math.trunc(tomorrow / 1000)); + assert.strictEqual( + otSpan.events[0].time[0], + Math.trunc(tomorrow / 1000) + ); assert.deepStrictEqual(otSpan.events[0].attributes, kvLogs); }); it('records an exception', () => { const kvLogs = { - event: 'error', 'error.object': 'boom', fault: 'meow' + event: 'error', + 'error.object': 'boom', + fault: 'meow', }; span.log(kvLogs, tomorrow); assert.strictEqual(otSpan.events[0].name, 'exception'); - assert.strictEqual(otSpan.events[0].time[0], Math.trunc(tomorrow / 1000)); + assert.strictEqual( + otSpan.events[0].time[0], + Math.trunc(tomorrow / 1000) + ); const expectedAttributes = { [SemanticAttributes.EXCEPTION_MESSAGE]: 'boom', }; - assert.deepStrictEqual(otSpan.events[0].attributes, expectedAttributes); + assert.deepStrictEqual( + otSpan.events[0].attributes, + expectedAttributes + ); }); it('maps to exception semantic conventions', () => { const kvLogs = { - event: 'error', fault: 'meow', 'error.kind': 'boom', message: 'oh no!', stack: 'pancakes' + event: 'error', + fault: 'meow', + 'error.kind': 'boom', + message: 'oh no!', + stack: 'pancakes', }; span.log(kvLogs, tomorrow); assert.strictEqual(otSpan.events[0].name, 'exception'); - assert.strictEqual(otSpan.events[0].time[0], Math.trunc(tomorrow / 1000)); + assert.strictEqual( + otSpan.events[0].time[0], + Math.trunc(tomorrow / 1000) + ); const expectedAttributes = { event: 'error', fault: 'meow', [SemanticAttributes.EXCEPTION_TYPE]: 'boom', [SemanticAttributes.EXCEPTION_MESSAGE]: 'oh no!', - [SemanticAttributes.EXCEPTION_STACKTRACE]: 'pancakes' + [SemanticAttributes.EXCEPTION_STACKTRACE]: 'pancakes', }; - assert.deepStrictEqual(otSpan.events[0].attributes, expectedAttributes); + assert.deepStrictEqual( + otSpan.events[0].attributes, + expectedAttributes + ); }); }); }); diff --git 
a/packages/sdk-metrics/src/InstrumentDescriptor.ts b/packages/sdk-metrics/src/InstrumentDescriptor.ts index 4008bc3de2..01eabe2614 100644 --- a/packages/sdk-metrics/src/InstrumentDescriptor.ts +++ b/packages/sdk-metrics/src/InstrumentDescriptor.ts @@ -40,7 +40,11 @@ export interface InstrumentDescriptor { readonly valueType: ValueType; } -export function createInstrumentDescriptor(name: string, type: InstrumentType, options?: MetricOptions): InstrumentDescriptor { +export function createInstrumentDescriptor( + name: string, + type: InstrumentType, + options?: MetricOptions +): InstrumentDescriptor { return { name, type, @@ -50,7 +54,10 @@ export function createInstrumentDescriptor(name: string, type: InstrumentType, o }; } -export function createInstrumentDescriptorWithView(view: View, instrument: InstrumentDescriptor): InstrumentDescriptor { +export function createInstrumentDescriptorWithView( + view: View, + instrument: InstrumentDescriptor +): InstrumentDescriptor { return { name: view.name ?? instrument.name, description: view.description ?? instrument.description, @@ -60,9 +67,14 @@ export function createInstrumentDescriptorWithView(view: View, instrument: Instr }; } -export function isDescriptorCompatibleWith(descriptor: InstrumentDescriptor, otherDescriptor: InstrumentDescriptor) { - return descriptor.name === otherDescriptor.name - && descriptor.unit === otherDescriptor.unit - && descriptor.type === otherDescriptor.type - && descriptor.valueType === otherDescriptor.valueType; +export function isDescriptorCompatibleWith( + descriptor: InstrumentDescriptor, + otherDescriptor: InstrumentDescriptor +) { + return ( + descriptor.name === otherDescriptor.name && + descriptor.unit === otherDescriptor.unit && + descriptor.type === otherDescriptor.type && + descriptor.valueType === otherDescriptor.valueType + ); } diff --git a/packages/sdk-metrics/src/Instruments.ts b/packages/sdk-metrics/src/Instruments.ts index 8b70c06f70..09e849cc32 100644 --- a/packages/sdk-metrics/src/Instruments.ts +++ b/packages/sdk-metrics/src/Instruments.ts @@ -32,13 +32,26 @@ import { import { hrTime } from '@opentelemetry/core'; import { InstrumentDescriptor } from './InstrumentDescriptor'; import { ObservableRegistry } from './state/ObservableRegistry'; -import { AsyncWritableMetricStorage, WritableMetricStorage } from './state/WritableMetricStorage'; +import { + AsyncWritableMetricStorage, + WritableMetricStorage, +} from './state/WritableMetricStorage'; export class SyncInstrument { - constructor(private _writableMetricStorage: WritableMetricStorage, protected _descriptor: InstrumentDescriptor) {} + constructor( + private _writableMetricStorage: WritableMetricStorage, + protected _descriptor: InstrumentDescriptor + ) {} - protected _record(value: number, attributes: MetricAttributes = {}, context: Context = contextApi.active()) { - if (this._descriptor.valueType === ValueType.INT && !Number.isInteger(value)) { + protected _record( + value: number, + attributes: MetricAttributes = {}, + context: Context = contextApi.active() + ) { + if ( + this._descriptor.valueType === ValueType.INT && + !Number.isInteger(value) + ) { diag.warn( `INT value type cannot accept a floating-point value for ${this._descriptor.name}, ignoring the fractional digits.` ); @@ -51,7 +64,10 @@ export class SyncInstrument { /** * The class implements {@link UpDownCounter} interface. 
*/ -export class UpDownCounterInstrument extends SyncInstrument implements UpDownCounter { +export class UpDownCounterInstrument + extends SyncInstrument + implements UpDownCounter +{ /** * Increment value of counter by the input. Inputs may be negative. */ @@ -69,7 +85,9 @@ export class CounterInstrument extends SyncInstrument implements Counter { */ add(value: number, attributes?: MetricAttributes, ctx?: Context): void { if (value < 0) { - diag.warn(`negative value provided to counter ${this._descriptor.name}: ${value}`); + diag.warn( + `negative value provided to counter ${this._descriptor.name}: ${value}` + ); return; } @@ -86,7 +104,9 @@ export class HistogramInstrument extends SyncInstrument implements Histogram { */ record(value: number, attributes?: MetricAttributes, ctx?: Context): void { if (value < 0) { - diag.warn(`negative value provided to histogram ${this._descriptor.name}: ${value}`); + diag.warn( + `negative value provided to histogram ${this._descriptor.name}: ${value}` + ); return; } this._record(value, attributes, ctx); @@ -99,7 +119,11 @@ export class ObservableInstrument implements Observable { /** @internal */ _descriptor: InstrumentDescriptor; - constructor(descriptor: InstrumentDescriptor, metricStorages: AsyncWritableMetricStorage[], private _observableRegistry: ObservableRegistry) { + constructor( + descriptor: InstrumentDescriptor, + metricStorages: AsyncWritableMetricStorage[], + private _observableRegistry: ObservableRegistry + ) { this._descriptor = descriptor; this._metricStorages = metricStorages; } @@ -119,10 +143,18 @@ export class ObservableInstrument implements Observable { } } -export class ObservableCounterInstrument extends ObservableInstrument implements ObservableCounter {} -export class ObservableGaugeInstrument extends ObservableInstrument implements ObservableGauge {} -export class ObservableUpDownCounterInstrument extends ObservableInstrument implements ObservableUpDownCounter {} +export class ObservableCounterInstrument + extends ObservableInstrument + implements ObservableCounter {} +export class ObservableGaugeInstrument + extends ObservableInstrument + implements ObservableGauge {} +export class ObservableUpDownCounterInstrument + extends ObservableInstrument + implements ObservableUpDownCounter {} -export function isObservableInstrument(it: unknown): it is ObservableInstrument { +export function isObservableInstrument( + it: unknown +): it is ObservableInstrument { return it instanceof ObservableInstrument; } diff --git a/packages/sdk-metrics/src/Meter.ts b/packages/sdk-metrics/src/Meter.ts index bd58dcbac1..b3d69d0dfd 100644 --- a/packages/sdk-metrics/src/Meter.ts +++ b/packages/sdk-metrics/src/Meter.ts @@ -26,7 +26,10 @@ import { BatchObservableCallback, Observable, } from '@opentelemetry/api'; -import { createInstrumentDescriptor, InstrumentType } from './InstrumentDescriptor'; +import { + createInstrumentDescriptor, + InstrumentType, +} from './InstrumentDescriptor'; import { CounterInstrument, HistogramInstrument, @@ -47,7 +50,11 @@ export class Meter implements IMeter { * Create a {@link Histogram} instrument. 
*/ createHistogram(name: string, options?: MetricOptions): Histogram { - const descriptor = createInstrumentDescriptor(name, InstrumentType.HISTOGRAM, options); + const descriptor = createInstrumentDescriptor( + name, + InstrumentType.HISTOGRAM, + options + ); const storage = this._meterSharedState.registerMetricStorage(descriptor); return new HistogramInstrument(storage, descriptor); } @@ -56,7 +63,11 @@ export class Meter implements IMeter { * Create a {@link Counter} instrument. */ createCounter(name: string, options?: MetricOptions): Counter { - const descriptor = createInstrumentDescriptor(name, InstrumentType.COUNTER, options); + const descriptor = createInstrumentDescriptor( + name, + InstrumentType.COUNTER, + options + ); const storage = this._meterSharedState.registerMetricStorage(descriptor); return new CounterInstrument(storage, descriptor); } @@ -65,7 +76,11 @@ export class Meter implements IMeter { * Create a {@link UpDownCounter} instrument. */ createUpDownCounter(name: string, options?: MetricOptions): UpDownCounter { - const descriptor = createInstrumentDescriptor(name, InstrumentType.UP_DOWN_COUNTER, options); + const descriptor = createInstrumentDescriptor( + name, + InstrumentType.UP_DOWN_COUNTER, + options + ); const storage = this._meterSharedState.registerMetricStorage(descriptor); return new UpDownCounterInstrument(storage, descriptor); } @@ -75,11 +90,20 @@ export class Meter implements IMeter { */ createObservableGauge( name: string, - options?: MetricOptions, + options?: MetricOptions ): ObservableGauge { - const descriptor = createInstrumentDescriptor(name, InstrumentType.OBSERVABLE_GAUGE, options); - const storages = this._meterSharedState.registerAsyncMetricStorage(descriptor); - return new ObservableGaugeInstrument(descriptor, storages, this._meterSharedState.observableRegistry); + const descriptor = createInstrumentDescriptor( + name, + InstrumentType.OBSERVABLE_GAUGE, + options + ); + const storages = + this._meterSharedState.registerAsyncMetricStorage(descriptor); + return new ObservableGaugeInstrument( + descriptor, + storages, + this._meterSharedState.observableRegistry + ); } /** @@ -87,11 +111,20 @@ export class Meter implements IMeter { */ createObservableCounter( name: string, - options?: MetricOptions, + options?: MetricOptions ): ObservableCounter { - const descriptor = createInstrumentDescriptor(name, InstrumentType.OBSERVABLE_COUNTER, options); - const storages = this._meterSharedState.registerAsyncMetricStorage(descriptor); - return new ObservableCounterInstrument(descriptor, storages, this._meterSharedState.observableRegistry); + const descriptor = createInstrumentDescriptor( + name, + InstrumentType.OBSERVABLE_COUNTER, + options + ); + const storages = + this._meterSharedState.registerAsyncMetricStorage(descriptor); + return new ObservableCounterInstrument( + descriptor, + storages, + this._meterSharedState.observableRegistry + ); } /** @@ -99,24 +132,45 @@ export class Meter implements IMeter { */ createObservableUpDownCounter( name: string, - options?: MetricOptions, + options?: MetricOptions ): ObservableUpDownCounter { - const descriptor = createInstrumentDescriptor(name, InstrumentType.OBSERVABLE_UP_DOWN_COUNTER, options); - const storages = this._meterSharedState.registerAsyncMetricStorage(descriptor); - return new ObservableUpDownCounterInstrument(descriptor, storages, this._meterSharedState.observableRegistry); + const descriptor = createInstrumentDescriptor( + name, + InstrumentType.OBSERVABLE_UP_DOWN_COUNTER, + options + ); + const 
storages = + this._meterSharedState.registerAsyncMetricStorage(descriptor); + return new ObservableUpDownCounterInstrument( + descriptor, + storages, + this._meterSharedState.observableRegistry + ); } /** * @see {@link Meter.addBatchObservableCallback} */ - addBatchObservableCallback(callback: BatchObservableCallback, observables: Observable[]) { - this._meterSharedState.observableRegistry.addBatchCallback(callback, observables); + addBatchObservableCallback( + callback: BatchObservableCallback, + observables: Observable[] + ) { + this._meterSharedState.observableRegistry.addBatchCallback( + callback, + observables + ); } /** * @see {@link Meter.removeBatchObservableCallback} */ - removeBatchObservableCallback(callback: BatchObservableCallback, observables: Observable[]) { - this._meterSharedState.observableRegistry.removeBatchCallback(callback, observables); + removeBatchObservableCallback( + callback: BatchObservableCallback, + observables: Observable[] + ) { + this._meterSharedState.observableRegistry.removeBatchCallback( + callback, + observables + ); } } diff --git a/packages/sdk-metrics/src/MeterProvider.ts b/packages/sdk-metrics/src/MeterProvider.ts index 13b17fb564..72f0945b9f 100644 --- a/packages/sdk-metrics/src/MeterProvider.ts +++ b/packages/sdk-metrics/src/MeterProvider.ts @@ -45,10 +45,12 @@ export class MeterProvider implements IMeterProvider { private _shutdown = false; constructor(options?: MeterProviderOptions) { - const resource = Resource.default().merge(options?.resource ?? Resource.empty()); + const resource = Resource.default().merge( + options?.resource ?? Resource.empty() + ); this._sharedState = new MeterProviderSharedState(resource); - if(options?.views != null && options.views.length > 0){ - for(const view of options.views){ + if (options?.views != null && options.views.length > 0) { + for (const view of options.views) { this._sharedState.viewRegistry.addView(view); } } @@ -64,9 +66,11 @@ export class MeterProvider implements IMeterProvider { return createNoopMeter(); } - return this._sharedState - .getMeterSharedState({ name, version, schemaUrl: options.schemaUrl }) - .meter; + return this._sharedState.getMeterSharedState({ + name, + version, + schemaUrl: options.schemaUrl, + }).meter; } /** @@ -95,9 +99,11 @@ export class MeterProvider implements IMeterProvider { this._shutdown = true; - await Promise.all(this._sharedState.metricCollectors.map(collector => { - return collector.shutdown(options); - })); + await Promise.all( + this._sharedState.metricCollectors.map(collector => { + return collector.shutdown(options); + }) + ); } /** @@ -112,8 +118,10 @@ export class MeterProvider implements IMeterProvider { return; } - await Promise.all(this._sharedState.metricCollectors.map(collector => { - return collector.forceFlush(options); - })); + await Promise.all( + this._sharedState.metricCollectors.map(collector => { + return collector.forceFlush(options); + }) + ); } } diff --git a/packages/sdk-metrics/src/ObservableResult.ts b/packages/sdk-metrics/src/ObservableResult.ts index e3a46f1c4d..31e194290c 100644 --- a/packages/sdk-metrics/src/ObservableResult.ts +++ b/packages/sdk-metrics/src/ObservableResult.ts @@ -41,7 +41,10 @@ export class ObservableResultImpl implements ObservableResult { * Observe a measurement of the value associated with the given attributes. 
*/ observe(value: number, attributes: MetricAttributes = {}): void { - if (this._descriptor.valueType === ValueType.INT && !Number.isInteger(value)) { + if ( + this._descriptor.valueType === ValueType.INT && + !Number.isInteger(value) + ) { diag.warn( `INT value type cannot accept a floating-point value for ${this._descriptor.name}, ignoring the fractional digits.` ); @@ -63,7 +66,11 @@ export class BatchObservableResultImpl implements BatchObservableResult { /** * Observe a measurement of the value associated with the given attributes. */ - observe(metric: Observable, value: number, attributes: MetricAttributes = {}): void { + observe( + metric: Observable, + value: number, + attributes: MetricAttributes = {} + ): void { if (!isObservableInstrument(metric)) { return; } @@ -72,7 +79,10 @@ export class BatchObservableResultImpl implements BatchObservableResult { map = new AttributeHashMap(); this._buffer.set(metric, map); } - if (metric._descriptor.valueType === ValueType.INT && !Number.isInteger(value)) { + if ( + metric._descriptor.valueType === ValueType.INT && + !Number.isInteger(value) + ) { diag.warn( `INT value type cannot accept a floating-point value for ${metric._descriptor.name}, ignoring the fractional digits.` ); diff --git a/packages/sdk-metrics/src/aggregator/Drop.ts b/packages/sdk-metrics/src/aggregator/Drop.ts index 183a579531..343c90357c 100644 --- a/packages/sdk-metrics/src/aggregator/Drop.ts +++ b/packages/sdk-metrics/src/aggregator/Drop.ts @@ -19,11 +19,7 @@ import { AggregationTemporality } from '../export/AggregationTemporality'; import { MetricData } from '../export/MetricData'; import { InstrumentDescriptor } from '../InstrumentDescriptor'; import { Maybe } from '../utils'; -import { - AggregatorKind, - Aggregator, - AccumulationRecord, -} from './types'; +import { AggregatorKind, Aggregator, AccumulationRecord } from './types'; /** Basic aggregator for None which keeps no recorded value. */ export class DropAggregator implements Aggregator { @@ -45,7 +41,8 @@ export class DropAggregator implements Aggregator { _descriptor: InstrumentDescriptor, _aggregationTemporality: AggregationTemporality, _accumulationByAttributes: AccumulationRecord[], - _endTime: HrTime): Maybe { + _endTime: HrTime + ): Maybe { return undefined; } } diff --git a/packages/sdk-metrics/src/aggregator/Histogram.ts b/packages/sdk-metrics/src/aggregator/Histogram.ts index 5a271980de..a916e18248 100644 --- a/packages/sdk-metrics/src/aggregator/Histogram.ts +++ b/packages/sdk-metrics/src/aggregator/Histogram.ts @@ -18,17 +18,11 @@ import { Accumulation, AccumulationRecord, Aggregator, - AggregatorKind + AggregatorKind, } from './types'; -import { - DataPointType, - HistogramMetricData -} from '../export/MetricData'; +import { DataPointType, HistogramMetricData } from '../export/MetricData'; import { HrTime } from '@opentelemetry/api'; -import { - InstrumentDescriptor, - InstrumentType -} from '../InstrumentDescriptor'; +import { InstrumentDescriptor, InstrumentType } from '../InstrumentDescriptor'; import { Maybe } from '../utils'; import { AggregationTemporality } from '../export/AggregationTemporality'; @@ -61,7 +55,7 @@ function createNewEmptyCheckpoint(boundaries: number[]): InternalHistogram { count: 0, hasMinMax: false, min: Infinity, - max: -Infinity + max: -Infinity, }; } @@ -113,10 +107,17 @@ export class HistogramAggregator implements Aggregator { * @param _boundaries upper bounds of recorded values. * @param _recordMinMax If set to true, min and max will be recorded. 
Otherwise, min and max will not be recorded. */ - constructor(private readonly _boundaries: number[], private readonly _recordMinMax: boolean) {} + constructor( + private readonly _boundaries: number[], + private readonly _recordMinMax: boolean + ) {} createAccumulation(startTime: HrTime) { - return new HistogramAccumulation(startTime, this._boundaries, this._recordMinMax); + return new HistogramAccumulation( + startTime, + this._boundaries, + this._recordMinMax + ); } /** @@ -124,7 +125,10 @@ export class HistogramAggregator implements Aggregator { * instance produces all Accumulations with constant boundaries we don't need to worry about * merging accumulations with different boundaries. */ - merge(previous: HistogramAccumulation, delta: HistogramAccumulation): HistogramAccumulation { + merge( + previous: HistogramAccumulation, + delta: HistogramAccumulation + ): HistogramAccumulation { const previousValue = previous.toPointValue(); const deltaValue = delta.toPointValue(); @@ -152,23 +156,33 @@ export class HistogramAggregator implements Aggregator { } } - return new HistogramAccumulation(previous.startTime, previousValue.buckets.boundaries, this._recordMinMax, { - buckets: { - boundaries: previousValue.buckets.boundaries, - counts: mergedCounts, - }, - count: previousValue.count + deltaValue.count, - sum: previousValue.sum + deltaValue.sum, - hasMinMax: this._recordMinMax && (previousValue.hasMinMax || deltaValue.hasMinMax), - min: min, - max: max - }); + return new HistogramAccumulation( + previous.startTime, + previousValue.buckets.boundaries, + this._recordMinMax, + { + buckets: { + boundaries: previousValue.buckets.boundaries, + counts: mergedCounts, + }, + count: previousValue.count + deltaValue.count, + sum: previousValue.sum + deltaValue.sum, + hasMinMax: + this._recordMinMax && + (previousValue.hasMinMax || deltaValue.hasMinMax), + min: min, + max: max, + } + ); } /** * Returns a new DELTA aggregation by comparing two cumulative measurements. */ - diff(previous: HistogramAccumulation, current: HistogramAccumulation): HistogramAccumulation { + diff( + previous: HistogramAccumulation, + current: HistogramAccumulation + ): HistogramAccumulation { const previousValue = previous.toPointValue(); const currentValue = current.toPointValue(); @@ -180,24 +194,30 @@ export class HistogramAggregator implements Aggregator { diffedCounts[idx] = currentCounts[idx] - previousCounts[idx]; } - return new HistogramAccumulation(current.startTime, previousValue.buckets.boundaries, this._recordMinMax, { - buckets: { - boundaries: previousValue.buckets.boundaries, - counts: diffedCounts, - }, - count: currentValue.count - previousValue.count, - sum: currentValue.sum - previousValue.sum, - hasMinMax: false, - min: Infinity, - max: -Infinity - }); + return new HistogramAccumulation( + current.startTime, + previousValue.buckets.boundaries, + this._recordMinMax, + { + buckets: { + boundaries: previousValue.buckets.boundaries, + counts: diffedCounts, + }, + count: currentValue.count - previousValue.count, + sum: currentValue.sum - previousValue.sum, + hasMinMax: false, + min: Infinity, + max: -Infinity, + } + ); } toMetricData( descriptor: InstrumentDescriptor, aggregationTemporality: AggregationTemporality, accumulationByAttributes: AccumulationRecord[], - endTime: HrTime): Maybe { + endTime: HrTime + ): Maybe { return { descriptor, aggregationTemporality, @@ -207,9 +227,9 @@ export class HistogramAggregator implements Aggregator { // determine if instrument allows negative values. 
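        // (Illustrative aside, a sketch based only on the types visible in this hunk.)
        // Up-down counters and observable gauges/up-down counters may record negative
        // values, so a histogram sum over them is not meaningful and is omitted from
        // the data point built below. For a plain histogram, e.g.:
        //
        //   const agg = new HistogramAggregator([0, 10, 100], true);
        //   const acc = agg.createAccumulation([0, 0]);
        //   acc.record(7); // count: 1, sum: 7, min/max: 7, bucket counts: [0, 1, 0, 0]
        //
        // the sum (7) is kept, because Histogram instruments reject negative values.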
const allowsNegativeValues = - (descriptor.type === InstrumentType.UP_DOWN_COUNTER) || - (descriptor.type === InstrumentType.OBSERVABLE_GAUGE) || - (descriptor.type === InstrumentType.OBSERVABLE_UP_DOWN_COUNTER); + descriptor.type === InstrumentType.UP_DOWN_COUNTER || + descriptor.type === InstrumentType.OBSERVABLE_GAUGE || + descriptor.type === InstrumentType.OBSERVABLE_UP_DOWN_COUNTER; return { attributes, @@ -220,10 +240,10 @@ export class HistogramAggregator implements Aggregator { max: pointValue.hasMinMax ? pointValue.max : undefined, sum: !allowsNegativeValues ? pointValue.sum : undefined, buckets: pointValue.buckets, - count: pointValue.count + count: pointValue.count, }, }; - }) + }), }; } } diff --git a/packages/sdk-metrics/src/aggregator/LastValue.ts b/packages/sdk-metrics/src/aggregator/LastValue.ts index dfdee59b65..e9e85b36bd 100644 --- a/packages/sdk-metrics/src/aggregator/LastValue.ts +++ b/packages/sdk-metrics/src/aggregator/LastValue.ts @@ -14,7 +14,13 @@ * limitations under the License. */ -import { Accumulation, AccumulationRecord, Aggregator, AggregatorKind, LastValue } from './types'; +import { + Accumulation, + AccumulationRecord, + Aggregator, + AggregatorKind, + LastValue, +} from './types'; import { HrTime } from '@opentelemetry/api'; import { hrTime, hrTimeToMicroseconds } from '@opentelemetry/core'; import { DataPointType, GaugeMetricData } from '../export/MetricData'; @@ -23,7 +29,11 @@ import { Maybe } from '../utils'; import { AggregationTemporality } from '../export/AggregationTemporality'; export class LastValueAccumulation implements Accumulation { - constructor(public startTime: HrTime, private _current: number = 0, public sampleTime: HrTime = [0, 0]) {} + constructor( + public startTime: HrTime, + private _current: number = 0, + public sampleTime: HrTime = [0, 0] + ) {} record(value: number): void { this._current = value; @@ -52,10 +62,21 @@ export class LastValueAggregator implements Aggregator { * * Return the newly captured (delta) accumulation for LastValueAggregator. */ - merge(previous: LastValueAccumulation, delta: LastValueAccumulation): LastValueAccumulation { + merge( + previous: LastValueAccumulation, + delta: LastValueAccumulation + ): LastValueAccumulation { // nanoseconds may lose precisions. - const latestAccumulation = hrTimeToMicroseconds(delta.sampleTime) >= hrTimeToMicroseconds(previous.sampleTime) ? delta : previous; - return new LastValueAccumulation(previous.startTime, latestAccumulation.toPointValue(), latestAccumulation.sampleTime); + const latestAccumulation = + hrTimeToMicroseconds(delta.sampleTime) >= + hrTimeToMicroseconds(previous.sampleTime) + ? delta + : previous; + return new LastValueAccumulation( + previous.startTime, + latestAccumulation.toPointValue(), + latestAccumulation.sampleTime + ); } /** @@ -64,17 +85,29 @@ export class LastValueAggregator implements Aggregator { * A delta aggregation is not meaningful to LastValueAggregator, just return * the newly captured (delta) accumulation for LastValueAggregator. */ - diff(previous: LastValueAccumulation, current: LastValueAccumulation): LastValueAccumulation { + diff( + previous: LastValueAccumulation, + current: LastValueAccumulation + ): LastValueAccumulation { // nanoseconds may lose precisions. - const latestAccumulation = hrTimeToMicroseconds(current.sampleTime) >= hrTimeToMicroseconds(previous.sampleTime) ? 
current : previous; - return new LastValueAccumulation(current.startTime, latestAccumulation.toPointValue(), latestAccumulation.sampleTime); + const latestAccumulation = + hrTimeToMicroseconds(current.sampleTime) >= + hrTimeToMicroseconds(previous.sampleTime) + ? current + : previous; + return new LastValueAccumulation( + current.startTime, + latestAccumulation.toPointValue(), + latestAccumulation.sampleTime + ); } toMetricData( descriptor: InstrumentDescriptor, aggregationTemporality: AggregationTemporality, accumulationByAttributes: AccumulationRecord[], - endTime: HrTime): Maybe { + endTime: HrTime + ): Maybe { return { descriptor, aggregationTemporality, @@ -86,7 +119,7 @@ export class LastValueAggregator implements Aggregator { endTime, value: accumulation.toPointValue(), }; - }) + }), }; } } diff --git a/packages/sdk-metrics/src/aggregator/Sum.ts b/packages/sdk-metrics/src/aggregator/Sum.ts index f5e18d796f..ba53c389ee 100644 --- a/packages/sdk-metrics/src/aggregator/Sum.ts +++ b/packages/sdk-metrics/src/aggregator/Sum.ts @@ -14,7 +14,13 @@ * limitations under the License. */ -import { Sum, AggregatorKind, Aggregator, Accumulation, AccumulationRecord } from './types'; +import { + Sum, + AggregatorKind, + Aggregator, + Accumulation, + AccumulationRecord, +} from './types'; import { HrTime } from '@opentelemetry/api'; import { DataPointType, SumMetricData } from '../export/MetricData'; import { InstrumentDescriptor } from '../InstrumentDescriptor'; @@ -22,7 +28,12 @@ import { Maybe } from '../utils'; import { AggregationTemporality } from '../export/AggregationTemporality'; export class SumAccumulation implements Accumulation { - constructor(public startTime: HrTime, public monotonic: boolean, private _current: number = 0, public reset = false) {} + constructor( + public startTime: HrTime, + public monotonic: boolean, + private _current: number = 0, + public reset = false + ) {} record(value: number): void { if (this.monotonic && value < 0) { @@ -44,7 +55,7 @@ export class SumAccumulation implements Accumulation { export class SumAggregator implements Aggregator { public kind: AggregatorKind.SUM = AggregatorKind.SUM; - constructor (public monotonic: boolean) {} + constructor(public monotonic: boolean) {} createAccumulation(startTime: HrTime) { return new SumAccumulation(startTime, this.monotonic); @@ -57,9 +68,18 @@ export class SumAggregator implements Aggregator { const prevPv = previous.toPointValue(); const deltaPv = delta.toPointValue(); if (delta.reset) { - return new SumAccumulation(delta.startTime, this.monotonic, deltaPv, delta.reset); + return new SumAccumulation( + delta.startTime, + this.monotonic, + deltaPv, + delta.reset + ); } - return new SumAccumulation(previous.startTime, this.monotonic, prevPv + deltaPv); + return new SumAccumulation( + previous.startTime, + this.monotonic, + prevPv + deltaPv + ); } /** @@ -73,17 +93,27 @@ export class SumAggregator implements Aggregator { * greater than the current one, a reset is deemed to be happened. * Return the current point value to prevent the value from been reset. 
*/ - if (this.monotonic && (prevPv > currPv)) { - return new SumAccumulation(current.startTime, this.monotonic, currPv, true); + if (this.monotonic && prevPv > currPv) { + return new SumAccumulation( + current.startTime, + this.monotonic, + currPv, + true + ); } - return new SumAccumulation(current.startTime, this.monotonic, currPv - prevPv); + return new SumAccumulation( + current.startTime, + this.monotonic, + currPv - prevPv + ); } toMetricData( descriptor: InstrumentDescriptor, aggregationTemporality: AggregationTemporality, accumulationByAttributes: AccumulationRecord[], - endTime: HrTime): Maybe { + endTime: HrTime + ): Maybe { return { descriptor, aggregationTemporality, @@ -96,7 +126,7 @@ export class SumAggregator implements Aggregator { value: accumulation.toPointValue(), }; }), - isMonotonic: this.monotonic + isMonotonic: this.monotonic, }; } } diff --git a/packages/sdk-metrics/src/aggregator/types.ts b/packages/sdk-metrics/src/aggregator/types.ts index cc6a2a4976..19008da250 100644 --- a/packages/sdk-metrics/src/aggregator/types.ts +++ b/packages/sdk-metrics/src/aggregator/types.ts @@ -115,8 +115,10 @@ export interface Aggregator { * @param endTime the end time of the metric data. * @return the {@link MetricData} that this {@link Aggregator} will produce. */ - toMetricData(descriptor: InstrumentDescriptor, + toMetricData( + descriptor: InstrumentDescriptor, aggregationTemporality: AggregationTemporality, accumulationByAttributes: AccumulationRecord[], - endTime: HrTime): Maybe; + endTime: HrTime + ): Maybe; } diff --git a/packages/sdk-metrics/src/exemplar/AlignedHistogramBucketExemplarReservoir.ts b/packages/sdk-metrics/src/exemplar/AlignedHistogramBucketExemplarReservoir.ts index 0148ead72b..11aab6da23 100644 --- a/packages/sdk-metrics/src/exemplar/AlignedHistogramBucketExemplarReservoir.ts +++ b/packages/sdk-metrics/src/exemplar/AlignedHistogramBucketExemplarReservoir.ts @@ -14,11 +14,9 @@ * limitations under the License. */ - import { Context, HrTime, MetricAttributes } from '@opentelemetry/api'; import { FixedSizeExemplarReservoirBase } from './ExemplarReservoir'; - /** * AlignedHistogramBucketExemplarReservoir takes the same boundaries * configuration of a Histogram. 
This algorithm keeps the last seen measurement @@ -27,12 +25,17 @@ import { FixedSizeExemplarReservoirBase } from './ExemplarReservoir'; export class AlignedHistogramBucketExemplarReservoir extends FixedSizeExemplarReservoirBase { private _boundaries: number[]; constructor(boundaries: number[]) { - super(boundaries.length+1); + super(boundaries.length + 1); this._boundaries = boundaries; } - private _findBucketIndex(value: number, _timestamp: HrTime, _attributes: MetricAttributes, _ctx: Context) { - for(let i = 0; i < this._boundaries.length; i++) { + private _findBucketIndex( + value: number, + _timestamp: HrTime, + _attributes: MetricAttributes, + _ctx: Context + ) { + for (let i = 0; i < this._boundaries.length; i++) { if (value <= this._boundaries[i]) { return i; } @@ -40,7 +43,12 @@ export class AlignedHistogramBucketExemplarReservoir extends FixedSizeExemplarRe return this._boundaries.length; } - offer(value: number, timestamp: HrTime, attributes: MetricAttributes, ctx: Context): void { + offer( + value: number, + timestamp: HrTime, + attributes: MetricAttributes, + ctx: Context + ): void { const index = this._findBucketIndex(value, timestamp, attributes, ctx); this._reservoirStorage[index].offer(value, timestamp, attributes, ctx); } diff --git a/packages/sdk-metrics/src/exemplar/AlwaysSampleExemplarFilter.ts b/packages/sdk-metrics/src/exemplar/AlwaysSampleExemplarFilter.ts index 01c4a5dbfb..9a4511e669 100644 --- a/packages/sdk-metrics/src/exemplar/AlwaysSampleExemplarFilter.ts +++ b/packages/sdk-metrics/src/exemplar/AlwaysSampleExemplarFilter.ts @@ -17,9 +17,7 @@ import { Context, HrTime, MetricAttributes } from '@opentelemetry/api'; import { ExemplarFilter } from './ExemplarFilter'; - export class AlwaysSampleExemplarFilter implements ExemplarFilter { - shouldSample( _value: number, _timestamp: HrTime, diff --git a/packages/sdk-metrics/src/exemplar/ExemplarReservoir.ts b/packages/sdk-metrics/src/exemplar/ExemplarReservoir.ts index fb2e0b8adb..1fcad67339 100644 --- a/packages/sdk-metrics/src/exemplar/ExemplarReservoir.ts +++ b/packages/sdk-metrics/src/exemplar/ExemplarReservoir.ts @@ -14,15 +14,19 @@ * limitations under the License. */ -import { Context, HrTime, isSpanContextValid, trace, MetricAttributes } from '@opentelemetry/api'; +import { + Context, + HrTime, + isSpanContextValid, + trace, + MetricAttributes, +} from '@opentelemetry/api'; import { Exemplar } from './Exemplar'; - /** * An interface for an exemplar reservoir of samples. */ export interface ExemplarReservoir { - /** Offers a measurement to be sampled. 
*/ offer( value: number, @@ -42,7 +46,6 @@ export interface ExemplarReservoir { collect(pointAttributes: MetricAttributes): Exemplar[]; } - class ExemplarBucket { private value: number = 0; private attributes: MetricAttributes = {}; @@ -51,7 +54,12 @@ class ExemplarBucket { private traceId?: string; private _offered: boolean = false; - offer(value: number, timestamp: HrTime, attributes: MetricAttributes, ctx: Context) { + offer( + value: number, + timestamp: HrTime, + attributes: MetricAttributes, + ctx: Context + ) { this.value = value; this.timestamp = timestamp; this.attributes = attributes; @@ -77,7 +85,7 @@ class ExemplarBucket { value: this.value, timestamp: this.timestamp, spanId: this.spanId, - traceId: this.traceId + traceId: this.traceId, }; this.attributes = {}; this.value = 0; @@ -89,20 +97,26 @@ class ExemplarBucket { } } - -export abstract class FixedSizeExemplarReservoirBase implements ExemplarReservoir { +export abstract class FixedSizeExemplarReservoirBase + implements ExemplarReservoir +{ protected _reservoirStorage: ExemplarBucket[]; protected _size: number; constructor(size: number) { this._size = size; this._reservoirStorage = new Array(size); - for(let i = 0; i < this._size; i++) { + for (let i = 0; i < this._size; i++) { this._reservoirStorage[i] = new ExemplarBucket(); } } - abstract offer(value: number, timestamp: HrTime, attributes: MetricAttributes, ctx: Context): void; + abstract offer( + value: number, + timestamp: HrTime, + attributes: MetricAttributes, + ctx: Context + ): void; maxSize(): number { return this._size; diff --git a/packages/sdk-metrics/src/exemplar/NeverSampleExemplarFilter.ts b/packages/sdk-metrics/src/exemplar/NeverSampleExemplarFilter.ts index c51adda410..19697ba3a1 100644 --- a/packages/sdk-metrics/src/exemplar/NeverSampleExemplarFilter.ts +++ b/packages/sdk-metrics/src/exemplar/NeverSampleExemplarFilter.ts @@ -18,7 +18,6 @@ import { Context, HrTime, MetricAttributes } from '@opentelemetry/api'; import { ExemplarFilter } from './ExemplarFilter'; export class NeverSampleExemplarFilter implements ExemplarFilter { - shouldSample( _value: number, _timestamp: HrTime, diff --git a/packages/sdk-metrics/src/exemplar/SimpleFixedSizeExemplarReservoir.ts b/packages/sdk-metrics/src/exemplar/SimpleFixedSizeExemplarReservoir.ts index 892f0605dc..f51a979376 100644 --- a/packages/sdk-metrics/src/exemplar/SimpleFixedSizeExemplarReservoir.ts +++ b/packages/sdk-metrics/src/exemplar/SimpleFixedSizeExemplarReservoir.ts @@ -29,17 +29,29 @@ export class SimpleFixedSizeExemplarReservoir extends FixedSizeExemplarReservoir this._numMeasurementsSeen = 0; } - private getRandomInt(min: number, max: number) { //[min, max) + private getRandomInt(min: number, max: number) { + //[min, max) return Math.floor(Math.random() * (max - min) + min); } - private _findBucketIndex(_value: number, _timestamp: HrTime, _attributes: MetricAttributes, _ctx: Context) { - if (this._numMeasurementsSeen < this._size ) return this._numMeasurementsSeen++; + private _findBucketIndex( + _value: number, + _timestamp: HrTime, + _attributes: MetricAttributes, + _ctx: Context + ) { + if (this._numMeasurementsSeen < this._size) + return this._numMeasurementsSeen++; const index = this.getRandomInt(0, ++this._numMeasurementsSeen); - return index < this._size ? index: -1; + return index < this._size ? 
index : -1; } - offer(value: number, timestamp: HrTime, attributes: MetricAttributes, ctx: Context): void { + offer( + value: number, + timestamp: HrTime, + attributes: MetricAttributes, + ctx: Context + ): void { const index = this._findBucketIndex(value, timestamp, attributes, ctx); if (index !== -1) { this._reservoirStorage[index].offer(value, timestamp, attributes, ctx); diff --git a/packages/sdk-metrics/src/exemplar/WithTraceExemplarFilter.ts b/packages/sdk-metrics/src/exemplar/WithTraceExemplarFilter.ts index 6251a36a05..0977281d85 100644 --- a/packages/sdk-metrics/src/exemplar/WithTraceExemplarFilter.ts +++ b/packages/sdk-metrics/src/exemplar/WithTraceExemplarFilter.ts @@ -14,11 +14,17 @@ * limitations under the License. */ -import { Context, HrTime, isSpanContextValid, trace, TraceFlags, MetricAttributes } from '@opentelemetry/api'; +import { + Context, + HrTime, + isSpanContextValid, + trace, + TraceFlags, + MetricAttributes, +} from '@opentelemetry/api'; import { ExemplarFilter } from './ExemplarFilter'; export class WithTraceExemplarFilter implements ExemplarFilter { - shouldSample( value: number, timestamp: HrTime, @@ -26,8 +32,7 @@ export class WithTraceExemplarFilter implements ExemplarFilter { ctx: Context ): boolean { const spanContext = trace.getSpanContext(ctx); - if (!spanContext || !isSpanContextValid(spanContext)) - return false; + if (!spanContext || !isSpanContextValid(spanContext)) return false; return spanContext.traceFlags & TraceFlags.SAMPLED ? true : false; } } diff --git a/packages/sdk-metrics/src/export/AggregationSelector.ts b/packages/sdk-metrics/src/export/AggregationSelector.ts index b0ef5f36ad..7a4eaca935 100644 --- a/packages/sdk-metrics/src/export/AggregationSelector.ts +++ b/packages/sdk-metrics/src/export/AggregationSelector.ts @@ -21,12 +21,18 @@ import { AggregationTemporality } from './AggregationTemporality'; /** * Aggregation selector based on metric instrument types. */ -export type AggregationSelector = (instrumentType: InstrumentType) => Aggregation; +export type AggregationSelector = ( + instrumentType: InstrumentType +) => Aggregation; /** - * Aggregation temporality selector based on metric instrument types. - */ -export type AggregationTemporalitySelector = (instrumentType: InstrumentType) => AggregationTemporality; + * Aggregation temporality selector based on metric instrument types. + */ +export type AggregationTemporalitySelector = ( + instrumentType: InstrumentType +) => AggregationTemporality; -export const DEFAULT_AGGREGATION_SELECTOR: AggregationSelector = _instrumentType => Aggregation.Default(); -export const DEFAULT_AGGREGATION_TEMPORALITY_SELECTOR: AggregationTemporalitySelector = _instrumentType => AggregationTemporality.CUMULATIVE; +export const DEFAULT_AGGREGATION_SELECTOR: AggregationSelector = + _instrumentType => Aggregation.Default(); +export const DEFAULT_AGGREGATION_TEMPORALITY_SELECTOR: AggregationTemporalitySelector = + _instrumentType => AggregationTemporality.CUMULATIVE; diff --git a/packages/sdk-metrics/src/export/ConsoleMetricExporter.ts b/packages/sdk-metrics/src/export/ConsoleMetricExporter.ts index 0b990dfe5e..36c8b48806 100644 --- a/packages/sdk-metrics/src/export/ConsoleMetricExporter.ts +++ b/packages/sdk-metrics/src/export/ConsoleMetricExporter.ts @@ -13,10 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -import { - ExportResult, - ExportResultCode -} from '@opentelemetry/core'; +import { ExportResult, ExportResultCode } from '@opentelemetry/core'; import { InstrumentType } from '../InstrumentDescriptor'; import { AggregationTemporality } from './AggregationTemporality'; import { ResourceMetrics } from './MetricData'; @@ -27,7 +24,7 @@ import { } from './AggregationSelector'; interface ConsoleMetricExporterOptions { - temporalitySelector?: AggregationTemporalitySelector + temporalitySelector?: AggregationTemporalitySelector; } /* eslint-disable no-console */ @@ -36,10 +33,14 @@ export class ConsoleMetricExporter implements PushMetricExporter { protected _temporalitySelector: AggregationTemporalitySelector; constructor(options?: ConsoleMetricExporterOptions) { - this._temporalitySelector = options?.temporalitySelector ?? DEFAULT_AGGREGATION_TEMPORALITY_SELECTOR; + this._temporalitySelector = + options?.temporalitySelector ?? DEFAULT_AGGREGATION_TEMPORALITY_SELECTOR; } - export(metrics: ResourceMetrics, resultCallback: (result: ExportResult) => void): void { + export( + metrics: ResourceMetrics, + resultCallback: (result: ExportResult) => void + ): void { if (this._shutdown) { // If the exporter is shutting down, by spec, we need to return FAILED as export result setImmediate(resultCallback, { code: ExportResultCode.FAILED }); @@ -53,7 +54,9 @@ export class ConsoleMetricExporter implements PushMetricExporter { return Promise.resolve(); } - selectAggregationTemporality(_instrumentType: InstrumentType): AggregationTemporality { + selectAggregationTemporality( + _instrumentType: InstrumentType + ): AggregationTemporality { return this._temporalitySelector(_instrumentType); } @@ -62,13 +65,16 @@ export class ConsoleMetricExporter implements PushMetricExporter { return Promise.resolve(); } - private static _sendMetrics(metrics: ResourceMetrics, done: (result: ExportResult) => void): void { + private static _sendMetrics( + metrics: ResourceMetrics, + done: (result: ExportResult) => void + ): void { for (const scopeMetrics of metrics.scopeMetrics) { for (const metric of scopeMetrics.metrics) { console.dir({ descriptor: metric.descriptor, dataPointType: metric.dataPointType, - dataPoints: metric.dataPoints + dataPoints: metric.dataPoints, }); } } diff --git a/packages/sdk-metrics/src/export/InMemoryMetricExporter.ts b/packages/sdk-metrics/src/export/InMemoryMetricExporter.ts index c0ec67e74a..2a80973bb9 100644 --- a/packages/sdk-metrics/src/export/InMemoryMetricExporter.ts +++ b/packages/sdk-metrics/src/export/InMemoryMetricExporter.ts @@ -38,9 +38,12 @@ export class InMemoryMetricExporter implements PushMetricExporter { /** * @inheritedDoc */ - export(metrics: ResourceMetrics, resultCallback: (result: ExportResult) => void): void { + export( + metrics: ResourceMetrics, + resultCallback: (result: ExportResult) => void + ): void { // Avoid storing metrics when exporter is shutdown - if (this. 
_shutdown) { + if (this._shutdown) { setTimeout(() => resultCallback({ code: ExportResultCode.FAILED }), 0); return; } @@ -65,7 +68,9 @@ export class InMemoryMetricExporter implements PushMetricExporter { this._metrics = []; } - selectAggregationTemporality(_instrumentType: InstrumentType): AggregationTemporality { + selectAggregationTemporality( + _instrumentType: InstrumentType + ): AggregationTemporality { return this._aggregationTemporality; } diff --git a/packages/sdk-metrics/src/export/MetricData.ts b/packages/sdk-metrics/src/export/MetricData.ts index b15da1d9b9..e7adfa0357 100644 --- a/packages/sdk-metrics/src/export/MetricData.ts +++ b/packages/sdk-metrics/src/export/MetricData.ts @@ -113,7 +113,7 @@ export enum DataPointType { * A sum metric data point has a single numeric value and a * monotonicity-indicator. */ - SUM + SUM, } /** diff --git a/packages/sdk-metrics/src/export/MetricExporter.ts b/packages/sdk-metrics/src/export/MetricExporter.ts index 1421ddb71f..bf9362bcdd 100644 --- a/packages/sdk-metrics/src/export/MetricExporter.ts +++ b/packages/sdk-metrics/src/export/MetricExporter.ts @@ -16,9 +16,7 @@ import { AggregationTemporality } from './AggregationTemporality'; import { ResourceMetrics } from './MetricData'; -import { - ExportResult, -} from '@opentelemetry/core'; +import { ExportResult } from '@opentelemetry/core'; import { InstrumentType } from '../InstrumentDescriptor'; import { Aggregation } from '../view/Aggregation'; @@ -34,7 +32,10 @@ export interface PushMetricExporter { * @param metrics the metric data to be exported. * @param resultCallback callback for when the export has completed */ - export(metrics: ResourceMetrics, resultCallback: (result: ExportResult) => void): void; + export( + metrics: ResourceMetrics, + resultCallback: (result: ExportResult) => void + ): void; /** * Ensure that the export of any metrics the exporter has received is @@ -46,7 +47,9 @@ export interface PushMetricExporter { * Select the {@link AggregationTemporality} for the given * {@link InstrumentType} for this exporter. */ - selectAggregationTemporality?(instrumentType: InstrumentType): AggregationTemporality; + selectAggregationTemporality?( + instrumentType: InstrumentType + ): AggregationTemporality; /** * Select the {@link Aggregation} for the given diff --git a/packages/sdk-metrics/src/export/MetricReader.ts b/packages/sdk-metrics/src/export/MetricReader.ts index acf22d46f0..3bc4c63a06 100644 --- a/packages/sdk-metrics/src/export/MetricReader.ts +++ b/packages/sdk-metrics/src/export/MetricReader.ts @@ -23,14 +23,14 @@ import { InstrumentType } from '../InstrumentDescriptor'; import { CollectionOptions, ForceFlushOptions, - ShutdownOptions + ShutdownOptions, } from '../types'; import { Aggregation } from '../view/Aggregation'; import { AggregationSelector, AggregationTemporalitySelector, DEFAULT_AGGREGATION_SELECTOR, - DEFAULT_AGGREGATION_TEMPORALITY_SELECTOR + DEFAULT_AGGREGATION_TEMPORALITY_SELECTOR, } from './AggregationSelector'; export interface MetricReaderOptions { @@ -61,9 +61,10 @@ export abstract class MetricReader { private readonly _aggregationSelector: AggregationSelector; constructor(options?: MetricReaderOptions) { - this._aggregationSelector = options?.aggregationSelector ?? - DEFAULT_AGGREGATION_SELECTOR; - this._aggregationTemporalitySelector = options?.aggregationTemporalitySelector ?? + this._aggregationSelector = + options?.aggregationSelector ?? 
DEFAULT_AGGREGATION_SELECTOR; + this._aggregationTemporalitySelector = + options?.aggregationTemporalitySelector ?? DEFAULT_AGGREGATION_TEMPORALITY_SELECTOR; } @@ -74,7 +75,9 @@ export abstract class MetricReader { */ setMetricProducer(metricProducer: MetricProducer) { if (this._metricProducer) { - throw new Error('MetricReader can not be bound to a MeterProvider again.'); + throw new Error( + 'MetricReader can not be bound to a MeterProvider again.' + ); } this._metricProducer = metricProducer; this.onInitialized(); @@ -92,7 +95,9 @@ export abstract class MetricReader { * Select the {@link AggregationTemporality} for the given * {@link InstrumentType} for this reader. */ - selectAggregationTemporality(instrumentType: InstrumentType): AggregationTemporality { + selectAggregationTemporality( + instrumentType: InstrumentType + ): AggregationTemporality { return this._aggregationTemporalitySelector(instrumentType); } diff --git a/packages/sdk-metrics/src/export/PeriodicExportingMetricReader.ts b/packages/sdk-metrics/src/export/PeriodicExportingMetricReader.ts index 2a686181cc..e9da187a45 100644 --- a/packages/sdk-metrics/src/export/PeriodicExportingMetricReader.ts +++ b/packages/sdk-metrics/src/export/PeriodicExportingMetricReader.ts @@ -19,14 +19,11 @@ import { internal, ExportResultCode, globalErrorHandler, - unrefTimer + unrefTimer, } from '@opentelemetry/core'; import { MetricReader } from './MetricReader'; import { PushMetricExporter } from './MetricExporter'; -import { - callWithTimeout, - TimeoutError -} from '../utils'; +import { callWithTimeout, TimeoutError } from '../utils'; export type PeriodicExportingMetricReaderOptions = { /** @@ -56,22 +53,35 @@ export class PeriodicExportingMetricReader extends MetricReader { constructor(options: PeriodicExportingMetricReaderOptions) { super({ - aggregationSelector: options.exporter.selectAggregation?.bind(options.exporter), - aggregationTemporalitySelector: options.exporter.selectAggregationTemporality?.bind(options.exporter) + aggregationSelector: options.exporter.selectAggregation?.bind( + options.exporter + ), + aggregationTemporalitySelector: + options.exporter.selectAggregationTemporality?.bind(options.exporter), }); - if (options.exportIntervalMillis !== undefined && options.exportIntervalMillis <= 0) { + if ( + options.exportIntervalMillis !== undefined && + options.exportIntervalMillis <= 0 + ) { throw Error('exportIntervalMillis must be greater than 0'); } - if (options.exportTimeoutMillis !== undefined && options.exportTimeoutMillis <= 0) { + if ( + options.exportTimeoutMillis !== undefined && + options.exportTimeoutMillis <= 0 + ) { throw Error('exportTimeoutMillis must be greater than 0'); } - if (options.exportTimeoutMillis !== undefined && + if ( + options.exportTimeoutMillis !== undefined && options.exportIntervalMillis !== undefined && - options.exportIntervalMillis < options.exportTimeoutMillis) { - throw Error('exportIntervalMillis must be greater than or equal to exportTimeoutMillis'); + options.exportIntervalMillis < options.exportTimeoutMillis + ) { + throw Error( + 'exportIntervalMillis must be greater than or equal to exportTimeoutMillis' + ); } this._exportInterval = options.exportIntervalMillis ?? 
60000; @@ -83,7 +93,10 @@ export class PeriodicExportingMetricReader extends MetricReader { const { resourceMetrics, errors } = await this.collect({}); if (errors.length > 0) { - api.diag.error('PeriodicExportingMetricReader: metrics collection errors', ...errors); + api.diag.error( + 'PeriodicExportingMetricReader: metrics collection errors', + ...errors + ); } const result = await internal._export(this._exporter, resourceMetrics); @@ -101,7 +114,10 @@ export class PeriodicExportingMetricReader extends MetricReader { await callWithTimeout(this._runOnce(), this._exportTimeout); } catch (err) { if (err instanceof TimeoutError) { - api.diag.error('Export took longer than %s milliseconds and timed out.', this._exportTimeout); + api.diag.error( + 'Export took longer than %s milliseconds and timed out.', + this._exportTimeout + ); return; } diff --git a/packages/sdk-metrics/src/index.ts b/packages/sdk-metrics/src/index.ts index 0d82d420a5..d77f6a4b10 100644 --- a/packages/sdk-metrics/src/index.ts +++ b/packages/sdk-metrics/src/index.ts @@ -14,20 +14,14 @@ * limitations under the License. */ -export { - Sum, - LastValue, - Histogram, -} from './aggregator/types'; +export { Sum, LastValue, Histogram } from './aggregator/types'; export { AggregationSelector, AggregationTemporalitySelector, } from './export/AggregationSelector'; -export { - AggregationTemporality, -} from './export/AggregationTemporality'; +export { AggregationTemporality } from './export/AggregationTemporality'; export { DataPoint, @@ -41,37 +35,22 @@ export { CollectionResult, } from './export/MetricData'; -export { - PushMetricExporter, -} from './export/MetricExporter'; +export { PushMetricExporter } from './export/MetricExporter'; -export { - MetricReader, - MetricReaderOptions -} from './export/MetricReader'; +export { MetricReader, MetricReaderOptions } from './export/MetricReader'; export { PeriodicExportingMetricReader, PeriodicExportingMetricReaderOptions, } from './export/PeriodicExportingMetricReader'; -export { - InMemoryMetricExporter, -} from './export/InMemoryMetricExporter'; +export { InMemoryMetricExporter } from './export/InMemoryMetricExporter'; -export { - ConsoleMetricExporter, -} from './export/ConsoleMetricExporter'; +export { ConsoleMetricExporter } from './export/ConsoleMetricExporter'; -export { - InstrumentDescriptor, - InstrumentType, -} from './InstrumentDescriptor'; +export { InstrumentDescriptor, InstrumentType } from './InstrumentDescriptor'; -export { - MeterProvider, - MeterProviderOptions, -} from './MeterProvider'; +export { MeterProvider, MeterProviderOptions } from './MeterProvider'; export { DefaultAggregation, @@ -80,14 +59,9 @@ export { HistogramAggregation, LastValueAggregation, SumAggregation, - Aggregation + Aggregation, } from './view/Aggregation'; -export { - View, - ViewOptions, -} from './view/View'; +export { View, ViewOptions } from './view/View'; -export { - TimeoutError -} from './utils'; +export { TimeoutError } from './utils'; diff --git a/packages/sdk-metrics/src/state/AsyncMetricStorage.ts b/packages/sdk-metrics/src/state/AsyncMetricStorage.ts index 6742bf884c..286874987c 100644 --- a/packages/sdk-metrics/src/state/AsyncMetricStorage.ts +++ b/packages/sdk-metrics/src/state/AsyncMetricStorage.ts @@ -32,14 +32,17 @@ import { AsyncWritableMetricStorage } from './WritableMetricStorage'; * * Stores and aggregates {@link MetricData} for asynchronous instruments. 
*/ -export class AsyncMetricStorage> extends MetricStorage implements AsyncWritableMetricStorage { +export class AsyncMetricStorage> + extends MetricStorage + implements AsyncWritableMetricStorage +{ private _deltaMetricStorage: DeltaMetricProcessor; private _temporalMetricStorage: TemporalMetricProcessor; constructor( _instrumentDescriptor: InstrumentDescriptor, aggregator: Aggregator, - private _attributesProcessor: AttributesProcessor, + private _attributesProcessor: AttributesProcessor ) { super(_instrumentDescriptor); this._deltaMetricStorage = new DeltaMetricProcessor(aggregator); @@ -64,7 +67,7 @@ export class AsyncMetricStorage> extends MetricSto collect( collector: MetricCollectorHandle, collectors: MetricCollectorHandle[], - collectionTime: HrTime, + collectionTime: HrTime ): Maybe { const accumulations = this._deltaMetricStorage.collect(); diff --git a/packages/sdk-metrics/src/state/DeltaMetricProcessor.ts b/packages/sdk-metrics/src/state/DeltaMetricProcessor.ts index 999db3c282..3e6cb5eefc 100644 --- a/packages/sdk-metrics/src/state/DeltaMetricProcessor.ts +++ b/packages/sdk-metrics/src/state/DeltaMetricProcessor.ts @@ -34,7 +34,12 @@ export class DeltaMetricProcessor> { constructor(private _aggregator: Aggregator) {} - record(value: number, attributes: MetricAttributes, _context: Context, collectionTime: HrTime) { + record( + value: number, + attributes: MetricAttributes, + _context: Context, + collectionTime: HrTime + ) { const accumulation = this._activeCollectionStorage.getOrDefault( attributes, () => this._aggregator.createAccumulation(collectionTime) @@ -42,22 +47,31 @@ export class DeltaMetricProcessor> { accumulation?.record(value); } - batchCumulate(measurements: AttributeHashMap, collectionTime: HrTime) { - Array.from(measurements.entries()).forEach(([attributes, value, hashCode]) => { - const accumulation = this._aggregator.createAccumulation(collectionTime); - accumulation?.record(value); - let delta = accumulation; - if (this._cumulativeMemoStorage.has(attributes, hashCode)) { - // has() returned true, previous is present. - // eslint-disable-next-line @typescript-eslint/no-non-null-assertion - const previous = this._cumulativeMemoStorage.get(attributes, hashCode)!; - delta = this._aggregator.diff(previous, accumulation); - } + batchCumulate( + measurements: AttributeHashMap, + collectionTime: HrTime + ) { + Array.from(measurements.entries()).forEach( + ([attributes, value, hashCode]) => { + const accumulation = + this._aggregator.createAccumulation(collectionTime); + accumulation?.record(value); + let delta = accumulation; + if (this._cumulativeMemoStorage.has(attributes, hashCode)) { + // has() returned true, previous is present. + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + const previous = this._cumulativeMemoStorage.get( + attributes, + hashCode + )!; + delta = this._aggregator.diff(previous, accumulation); + } - // Save the current record and the delta record. - this._cumulativeMemoStorage.set(attributes, accumulation, hashCode); - this._activeCollectionStorage.set(attributes, delta, hashCode); - }); + // Save the current record and the delta record. 
+ this._cumulativeMemoStorage.set(attributes, accumulation, hashCode); + this._activeCollectionStorage.set(attributes, delta, hashCode); + } + ); } /** diff --git a/packages/sdk-metrics/src/state/HashMap.ts b/packages/sdk-metrics/src/state/HashMap.ts index 681d4dedee..36011d6675 100644 --- a/packages/sdk-metrics/src/state/HashMap.ts +++ b/packages/sdk-metrics/src/state/HashMap.ts @@ -62,7 +62,7 @@ export class HashMap { const keyIterator = this._keyMap.entries(); let next = keyIterator.next(); while (next.done !== true) { - yield [ next.value[1], next.value[0]]; + yield [next.value[1], next.value[0]]; next = keyIterator.next(); } } @@ -73,7 +73,7 @@ export class HashMap { while (next.done !== true) { // next.value[0] here can not be undefined // eslint-disable-next-line @typescript-eslint/no-non-null-assertion - yield [ this._keyMap.get(next.value[0])!, next.value[1], next.value[0]]; + yield [this._keyMap.get(next.value[0])!, next.value[1], next.value[0]]; next = valueIterator.next(); } } @@ -83,7 +83,11 @@ export class HashMap { } } -export class AttributeHashMap extends HashMap { +export class AttributeHashMap extends HashMap< + MetricAttributes, + ValueType, + string +> { constructor() { super(hashAttributes); } diff --git a/packages/sdk-metrics/src/state/MeterSharedState.ts b/packages/sdk-metrics/src/state/MeterSharedState.ts index 65ea6edde7..330d293eea 100644 --- a/packages/sdk-metrics/src/state/MeterSharedState.ts +++ b/packages/sdk-metrics/src/state/MeterSharedState.ts @@ -18,7 +18,10 @@ import { HrTime } from '@opentelemetry/api'; import { InstrumentationScope } from '@opentelemetry/core'; import { MetricCollectOptions } from '../export/MetricProducer'; import { ScopeMetrics } from '../export/MetricData'; -import { createInstrumentDescriptorWithView, InstrumentDescriptor } from '../InstrumentDescriptor'; +import { + createInstrumentDescriptorWithView, + InstrumentDescriptor, +} from '../InstrumentDescriptor'; import { Meter } from '../Meter'; import { isNotNullish, Maybe } from '../utils'; import { AsyncMetricStorage } from './AsyncMetricStorage'; @@ -40,21 +43,27 @@ export class MeterSharedState { observableRegistry = new ObservableRegistry(); meter: Meter; - constructor(private _meterProviderSharedState: MeterProviderSharedState, private _instrumentationScope: InstrumentationScope) { + constructor( + private _meterProviderSharedState: MeterProviderSharedState, + private _instrumentationScope: InstrumentationScope + ) { this.meter = new Meter(this); } registerMetricStorage(descriptor: InstrumentDescriptor) { const storages = this._registerMetricStorage(descriptor, SyncMetricStorage); - if (storages.length === 1) { + if (storages.length === 1) { return storages[0]; } return new MultiMetricStorage(storages); } registerAsyncMetricStorage(descriptor: InstrumentDescriptor) { - const storages = this._registerMetricStorage(descriptor, AsyncMetricStorage); + const storages = this._registerMetricStorage( + descriptor, + AsyncMetricStorage + ); return storages; } @@ -65,18 +74,28 @@ export class MeterSharedState { * @param options options for collection. * @returns the list of metric data collected. */ - async collect(collector: MetricCollectorHandle, collectionTime: HrTime, options?: MetricCollectOptions): Promise { + async collect( + collector: MetricCollectorHandle, + collectionTime: HrTime, + options?: MetricCollectOptions + ): Promise { /** * 1. Call all observable callbacks first. * 2. Collect metric result for the collector. 
*/ - const errors = await this.observableRegistry.observe(collectionTime, options?.timeoutMillis); - const metricDataList = Array.from(this.metricStorageRegistry.getStorages(collector)) + const errors = await this.observableRegistry.observe( + collectionTime, + options?.timeoutMillis + ); + const metricDataList = Array.from( + this.metricStorageRegistry.getStorages(collector) + ) .map(metricStorage => { return metricStorage.collect( collector, this._meterProviderSharedState.metricCollectors, - collectionTime); + collectionTime + ); }) .filter(isNotNullish); @@ -89,34 +108,63 @@ export class MeterSharedState { }; } - private _registerMetricStorage>(descriptor: InstrumentDescriptor, MetricStorageType: MetricStorageType): R[] { - const views = this._meterProviderSharedState.viewRegistry.findViews(descriptor, this._instrumentationScope); - let storages = views - .map(view => { - const viewDescriptor = createInstrumentDescriptorWithView(view, descriptor); - const compatibleStorage = this.metricStorageRegistry.findOrUpdateCompatibleStorage(viewDescriptor); - if (compatibleStorage != null) { - return compatibleStorage; - } - const aggregator = view.aggregation.createAggregator(viewDescriptor); - const viewStorage = new MetricStorageType(viewDescriptor, aggregator, view.attributesProcessor) as R; - this.metricStorageRegistry.register(viewStorage); - return viewStorage; - }); + private _registerMetricStorage< + MetricStorageType extends MetricStorageConstructor, + R extends InstanceType + >( + descriptor: InstrumentDescriptor, + MetricStorageType: MetricStorageType + ): R[] { + const views = this._meterProviderSharedState.viewRegistry.findViews( + descriptor, + this._instrumentationScope + ); + let storages = views.map(view => { + const viewDescriptor = createInstrumentDescriptorWithView( + view, + descriptor + ); + const compatibleStorage = + this.metricStorageRegistry.findOrUpdateCompatibleStorage( + viewDescriptor + ); + if (compatibleStorage != null) { + return compatibleStorage; + } + const aggregator = view.aggregation.createAggregator(viewDescriptor); + const viewStorage = new MetricStorageType( + viewDescriptor, + aggregator, + view.attributesProcessor + ) as R; + this.metricStorageRegistry.register(viewStorage); + return viewStorage; + }); // Fallback to the per-collector aggregations if no view is configured for the instrument. 
if (storages.length === 0) { - const perCollectorAggregations = this._meterProviderSharedState.selectAggregations(descriptor.type); - const collectorStorages = perCollectorAggregations.map(([collector, aggregation]) => { - const compatibleStorage = this.metricStorageRegistry.findOrUpdateCompatibleCollectorStorage(collector, descriptor); - if (compatibleStorage != null) { - return compatibleStorage; + const perCollectorAggregations = + this._meterProviderSharedState.selectAggregations(descriptor.type); + const collectorStorages = perCollectorAggregations.map( + ([collector, aggregation]) => { + const compatibleStorage = + this.metricStorageRegistry.findOrUpdateCompatibleCollectorStorage( + collector, + descriptor + ); + if (compatibleStorage != null) { + return compatibleStorage; + } + const aggregator = aggregation.createAggregator(descriptor); + const storage = new MetricStorageType( + descriptor, + aggregator, + AttributesProcessor.Noop() + ) as R; + this.metricStorageRegistry.registerForCollector(collector, storage); + return storage; } - const aggregator = aggregation.createAggregator(descriptor); - const storage = new MetricStorageType(descriptor, aggregator, AttributesProcessor.Noop()) as R; - this.metricStorageRegistry.registerForCollector(collector, storage); - return storage; - }); + ); storages = storages.concat(collectorStorages); } @@ -130,7 +178,9 @@ interface ScopeMetricsResult { } interface MetricStorageConstructor { - new (instrumentDescriptor: InstrumentDescriptor, + new ( + instrumentDescriptor: InstrumentDescriptor, aggregator: Aggregator>, - attributesProcessor: AttributesProcessor): MetricStorage; + attributesProcessor: AttributesProcessor + ): MetricStorage; } diff --git a/packages/sdk-metrics/src/state/MetricCollector.ts b/packages/sdk-metrics/src/state/MetricCollector.ts index fbffc2f060..95fe8d4e14 100644 --- a/packages/sdk-metrics/src/state/MetricCollector.ts +++ b/packages/sdk-metrics/src/state/MetricCollector.ts @@ -30,13 +30,18 @@ import { MeterProviderSharedState } from './MeterProviderSharedState'; * state for each MetricReader. 
*/ export class MetricCollector implements MetricProducer { - constructor(private _sharedState: MeterProviderSharedState, private _metricReader: MetricReader) { - } + constructor( + private _sharedState: MeterProviderSharedState, + private _metricReader: MetricReader + ) {} async collect(options?: MetricCollectOptions): Promise { const collectionTime = hrTime(); - const meterCollectionPromises = Array.from(this._sharedState.meterSharedStates.values()) - .map(meterSharedState => meterSharedState.collect(this, collectionTime, options)); + const meterCollectionPromises = Array.from( + this._sharedState.meterSharedStates.values() + ).map(meterSharedState => + meterSharedState.collect(this, collectionTime, options) + ); const result = await Promise.all(meterCollectionPromises); return { diff --git a/packages/sdk-metrics/src/state/MetricStorage.ts b/packages/sdk-metrics/src/state/MetricStorage.ts index f69a00daa4..5d02437f58 100644 --- a/packages/sdk-metrics/src/state/MetricStorage.ts +++ b/packages/sdk-metrics/src/state/MetricStorage.ts @@ -18,7 +18,10 @@ import { HrTime } from '@opentelemetry/api'; import { MetricData } from '../export/MetricData'; import { Maybe } from '../utils'; import { MetricCollectorHandle } from './MetricCollector'; -import { createInstrumentDescriptor, InstrumentDescriptor } from '../InstrumentDescriptor'; +import { + createInstrumentDescriptor, + InstrumentDescriptor, +} from '../InstrumentDescriptor'; /** * Internal interface. @@ -26,8 +29,7 @@ import { createInstrumentDescriptor, InstrumentDescriptor } from '../InstrumentD * Represents a storage from which we can collect metrics. */ export abstract class MetricStorage { - constructor(protected _instrumentDescriptor: InstrumentDescriptor) { - } + constructor(protected _instrumentDescriptor: InstrumentDescriptor) {} /** * Collects the metrics from this storage. 
@@ -38,21 +40,22 @@ export abstract class MetricStorage { abstract collect( collector: MetricCollectorHandle, collectors: MetricCollectorHandle[], - collectionTime: HrTime, + collectionTime: HrTime ): Maybe; getInstrumentDescriptor(): Readonly { return this._instrumentDescriptor; } - updateDescription(description: string): void{ + updateDescription(description: string): void { this._instrumentDescriptor = createInstrumentDescriptor( this._instrumentDescriptor.name, this._instrumentDescriptor.type, { description: description, valueType: this._instrumentDescriptor.valueType, - unit: this._instrumentDescriptor.unit - }); + unit: this._instrumentDescriptor.unit, + } + ); } } diff --git a/packages/sdk-metrics/src/state/MetricStorageRegistry.ts b/packages/sdk-metrics/src/state/MetricStorageRegistry.ts index c65a1dd7b2..5261b6ef09 100644 --- a/packages/sdk-metrics/src/state/MetricStorageRegistry.ts +++ b/packages/sdk-metrics/src/state/MetricStorageRegistry.ts @@ -15,9 +15,15 @@ */ import { MetricStorage } from './MetricStorage'; -import { InstrumentDescriptor, isDescriptorCompatibleWith } from '../InstrumentDescriptor'; +import { + InstrumentDescriptor, + isDescriptorCompatibleWith, +} from '../InstrumentDescriptor'; import * as api from '@opentelemetry/api'; -import { getConflictResolutionRecipe, getIncompatibilityDetails } from '../view/RegistrationConflicts'; +import { + getConflictResolutionRecipe, + getIncompatibilityDetails, +} from '../view/RegistrationConflicts'; import { MetricCollectorHandle } from './MetricCollector'; type StorageMap = Map; @@ -27,9 +33,12 @@ type StorageMap = Map; */ export class MetricStorageRegistry { private readonly _sharedRegistry: StorageMap = new Map(); - private readonly _perCollectorRegistry = new Map(); + private readonly _perCollectorRegistry = new Map< + MetricCollectorHandle, + StorageMap + >(); - static create(){ + static create() { return new MetricStorageRegistry(); } @@ -53,7 +62,10 @@ export class MetricStorageRegistry { this._registerStorage(storage, this._sharedRegistry); } - registerForCollector(collector: MetricCollectorHandle, storage: MetricStorage) { + registerForCollector( + collector: MetricCollectorHandle, + storage: MetricStorage + ) { let storageMap = this._perCollectorRegistry.get(collector); if (storageMap == null) { storageMap = new Map(); @@ -62,7 +74,9 @@ export class MetricStorageRegistry { this._registerStorage(storage, storageMap); } - findOrUpdateCompatibleStorage(expectedDescriptor: InstrumentDescriptor): T | null { + findOrUpdateCompatibleStorage( + expectedDescriptor: InstrumentDescriptor + ): T | null { const storages = this._sharedRegistry.get(expectedDescriptor.name); if (storages === undefined) { return null; @@ -73,7 +87,10 @@ export class MetricStorageRegistry { return this._findOrUpdateCompatibleStorage(expectedDescriptor, storages); } - findOrUpdateCompatibleCollectorStorage(collector: MetricCollectorHandle, expectedDescriptor: InstrumentDescriptor): T | null { + findOrUpdateCompatibleCollectorStorage( + collector: MetricCollectorHandle, + expectedDescriptor: InstrumentDescriptor + ): T | null { const storageMap = this._perCollectorRegistry.get(collector); if (storageMap === undefined) { return null; @@ -101,7 +118,10 @@ export class MetricStorageRegistry { storages.push(storage); } - private _findOrUpdateCompatibleStorage(expectedDescriptor: InstrumentDescriptor, existingStorages: MetricStorage[]): T | null { + private _findOrUpdateCompatibleStorage( + expectedDescriptor: InstrumentDescriptor, + existingStorages: 
MetricStorage[] + ): T | null { let compatibleStorage = null; for (const existingStorage of existingStorages) { @@ -110,30 +130,37 @@ export class MetricStorageRegistry { if (isDescriptorCompatibleWith(existingDescriptor, expectedDescriptor)) { // Use the longer description if it does not match. if (existingDescriptor.description !== expectedDescriptor.description) { - if (expectedDescriptor.description.length > existingDescriptor.description.length) { + if ( + expectedDescriptor.description.length > + existingDescriptor.description.length + ) { existingStorage.updateDescription(expectedDescriptor.description); } - api.diag.warn('A view or instrument with the name ', + api.diag.warn( + 'A view or instrument with the name ', expectedDescriptor.name, ' has already been registered, but has a different description and is incompatible with another registered view.\n', 'Details:\n', getIncompatibilityDetails(existingDescriptor, expectedDescriptor), 'The longer description will be used.\nTo resolve the conflict:', - getConflictResolutionRecipe(existingDescriptor, expectedDescriptor)); + getConflictResolutionRecipe(existingDescriptor, expectedDescriptor) + ); } // Storage is fully compatible. There will never be more than one pre-existing fully compatible storage. compatibleStorage = existingStorage as T; } else { // The implementation SHOULD warn about duplicate instrument registration // conflicts after applying View configuration. - api.diag.warn('A view or instrument with the name ', + api.diag.warn( + 'A view or instrument with the name ', expectedDescriptor.name, ' has already been registered and is incompatible with another registered view.\n', 'Details:\n', getIncompatibilityDetails(existingDescriptor, expectedDescriptor), 'To resolve the conflict:\n', - getConflictResolutionRecipe(existingDescriptor, expectedDescriptor)); + getConflictResolutionRecipe(existingDescriptor, expectedDescriptor) + ); } } diff --git a/packages/sdk-metrics/src/state/MultiWritableMetricStorage.ts b/packages/sdk-metrics/src/state/MultiWritableMetricStorage.ts index 64694793ed..1cf51f6728 100644 --- a/packages/sdk-metrics/src/state/MultiWritableMetricStorage.ts +++ b/packages/sdk-metrics/src/state/MultiWritableMetricStorage.ts @@ -23,7 +23,12 @@ import { WritableMetricStorage } from './WritableMetricStorage'; export class MultiMetricStorage implements WritableMetricStorage { constructor(private readonly _backingStorages: WritableMetricStorage[]) {} - record(value: number, attributes: MetricAttributes, context: Context, recordTime: HrTime) { + record( + value: number, + attributes: MetricAttributes, + context: Context, + recordTime: HrTime + ) { this._backingStorages.forEach(it => { it.record(value, attributes, context, recordTime); }); diff --git a/packages/sdk-metrics/src/state/ObservableRegistry.ts b/packages/sdk-metrics/src/state/ObservableRegistry.ts index bd3e3fc6a7..444395bca7 100644 --- a/packages/sdk-metrics/src/state/ObservableRegistry.ts +++ b/packages/sdk-metrics/src/state/ObservableRegistry.ts @@ -14,10 +14,24 @@ * limitations under the License. 
*/ -import { diag, HrTime, BatchObservableCallback, Observable, ObservableCallback } from '@opentelemetry/api'; +import { + diag, + HrTime, + BatchObservableCallback, + Observable, + ObservableCallback, +} from '@opentelemetry/api'; import { isObservableInstrument, ObservableInstrument } from '../Instruments'; -import { BatchObservableResultImpl, ObservableResultImpl } from '../ObservableResult'; -import { callWithTimeout, PromiseAllSettled, isPromiseAllSettledRejectionResult, setEquals } from '../utils'; +import { + BatchObservableResultImpl, + ObservableResultImpl, +} from '../ObservableResult'; +import { + callWithTimeout, + PromiseAllSettled, + isPromiseAllSettledRejectionResult, + setEquals, +} from '../utils'; /** * Records for single instrument observable callback. @@ -53,7 +67,10 @@ export class ObservableRegistry { this._callbacks.push({ callback, instrument }); } - removeCallback(callback: ObservableCallback, instrument: ObservableInstrument) { + removeCallback( + callback: ObservableCallback, + instrument: ObservableInstrument + ) { const idx = this._findCallback(callback, instrument); if (idx < 0) { return; @@ -61,11 +78,19 @@ export class ObservableRegistry { this._callbacks.splice(idx, 1); } - addBatchCallback(callback: BatchObservableCallback, instruments: Observable[]) { + addBatchCallback( + callback: BatchObservableCallback, + instruments: Observable[] + ) { // Create a set of unique instruments. - const observableInstruments = new Set(instruments.filter(isObservableInstrument)); + const observableInstruments = new Set( + instruments.filter(isObservableInstrument) + ); if (observableInstruments.size === 0) { - diag.error('BatchObservableCallback is not associated with valid instruments', instruments); + diag.error( + 'BatchObservableCallback is not associated with valid instruments', + instruments + ); return; } const idx = this._findBatchCallback(callback, observableInstruments); @@ -75,9 +100,14 @@ export class ObservableRegistry { this._batchCallbacks.push({ callback, instruments: observableInstruments }); } - removeBatchCallback(callback: BatchObservableCallback, instruments: Observable[]) { + removeBatchCallback( + callback: BatchObservableCallback, + instruments: Observable[] + ) { // Create a set of unique instruments. - const observableInstruments = new Set(instruments.filter(isObservableInstrument)); + const observableInstruments = new Set( + instruments.filter(isObservableInstrument) + ); const idx = this._findBatchCallback(callback, observableInstruments); if (idx < 0) { return; @@ -88,62 +118,89 @@ export class ObservableRegistry { /** * @returns a promise of rejected reasons for invoking callbacks. 
*/ - async observe(collectionTime: HrTime, timeoutMillis?: number): Promise { - const callbackFutures = this._observeCallbacks(collectionTime, timeoutMillis); - const batchCallbackFutures = this._observeBatchCallbacks(collectionTime, timeoutMillis); + async observe( + collectionTime: HrTime, + timeoutMillis?: number + ): Promise { + const callbackFutures = this._observeCallbacks( + collectionTime, + timeoutMillis + ); + const batchCallbackFutures = this._observeBatchCallbacks( + collectionTime, + timeoutMillis + ); - const results = await PromiseAllSettled([...callbackFutures, ...batchCallbackFutures]); + const results = await PromiseAllSettled([ + ...callbackFutures, + ...batchCallbackFutures, + ]); - const rejections = results.filter(isPromiseAllSettledRejectionResult) + const rejections = results + .filter(isPromiseAllSettledRejectionResult) .map(it => it.reason); return rejections; } private _observeCallbacks(observationTime: HrTime, timeoutMillis?: number) { - return this._callbacks - .map(async ({ callback, instrument }) => { - const observableResult = new ObservableResultImpl(instrument._descriptor); - let callPromise: Promise = Promise.resolve(callback(observableResult)); - if (timeoutMillis != null) { - callPromise = callWithTimeout(callPromise, timeoutMillis); - } - await callPromise; - instrument._metricStorages.forEach(metricStorage => { - metricStorage.record(observableResult._buffer, observationTime); - }); + return this._callbacks.map(async ({ callback, instrument }) => { + const observableResult = new ObservableResultImpl(instrument._descriptor); + let callPromise: Promise = Promise.resolve( + callback(observableResult) + ); + if (timeoutMillis != null) { + callPromise = callWithTimeout(callPromise, timeoutMillis); + } + await callPromise; + instrument._metricStorages.forEach(metricStorage => { + metricStorage.record(observableResult._buffer, observationTime); }); + }); } - private _observeBatchCallbacks(observationTime: HrTime, timeoutMillis?: number) { - return this._batchCallbacks - .map(async ({ callback, instruments }) => { - const observableResult = new BatchObservableResultImpl(); - let callPromise: Promise = Promise.resolve(callback(observableResult)); - if (timeoutMillis != null) { - callPromise = callWithTimeout(callPromise, timeoutMillis); + private _observeBatchCallbacks( + observationTime: HrTime, + timeoutMillis?: number + ) { + return this._batchCallbacks.map(async ({ callback, instruments }) => { + const observableResult = new BatchObservableResultImpl(); + let callPromise: Promise = Promise.resolve( + callback(observableResult) + ); + if (timeoutMillis != null) { + callPromise = callWithTimeout(callPromise, timeoutMillis); + } + await callPromise; + instruments.forEach(instrument => { + const buffer = observableResult._buffer.get(instrument); + if (buffer == null) { + return; } - await callPromise; - instruments.forEach(instrument => { - const buffer = observableResult._buffer.get(instrument); - if (buffer == null) { - return; - } - instrument._metricStorages.forEach(metricStorage => { - metricStorage.record(buffer, observationTime); - }); + instrument._metricStorages.forEach(metricStorage => { + metricStorage.record(buffer, observationTime); }); }); + }); } - private _findCallback(callback: ObservableCallback, instrument: ObservableInstrument) { + private _findCallback( + callback: ObservableCallback, + instrument: ObservableInstrument + ) { return this._callbacks.findIndex(record => { return record.callback === callback && record.instrument === 
instrument; }); } - private _findBatchCallback(callback: BatchObservableCallback, instruments: Set) { + private _findBatchCallback( + callback: BatchObservableCallback, + instruments: Set + ) { return this._batchCallbacks.findIndex(record => { - return record.callback === callback && setEquals(record.instruments, instruments); + return ( + record.callback === callback && + setEquals(record.instruments, instruments) + ); }); } } diff --git a/packages/sdk-metrics/src/state/SyncMetricStorage.ts b/packages/sdk-metrics/src/state/SyncMetricStorage.ts index 7bfd5967b2..0648b12728 100644 --- a/packages/sdk-metrics/src/state/SyncMetricStorage.ts +++ b/packages/sdk-metrics/src/state/SyncMetricStorage.ts @@ -31,7 +31,10 @@ import { MetricCollectorHandle } from './MetricCollector'; * * Stores and aggregates {@link MetricData} for synchronous instruments. */ -export class SyncMetricStorage> extends MetricStorage implements WritableMetricStorage { +export class SyncMetricStorage> + extends MetricStorage + implements WritableMetricStorage +{ private _deltaMetricStorage: DeltaMetricProcessor; private _temporalMetricStorage: TemporalMetricProcessor; @@ -45,7 +48,12 @@ export class SyncMetricStorage> extends MetricStor this._temporalMetricStorage = new TemporalMetricProcessor(aggregator); } - record(value: number, attributes: MetricAttributes, context: Context, recordTime: HrTime) { + record( + value: number, + attributes: MetricAttributes, + context: Context, + recordTime: HrTime + ) { attributes = this._attributesProcessor.process(attributes, context); this._deltaMetricStorage.record(value, attributes, context, recordTime); } @@ -59,7 +67,7 @@ export class SyncMetricStorage> extends MetricStor collect( collector: MetricCollectorHandle, collectors: MetricCollectorHandle[], - collectionTime: HrTime, + collectionTime: HrTime ): Maybe { const accumulations = this._deltaMetricStorage.collect(); diff --git a/packages/sdk-metrics/src/state/TemporalMetricProcessor.ts b/packages/sdk-metrics/src/state/TemporalMetricProcessor.ts index 7e0000d775..2b9c5dbbaa 100644 --- a/packages/sdk-metrics/src/state/TemporalMetricProcessor.ts +++ b/packages/sdk-metrics/src/state/TemporalMetricProcessor.ts @@ -15,7 +15,11 @@ */ import { HrTime } from '@opentelemetry/api'; -import { Accumulation, AccumulationRecord, Aggregator } from '../aggregator/types'; +import { + Accumulation, + AccumulationRecord, + Aggregator, +} from '../aggregator/types'; import { MetricData } from '../export/MetricData'; import { InstrumentDescriptor } from '../InstrumentDescriptor'; import { AggregationTemporality } from '../export/AggregationTemporality'; @@ -48,8 +52,14 @@ interface LastReportedHistory> { * of metrics and reports given temporality values. 
*/ export class TemporalMetricProcessor> { - private _unreportedAccumulations = new Map[]>(); - private _reportHistory = new Map>(); + private _unreportedAccumulations = new Map< + MetricCollectorHandle, + AttributeHashMap[] + >(); + private _reportHistory = new Map< + MetricCollectorHandle, + LastReportedHistory + >(); constructor(private _aggregator: Aggregator) {} @@ -67,10 +77,11 @@ export class TemporalMetricProcessor> { collectors: MetricCollectorHandle[], instrumentDescriptor: InstrumentDescriptor, currentAccumulations: AttributeHashMap, - collectionTime: HrTime, + collectionTime: HrTime ): Maybe { this._stashAccumulations(collectors, currentAccumulations); - const unreportedAccumulations = this._getMergedUnreportedAccumulations(collector); + const unreportedAccumulations = + this._getMergedUnreportedAccumulations(collector); let result = unreportedAccumulations; let aggregationTemporality: AggregationTemporality; @@ -96,13 +107,23 @@ export class TemporalMetricProcessor> { if (aggregationTemporality === AggregationTemporality.CUMULATIVE) { // We need to make sure the current delta recording gets merged into the previous cumulative // for the next cumulative recording. - result = TemporalMetricProcessor.merge(last.accumulations, unreportedAccumulations, this._aggregator); + result = TemporalMetricProcessor.merge( + last.accumulations, + unreportedAccumulations, + this._aggregator + ); } else { - result = TemporalMetricProcessor.calibrateStartTime(last.accumulations, unreportedAccumulations, lastCollectionTime); + result = TemporalMetricProcessor.calibrateStartTime( + last.accumulations, + unreportedAccumulations, + lastCollectionTime + ); } } else { // Call into user code to select aggregation temporality for the instrument. - aggregationTemporality = collector.selectAggregationTemporality(instrumentDescriptor.type); + aggregationTemporality = collector.selectAggregationTemporality( + instrumentDescriptor.type + ); } // Update last reported (cumulative) accumulation. @@ -116,10 +137,14 @@ export class TemporalMetricProcessor> { instrumentDescriptor, aggregationTemporality, AttributesMapToAccumulationRecords(result), - /* endTime */ collectionTime); + /* endTime */ collectionTime + ); } - private _stashAccumulations(collectors: MetricCollectorHandle[], currentAccumulation: AttributeHashMap) { + private _stashAccumulations( + collectors: MetricCollectorHandle[], + currentAccumulation: AttributeHashMap + ) { collectors.forEach(it => { let stash = this._unreportedAccumulations.get(it); if (stash === undefined) { @@ -143,7 +168,11 @@ export class TemporalMetricProcessor> { return result; } - static merge>(last: AttributeHashMap, current: AttributeHashMap, aggregator: Aggregator) { + static merge>( + last: AttributeHashMap, + current: AttributeHashMap, + aggregator: Aggregator + ) { const result = last; const iterator = current.entries(); let next = iterator.next(); @@ -168,7 +197,11 @@ export class TemporalMetricProcessor> { * Calibrate the reported metric streams' startTime to lastCollectionTime. Leaves * the new stream to be the initial observation time unchanged. 
*/ - static calibrateStartTime>(last: AttributeHashMap, current: AttributeHashMap, lastCollectionTime: HrTime) { + static calibrateStartTime>( + last: AttributeHashMap, + current: AttributeHashMap, + lastCollectionTime: HrTime + ) { for (const [key, hash] of last.keys()) { const currentAccumulation = current.get(key, hash); currentAccumulation?.setStartTime(lastCollectionTime); @@ -178,6 +211,8 @@ export class TemporalMetricProcessor> { } // TypeScript complains about converting 3 elements tuple to AccumulationRecord. -function AttributesMapToAccumulationRecords(map: AttributeHashMap): AccumulationRecord[] { +function AttributesMapToAccumulationRecords( + map: AttributeHashMap +): AccumulationRecord[] { return Array.from(map.entries()) as unknown as AccumulationRecord[]; } diff --git a/packages/sdk-metrics/src/state/WritableMetricStorage.ts b/packages/sdk-metrics/src/state/WritableMetricStorage.ts index 72945cd01d..223f34d9e9 100644 --- a/packages/sdk-metrics/src/state/WritableMetricStorage.ts +++ b/packages/sdk-metrics/src/state/WritableMetricStorage.ts @@ -25,7 +25,12 @@ import { AttributeHashMap } from './HashMap'; */ export interface WritableMetricStorage { /** Records a measurement. */ - record(value: number, attributes: MetricAttributes, context: Context, recordTime: HrTime): void; + record( + value: number, + attributes: MetricAttributes, + context: Context, + recordTime: HrTime + ): void; } /** diff --git a/packages/sdk-metrics/src/types.ts b/packages/sdk-metrics/src/types.ts index 84f6fc354e..89af1eea33 100644 --- a/packages/sdk-metrics/src/types.ts +++ b/packages/sdk-metrics/src/types.ts @@ -15,7 +15,7 @@ */ export type CommonReaderOptions = { - timeoutMillis?: number + timeoutMillis?: number; }; export type CollectionOptions = CommonReaderOptions; diff --git a/packages/sdk-metrics/src/utils.ts b/packages/sdk-metrics/src/utils.ts index 532e05d227..c2bc440849 100644 --- a/packages/sdk-metrics/src/utils.ts +++ b/packages/sdk-metrics/src/utils.ts @@ -40,8 +40,12 @@ export function hashAttributes(attributes: MetricAttributes): string { * Converting the instrumentation scope object to a unique identifier string. * @param instrumentationScope */ -export function instrumentationScopeId(instrumentationScope: InstrumentationScope): string { - return `${instrumentationScope.name}:${instrumentationScope.version ?? ''}:${instrumentationScope.schemaUrl ?? ''}`; +export function instrumentationScopeId( + instrumentationScope: InstrumentationScope +): string { + return `${instrumentationScope.name}:${instrumentationScope.version ?? ''}:${ + instrumentationScope.schemaUrl ?? '' + }`; } /** @@ -66,26 +70,31 @@ export class TimeoutError extends Error { * @param promise promise to use with timeout. * @param timeout the timeout in milliseconds until the returned promise is rejected. 
*/ -export function callWithTimeout(promise: Promise, timeout: number): Promise { +export function callWithTimeout( + promise: Promise, + timeout: number +): Promise { let timeoutHandle: ReturnType; - const timeoutPromise = new Promise(function timeoutFunction(_resolve, reject) { - timeoutHandle = setTimeout( - function timeoutHandler() { - reject(new TimeoutError('Operation timed out.')); - }, - timeout - ); + const timeoutPromise = new Promise(function timeoutFunction( + _resolve, + reject + ) { + timeoutHandle = setTimeout(function timeoutHandler() { + reject(new TimeoutError('Operation timed out.')); + }, timeout); }); - return Promise.race([promise, timeoutPromise]).then(result => { - clearTimeout(timeoutHandle); - return result; - }, - reason => { - clearTimeout(timeoutHandle); - throw reason; - }); + return Promise.race([promise, timeoutPromise]).then( + result => { + clearTimeout(timeoutHandle); + return result; + }, + reason => { + clearTimeout(timeoutHandle); + throw reason; + } + ); } export interface PromiseAllSettledFulfillResult { @@ -98,29 +107,37 @@ export interface PromiseAllSettledRejectionResult { reason: unknown; } -export type PromiseAllSettledResult = PromiseAllSettledFulfillResult | PromiseAllSettledRejectionResult; +export type PromiseAllSettledResult = + | PromiseAllSettledFulfillResult + | PromiseAllSettledRejectionResult; /** * Node.js v12.9 lower and browser compatible `Promise.allSettled`. */ -export async function PromiseAllSettled(promises: Promise[]): Promise[]> { - return Promise.all(promises.map>>(async p => { - try { - const ret = await p; - return { - status: 'fulfilled', - value: ret, - }; - } catch (e) { - return { - status: 'rejected', - reason: e, - }; - } - })); +export async function PromiseAllSettled( + promises: Promise[] +): Promise[]> { + return Promise.all( + promises.map>>(async p => { + try { + const ret = await p; + return { + status: 'fulfilled', + value: ret, + }; + } catch (e) { + return { + status: 'rejected', + reason: e, + }; + } + }) + ); } -export function isPromiseAllSettledRejectionResult(it: PromiseAllSettledResult): it is PromiseAllSettledRejectionResult { +export function isPromiseAllSettledRejectionResult( + it: PromiseAllSettledResult +): it is PromiseAllSettledRejectionResult { return it.status === 'rejected'; } diff --git a/packages/sdk-metrics/src/view/Aggregation.ts b/packages/sdk-metrics/src/view/Aggregation.ts index ed6cdbb50a..1b73b7c0a4 100644 --- a/packages/sdk-metrics/src/view/Aggregation.ts +++ b/packages/sdk-metrics/src/view/Aggregation.ts @@ -15,7 +15,13 @@ */ import * as api from '@opentelemetry/api'; -import { Aggregator, SumAggregator, DropAggregator, LastValueAggregator, HistogramAggregator } from '../aggregator'; +import { + Aggregator, + SumAggregator, + DropAggregator, + LastValueAggregator, + HistogramAggregator, +} from '../aggregator'; import { Accumulation } from '../aggregator/types'; import { InstrumentDescriptor, InstrumentType } from '../InstrumentDescriptor'; import { Maybe } from '../utils'; @@ -26,7 +32,9 @@ import { Maybe } from '../utils'; * Aggregation provides a set of built-in aggregations via static methods. */ export abstract class Aggregation { - abstract createAggregator(instrument: InstrumentDescriptor): Aggregator>; + abstract createAggregator( + instrument: InstrumentDescriptor + ): Aggregator>; static Drop(): Aggregation { return DROP_AGGREGATION; @@ -93,7 +101,10 @@ export class LastValueAggregation extends Aggregation { * The default histogram aggregation. 
*/ export class HistogramAggregation extends Aggregation { - private static DEFAULT_INSTANCE = new HistogramAggregator([0, 5, 10, 25, 50, 75, 100, 250, 500, 1000], true); + private static DEFAULT_INSTANCE = new HistogramAggregator( + [0, 5, 10, 25, 50, 75, 100, 250, 500, 1000], + true + ); createAggregator(_instrument: InstrumentDescriptor) { return HistogramAggregation.DEFAULT_INSTANCE; } @@ -157,7 +168,9 @@ export class DefaultAggregation extends Aggregation { return DROP_AGGREGATION; } - createAggregator(instrument: InstrumentDescriptor): Aggregator> { + createAggregator( + instrument: InstrumentDescriptor + ): Aggregator> { return this._resolve(instrument).createAggregator(instrument); } } diff --git a/packages/sdk-metrics/src/view/AttributesProcessor.ts b/packages/sdk-metrics/src/view/AttributesProcessor.ts index 96858c6428..53f2cddaff 100644 --- a/packages/sdk-metrics/src/view/AttributesProcessor.ts +++ b/packages/sdk-metrics/src/view/AttributesProcessor.ts @@ -29,7 +29,10 @@ export abstract class AttributesProcessor { * @param context The active context when the instrument is synchronous. * `undefined` otherwise. */ - abstract process(incoming: MetricAttributes, context?: Context): MetricAttributes; + abstract process( + incoming: MetricAttributes, + context?: Context + ): MetricAttributes; static Noop() { return NOOP; @@ -54,10 +57,15 @@ export class FilteringAttributesProcessor extends AttributesProcessor { process(incoming: MetricAttributes, _context: Context): MetricAttributes { const filteredAttributes: MetricAttributes = {}; Object.keys(incoming) - .filter(attributeName => this._allowedAttributeNames.includes(attributeName)) - .forEach(attributeName => filteredAttributes[attributeName] = incoming[attributeName]); + .filter(attributeName => + this._allowedAttributeNames.includes(attributeName) + ) + .forEach( + attributeName => + (filteredAttributes[attributeName] = incoming[attributeName]) + ); return filteredAttributes; } } -const NOOP = new NoopAttributesProcessor; +const NOOP = new NoopAttributesProcessor(); diff --git a/packages/sdk-metrics/src/view/Predicate.ts b/packages/sdk-metrics/src/view/Predicate.ts index c12c32bbb2..49e3b4821b 100644 --- a/packages/sdk-metrics/src/view/Predicate.ts +++ b/packages/sdk-metrics/src/view/Predicate.ts @@ -52,7 +52,7 @@ export class PatternPredicate implements Predicate { return `^${pattern.replace(ESCAPE, '\\$&').replace('*', '.*')}$`; } - static hasWildcard(pattern: string): boolean{ + static hasWildcard(pattern: string): boolean { return pattern.includes('*'); } } diff --git a/packages/sdk-metrics/src/view/RegistrationConflicts.ts b/packages/sdk-metrics/src/view/RegistrationConflicts.ts index bc0063fe21..e74add7334 100644 --- a/packages/sdk-metrics/src/view/RegistrationConflicts.ts +++ b/packages/sdk-metrics/src/view/RegistrationConflicts.ts @@ -17,7 +17,10 @@ import { InstrumentSelectorCriteria } from './InstrumentSelector'; import { InstrumentDescriptor } from '../InstrumentDescriptor'; -export function getIncompatibilityDetails(existing: InstrumentDescriptor, otherDescriptor: InstrumentDescriptor) { +export function getIncompatibilityDetails( + existing: InstrumentDescriptor, + otherDescriptor: InstrumentDescriptor +) { let incompatibility = ''; if (existing.unit !== otherDescriptor.unit) { incompatibility += `\t- Unit '${existing.unit}' does not match '${otherDescriptor.unit}'\n`; @@ -35,18 +38,27 @@ export function getIncompatibilityDetails(existing: InstrumentDescriptor, otherD return incompatibility; } -export function 
getValueTypeConflictResolutionRecipe(existing: InstrumentDescriptor, otherDescriptor: InstrumentDescriptor) { +export function getValueTypeConflictResolutionRecipe( + existing: InstrumentDescriptor, + otherDescriptor: InstrumentDescriptor +) { return `\t- use valueType '${existing.valueType}' on instrument creation or use an instrument name other than '${otherDescriptor.name}'`; } -export function getUnitConflictResolutionRecipe(existing: InstrumentDescriptor, otherDescriptor: InstrumentDescriptor) { +export function getUnitConflictResolutionRecipe( + existing: InstrumentDescriptor, + otherDescriptor: InstrumentDescriptor +) { return `\t- use unit '${existing.unit}' on instrument creation or use an instrument name other than '${otherDescriptor.name}'`; } -export function getTypeConflictResolutionRecipe(existing: InstrumentDescriptor, otherDescriptor: InstrumentDescriptor) { +export function getTypeConflictResolutionRecipe( + existing: InstrumentDescriptor, + otherDescriptor: InstrumentDescriptor +) { const selector: InstrumentSelectorCriteria = { name: otherDescriptor.name, - type: otherDescriptor.type + type: otherDescriptor.type, }; const selectorString = JSON.stringify(selector); @@ -54,10 +66,13 @@ export function getTypeConflictResolutionRecipe(existing: InstrumentDescriptor, return `\t- create a new view with a name other than '${existing.name}' and InstrumentSelector '${selectorString}'`; } -export function getDescriptionResolutionRecipe(existing: InstrumentDescriptor, otherDescriptor: InstrumentDescriptor): string { +export function getDescriptionResolutionRecipe( + existing: InstrumentDescriptor, + otherDescriptor: InstrumentDescriptor +): string { const selector: InstrumentSelectorCriteria = { name: otherDescriptor.name, - type: otherDescriptor.type + type: otherDescriptor.type, }; const selectorString = JSON.stringify(selector); @@ -67,7 +82,10 @@ export function getDescriptionResolutionRecipe(existing: InstrumentDescriptor, o \t- OR - create a new view with the name ${otherDescriptor.name} and description '${existing.description}' and InstrumentSelector ${selectorString}`; } -export function getConflictResolutionRecipe(existing: InstrumentDescriptor, otherDescriptor: InstrumentDescriptor): string { +export function getConflictResolutionRecipe( + existing: InstrumentDescriptor, + otherDescriptor: InstrumentDescriptor +): string { // Conflicts that cannot be solved via views. 
if (existing.valueType !== otherDescriptor.valueType) { return getValueTypeConflictResolutionRecipe(existing, otherDescriptor); diff --git a/packages/sdk-metrics/src/view/View.ts b/packages/sdk-metrics/src/view/View.ts index ecc3f92c5c..398f936536 100644 --- a/packages/sdk-metrics/src/view/View.ts +++ b/packages/sdk-metrics/src/view/View.ts @@ -15,7 +15,10 @@ */ import { PatternPredicate } from './Predicate'; -import { AttributesProcessor, FilteringAttributesProcessor } from './AttributesProcessor'; +import { + AttributesProcessor, + FilteringAttributesProcessor, +} from './AttributesProcessor'; import { InstrumentSelector } from './InstrumentSelector'; import { MeterSelector } from './MeterSelector'; import { Aggregation } from './Aggregation'; @@ -107,11 +110,13 @@ export type ViewOptions = { }; function isSelectorNotProvided(options: ViewOptions): boolean { - return (options.instrumentName == null && + return ( + options.instrumentName == null && options.instrumentType == null && options.meterName == null && options.meterVersion == null && - options.meterSchemaUrl == null); + options.meterSchemaUrl == null + ); } /** @@ -183,15 +188,21 @@ export class View { // the SDK SHOULD NOT allow Views with a specified name to be declared with instrument selectors that // may select more than one instrument (e.g. wild card instrument name) in the same Meter. - if (viewOptions.name != null && + if ( + viewOptions.name != null && (viewOptions?.instrumentName == null || - PatternPredicate.hasWildcard(viewOptions.instrumentName))) { - throw new Error('Views with a specified name must be declared with an instrument selector that selects at most one instrument per meter.'); + PatternPredicate.hasWildcard(viewOptions.instrumentName)) + ) { + throw new Error( + 'Views with a specified name must be declared with an instrument selector that selects at most one instrument per meter.' + ); } // Create AttributesProcessor if attributeKeys are defined set. 
if (viewOptions.attributeKeys != null) { - this.attributesProcessor = new FilteringAttributesProcessor(viewOptions.attributeKeys); + this.attributesProcessor = new FilteringAttributesProcessor( + viewOptions.attributeKeys + ); } else { this.attributesProcessor = AttributesProcessor.Noop(); } @@ -206,7 +217,7 @@ export class View { this.meterSelector = new MeterSelector({ name: viewOptions.meterName, version: viewOptions.meterVersion, - schemaUrl: viewOptions.meterSchemaUrl + schemaUrl: viewOptions.meterSchemaUrl, }); } } diff --git a/packages/sdk-metrics/src/view/ViewRegistry.ts b/packages/sdk-metrics/src/view/ViewRegistry.ts index 1dcaf7d22f..265f699bf9 100644 --- a/packages/sdk-metrics/src/view/ViewRegistry.ts +++ b/packages/sdk-metrics/src/view/ViewRegistry.ts @@ -27,24 +27,41 @@ export class ViewRegistry { this._registeredViews.push(view); } - findViews(instrument: InstrumentDescriptor, meter: InstrumentationScope): View[] { - const views = this._registeredViews - .filter(registeredView => { - return this._matchInstrument(registeredView.instrumentSelector, instrument) && - this._matchMeter(registeredView.meterSelector, meter); - }); + findViews( + instrument: InstrumentDescriptor, + meter: InstrumentationScope + ): View[] { + const views = this._registeredViews.filter(registeredView => { + return ( + this._matchInstrument(registeredView.instrumentSelector, instrument) && + this._matchMeter(registeredView.meterSelector, meter) + ); + }); return views; } - private _matchInstrument(selector: InstrumentSelector, instrument: InstrumentDescriptor): boolean { - return (selector.getType() === undefined || instrument.type === selector.getType()) && - selector.getNameFilter().match(instrument.name); + private _matchInstrument( + selector: InstrumentSelector, + instrument: InstrumentDescriptor + ): boolean { + return ( + (selector.getType() === undefined || + instrument.type === selector.getType()) && + selector.getNameFilter().match(instrument.name) + ); } - private _matchMeter(selector: MeterSelector, meter: InstrumentationScope): boolean { - return selector.getNameFilter().match(meter.name) && - (meter.version === undefined || selector.getVersionFilter().match(meter.version)) && - (meter.schemaUrl === undefined || selector.getSchemaUrlFilter().match(meter.schemaUrl)); + private _matchMeter( + selector: MeterSelector, + meter: InstrumentationScope + ): boolean { + return ( + selector.getNameFilter().match(meter.name) && + (meter.version === undefined || + selector.getVersionFilter().match(meter.version)) && + (meter.schemaUrl === undefined || + selector.getSchemaUrlFilter().match(meter.schemaUrl)) + ); } } diff --git a/packages/sdk-metrics/test/ExemplarFilter.test.ts b/packages/sdk-metrics/test/ExemplarFilter.test.ts index 7365bbbd60..df472fe941 100644 --- a/packages/sdk-metrics/test/ExemplarFilter.test.ts +++ b/packages/sdk-metrics/test/ExemplarFilter.test.ts @@ -15,15 +15,19 @@ */ import * as assert from 'assert'; -import { ROOT_CONTEXT, SpanContext, TraceFlags, trace } from '@opentelemetry/api'; +import { + ROOT_CONTEXT, + SpanContext, + TraceFlags, + trace, +} from '@opentelemetry/api'; import { AlwaysSampleExemplarFilter, NeverSampleExemplarFilter, - WithTraceExemplarFilter + WithTraceExemplarFilter, } from '../src/exemplar/'; - describe('ExemplarFilter', () => { const TRACE_ID = 'd4cda95b652f4a1592b449d5929fda1b'; const SPAN_ID = '6e0c63257de34c92'; @@ -31,14 +35,20 @@ describe('ExemplarFilter', () => { describe('AlwaysSampleExemplarFilter', () => { it('should return true always for 
shouldSample', () => { const filter = new AlwaysSampleExemplarFilter(); - assert.strictEqual(filter.shouldSample(10, [0, 0], {}, ROOT_CONTEXT), true); + assert.strictEqual( + filter.shouldSample(10, [0, 0], {}, ROOT_CONTEXT), + true + ); }); }); describe('NeverSampleExemplarFilter', () => { it('should return false always for shouldSample', () => { const filter = new NeverSampleExemplarFilter(); - assert.strictEqual(filter.shouldSample(1, [0, 0], {}, ROOT_CONTEXT), false); + assert.strictEqual( + filter.shouldSample(1, [0, 0], {}, ROOT_CONTEXT), + false + ); }); }); @@ -51,7 +61,7 @@ describe('ExemplarFilter', () => { traceFlags: TraceFlags.NONE, }; const ctx = trace.setSpanContext(ROOT_CONTEXT, spanContext); - assert.strictEqual(filter.shouldSample(5.3, [0, 0,], {}, ctx), false); + assert.strictEqual(filter.shouldSample(5.3, [0, 0], {}, ctx), false); }); it('should return true for shouldSample when the trace is sampled', () => { @@ -62,7 +72,7 @@ describe('ExemplarFilter', () => { traceFlags: TraceFlags.SAMPLED, }; const ctx = trace.setSpanContext(ROOT_CONTEXT, spanContext); - assert.strictEqual(filter.shouldSample(5.3, [0, 0,], {}, ctx), true); + assert.strictEqual(filter.shouldSample(5.3, [0, 0], {}, ctx), true); }); }); }); diff --git a/packages/sdk-metrics/test/ExemplarReservoir.test.ts b/packages/sdk-metrics/test/ExemplarReservoir.test.ts index d6917c172f..8da5006e3d 100644 --- a/packages/sdk-metrics/test/ExemplarReservoir.test.ts +++ b/packages/sdk-metrics/test/ExemplarReservoir.test.ts @@ -14,7 +14,12 @@ * limitations under the License. */ -import { ROOT_CONTEXT, SpanContext, TraceFlags, trace } from '@opentelemetry/api'; +import { + ROOT_CONTEXT, + SpanContext, + TraceFlags, + trace, +} from '@opentelemetry/api'; import { hrTime } from '@opentelemetry/core'; import * as assert from 'assert'; @@ -24,7 +29,6 @@ import { } from '../src/exemplar'; describe('ExemplarReservoir', () => { - const TRACE_ID = 'd4cda95b652f4a1592b449d5929fda1b'; const SPAN_ID = '6e0c63257de34c92'; @@ -49,28 +53,37 @@ describe('ExemplarReservoir', () => { assert.strictEqual(exemplars[0].traceId, TRACE_ID); assert.strictEqual(exemplars[0].spanId, SPAN_ID); }); - }); it('should filter the attributes', () => { const reservoir = new SimpleFixedSizeExemplarReservoir(1); - reservoir.offer(1, hrTime(), {'key1': 'value1', 'key2': 'value2'}, ROOT_CONTEXT); - const exemplars = reservoir.collect({'key2': 'value2', 'key3': 'value3'}); - assert.notStrictEqual(exemplars[0].filteredAttributes, {'key1': 'value1'}); + reservoir.offer( + 1, + hrTime(), + { key1: 'value1', key2: 'value2' }, + ROOT_CONTEXT + ); + const exemplars = reservoir.collect({ key2: 'value2', key3: 'value3' }); + assert.notStrictEqual(exemplars[0].filteredAttributes, { key1: 'value1' }); }); describe('AlignedHistogramBucketExemplarReservoir', () => { it('should put measurements into buckets', () => { - const reservoir = new AlignedHistogramBucketExemplarReservoir([0, 5, 10, 25, 50, 75]); - reservoir.offer(52, hrTime(), {'bucket': '5'}, ROOT_CONTEXT); - reservoir.offer(7, hrTime(), {'bucket': '3'}, ROOT_CONTEXT); - reservoir.offer(6, hrTime(), {'bucket': '3'}, ROOT_CONTEXT); - const exemplars = reservoir.collect({'bucket': '3'}); + const reservoir = new AlignedHistogramBucketExemplarReservoir([ + 0, 5, 10, 25, 50, 75, + ]); + reservoir.offer(52, hrTime(), { bucket: '5' }, ROOT_CONTEXT); + reservoir.offer(7, hrTime(), { bucket: '3' }, ROOT_CONTEXT); + reservoir.offer(6, hrTime(), { bucket: '3' }, ROOT_CONTEXT); + const exemplars = reservoir.collect({ bucket: 
'3' }); assert.strictEqual(exemplars.length, 2); assert.strictEqual(exemplars[0].value, 6); - assert.strictEqual(Object.keys(exemplars[0].filteredAttributes).length, 0); + assert.strictEqual( + Object.keys(exemplars[0].filteredAttributes).length, + 0 + ); assert.strictEqual(exemplars[1].value, 52); - assert.notStrictEqual(exemplars[1].filteredAttributes, {'bucket':'5'}); + assert.notStrictEqual(exemplars[1].filteredAttributes, { bucket: '5' }); }); }); }); diff --git a/packages/sdk-metrics/test/InstrumentDescriptor.test.ts b/packages/sdk-metrics/test/InstrumentDescriptor.test.ts index 22397faea4..7d8796cbf1 100644 --- a/packages/sdk-metrics/test/InstrumentDescriptor.test.ts +++ b/packages/sdk-metrics/test/InstrumentDescriptor.test.ts @@ -15,15 +15,22 @@ */ import * as assert from 'assert'; -import {createInstrumentDescriptor, InstrumentType} from '../src/InstrumentDescriptor'; +import { + createInstrumentDescriptor, + InstrumentType, +} from '../src/InstrumentDescriptor'; describe('InstrumentDescriptor', () => { describe('createInstrumentDescriptor', () => { for (const val of [null, undefined]) { it(`should interpret an empty unit value as a blank string (${val})`, () => { - const result = createInstrumentDescriptor('example', InstrumentType.COUNTER, { - unit: val as any, - }); + const result = createInstrumentDescriptor( + 'example', + InstrumentType.COUNTER, + { + unit: val as any, + } + ); assert.strictEqual(result.unit, ''); }); } diff --git a/packages/sdk-metrics/test/Instruments.test.ts b/packages/sdk-metrics/test/Instruments.test.ts index 1faa5b64a1..ba0f86bdd4 100644 --- a/packages/sdk-metrics/test/Instruments.test.ts +++ b/packages/sdk-metrics/test/Instruments.test.ts @@ -25,16 +25,19 @@ import { MetricReader, DataPoint, DataPointType, - Histogram + Histogram, } from '../src'; -import { TestDeltaMetricReader, TestMetricReader } from './export/TestMetricReader'; +import { + TestDeltaMetricReader, + TestMetricReader, +} from './export/TestMetricReader'; import { assertMetricData, assertDataPoint, commonValues, commonAttributes, defaultResource, - defaultInstrumentationScope + defaultInstrumentationScope, } from './util'; import { ObservableResult, ValueType } from '@opentelemetry/api'; @@ -212,7 +215,7 @@ describe('Instruments', () => { { attributes: { foo: 'bar' }, value: 5, - } + }, ], }); }); @@ -238,7 +241,7 @@ describe('Instruments', () => { { attributes: { foo: 'bar' }, value: 5.1, - } + }, ], }); }); @@ -254,7 +257,6 @@ describe('Instruments', () => { for (const values of commonValues) { for (const attributes of commonAttributes) { - histogram.record(values, attributes); } } @@ -301,7 +303,7 @@ describe('Instruments', () => { count: 2, sum: 10, max: 10, - min: 0 + min: 0, }, }, { @@ -314,7 +316,7 @@ describe('Instruments', () => { count: 2, sum: 100, max: 100, - min: 0 + min: 0, }, }, ], @@ -356,9 +358,9 @@ describe('Instruments', () => { count: 2, sum: 110, min: 20, - max: 90 + max: 90, }, - } + }, ], }); @@ -383,9 +385,9 @@ describe('Instruments', () => { count: 4, sum: 220, min: 10, - max: 100 + max: 100, }, - } + }, ], }); }); @@ -426,7 +428,7 @@ describe('Instruments', () => { count: 2, sum: 10.1, max: 10, - min: 0.1 + min: 0.1, }, }, { @@ -439,7 +441,7 @@ describe('Instruments', () => { count: 2, sum: 100.1, max: 100, - min: 0.1 + min: 0.1, }, }, ], @@ -527,7 +529,8 @@ describe('Instruments', () => { } } }); - const observableUpDownCounter = meter.createObservableUpDownCounter('test'); + const observableUpDownCounter = + meter.createObservableUpDownCounter('test'); 
observableUpDownCounter.addCallback(callback); await deltaReader.collect(); @@ -537,7 +540,8 @@ describe('Instruments', () => { it('should observe values', async () => { const { meter, cumulativeReader } = setup(); let callCount = 0; - const observableUpDownCounter = meter.createObservableUpDownCounter('test'); + const observableUpDownCounter = + meter.createObservableUpDownCounter('test'); observableUpDownCounter.addCallback(observableResult => { observableResult.observe(++callCount); observableResult.observe(1, { foo: 'bar' }); @@ -650,9 +654,13 @@ describe('Instruments', () => { function setup() { const meterProvider = new MeterProvider({ resource: defaultResource }); - const meter = meterProvider.getMeter(defaultInstrumentationScope.name, defaultInstrumentationScope.version, { - schemaUrl: defaultInstrumentationScope.schemaUrl, - }); + const meter = meterProvider.getMeter( + defaultInstrumentationScope.name, + defaultInstrumentationScope.version, + { + schemaUrl: defaultInstrumentationScope.schemaUrl, + } + ); const deltaReader = new TestDeltaMetricReader(); meterProvider.addMetricReader(deltaReader); const cumulativeReader = new TestMetricReader(); @@ -675,7 +683,10 @@ interface ValidateMetricData { isMonotonic?: boolean; } -async function validateExport(reader: MetricReader, expected: ValidateMetricData) { +async function validateExport( + reader: MetricReader, + expected: ValidateMetricData +) { const { resourceMetrics, errors } = await reader.collect(); assert.strictEqual(errors.length, 0); @@ -690,11 +701,7 @@ async function validateExport(reader: MetricReader, expected: ValidateMetricData const metric = metrics[0]; - assertMetricData( - metric, - expected.dataPointType, - expected.descriptor ?? null, - ); + assertMetricData(metric, expected.dataPointType, expected.descriptor ?? 
null); if (expected.dataPoints == null) { return; diff --git a/packages/sdk-metrics/test/Meter.test.ts b/packages/sdk-metrics/test/Meter.test.ts index c51c0baa52..4ffe41607d 100644 --- a/packages/sdk-metrics/test/Meter.test.ts +++ b/packages/sdk-metrics/test/Meter.test.ts @@ -34,7 +34,8 @@ describe('Meter', () => { it('should create counter', () => { const meterSharedState = new MeterSharedState( new MeterProviderSharedState(defaultResource), - defaultInstrumentationScope); + defaultInstrumentationScope + ); const meter = new Meter(meterSharedState); const counter = meter.createCounter('foobar'); assert(counter instanceof CounterInstrument); @@ -45,7 +46,8 @@ describe('Meter', () => { it('should create up down counter', () => { const meterSharedState = new MeterSharedState( new MeterProviderSharedState(defaultResource), - defaultInstrumentationScope); + defaultInstrumentationScope + ); const meter = new Meter(meterSharedState); const upDownCounter = meter.createUpDownCounter('foobar'); assert(upDownCounter instanceof UpDownCounterInstrument); @@ -56,7 +58,8 @@ describe('Meter', () => { it('should create histogram', () => { const meterSharedState = new MeterSharedState( new MeterProviderSharedState(defaultResource), - defaultInstrumentationScope); + defaultInstrumentationScope + ); const meter = new Meter(meterSharedState); const histogram = meter.createHistogram('foobar'); assert(histogram instanceof HistogramInstrument); @@ -67,7 +70,8 @@ describe('Meter', () => { it('should create observable gauge', () => { const meterSharedState = new MeterSharedState( new MeterProviderSharedState(defaultResource), - defaultInstrumentationScope); + defaultInstrumentationScope + ); const meter = new Meter(meterSharedState); const observableGauge = meter.createObservableGauge('foobar'); assert(observableGauge instanceof ObservableGaugeInstrument); @@ -78,7 +82,8 @@ describe('Meter', () => { it('should create observable counter', () => { const meterSharedState = new MeterSharedState( new MeterProviderSharedState(defaultResource), - defaultInstrumentationScope); + defaultInstrumentationScope + ); const meter = new Meter(meterSharedState); const observableCounter = meter.createObservableCounter('foobar'); assert(observableCounter instanceof ObservableCounterInstrument); @@ -89,10 +94,14 @@ describe('Meter', () => { it('should create observable up-down-counter', () => { const meterSharedState = new MeterSharedState( new MeterProviderSharedState(defaultResource), - defaultInstrumentationScope); + defaultInstrumentationScope + ); const meter = new Meter(meterSharedState); - const observableUpDownCounter = meter.createObservableUpDownCounter('foobar'); - assert(observableUpDownCounter instanceof ObservableUpDownCounterInstrument); + const observableUpDownCounter = + meter.createObservableUpDownCounter('foobar'); + assert( + observableUpDownCounter instanceof ObservableUpDownCounterInstrument + ); }); }); @@ -100,27 +109,30 @@ describe('Meter', () => { it('should register callback without exception', () => { const meterSharedState = new MeterSharedState( new MeterProviderSharedState(defaultResource), - defaultInstrumentationScope); + defaultInstrumentationScope + ); const meter = new Meter(meterSharedState); const observableGauge = meter.createObservableGauge('test-gauge'); const observableCounter = meter.createObservableCounter('test-counter'); - const observableUpDownCounter = meter.createObservableUpDownCounter('test-up-down-counter'); + const observableUpDownCounter = meter.createObservableUpDownCounter( + 
'test-up-down-counter' + ); - meter.addBatchObservableCallback(() => {}, [ observableGauge, observableCounter, observableUpDownCounter ]); + meter.addBatchObservableCallback(() => {}, [ + observableGauge, + observableCounter, + observableUpDownCounter, + ]); }); it('should be tolerant with unknown observables', () => { const meterSharedState = new MeterSharedState( new MeterProviderSharedState(defaultResource), - defaultInstrumentationScope); + defaultInstrumentationScope + ); const meter = new Meter(meterSharedState); - const observables = [ - {}, - 1, - 'foo', - Symbol(), - ] as unknown as Observable[]; + const observables = [{}, 1, 'foo', Symbol()] as unknown as Observable[]; meter.addBatchObservableCallback(() => {}, observables); }); }); @@ -129,15 +141,26 @@ describe('Meter', () => { it('should remove callback without exception', () => { const meterSharedState = new MeterSharedState( new MeterProviderSharedState(defaultResource), - defaultInstrumentationScope); + defaultInstrumentationScope + ); const meter = new Meter(meterSharedState); const observableGauge = meter.createObservableGauge('test-gauge'); const observableCounter = meter.createObservableCounter('test-counter'); - const observableUpDownCounter = meter.createObservableUpDownCounter('test-up-down-counter'); + const observableUpDownCounter = meter.createObservableUpDownCounter( + 'test-up-down-counter' + ); const callback = () => {}; - meter.addBatchObservableCallback(callback, [ observableGauge, observableCounter, observableUpDownCounter ]); - meter.removeBatchObservableCallback(callback, [ observableGauge, observableCounter, observableUpDownCounter ]); + meter.addBatchObservableCallback(callback, [ + observableGauge, + observableCounter, + observableUpDownCounter, + ]); + meter.removeBatchObservableCallback(callback, [ + observableGauge, + observableCounter, + observableUpDownCounter, + ]); // Remove a not registered callback. meter.removeBatchObservableCallback(() => {}, []); diff --git a/packages/sdk-metrics/test/MeterProvider.test.ts b/packages/sdk-metrics/test/MeterProvider.test.ts index a7e6ac12fb..48055695ed 100644 --- a/packages/sdk-metrics/test/MeterProvider.test.ts +++ b/packages/sdk-metrics/test/MeterProvider.test.ts @@ -20,7 +20,7 @@ import { assertScopeMetrics, assertMetricData, assertPartialDeepStrictEqual, - defaultResource + defaultResource, } from './util'; import { TestMetricReader } from './export/TestMetricReader'; import * as sinon from 'sinon'; @@ -82,8 +82,12 @@ describe('MeterProvider', () => { meterProvider.getMeter('meter1', 'v1.0.1'); meterProvider.getMeter('meter1', 'v1.0.1'); // name+version+schemaUrl pair 4 - meterProvider.getMeter('meter1', 'v1.0.1', { schemaUrl: 'https://opentelemetry.io/schemas/1.4.0' }); - meterProvider.getMeter('meter1', 'v1.0.1', { schemaUrl: 'https://opentelemetry.io/schemas/1.4.0' }); + meterProvider.getMeter('meter1', 'v1.0.1', { + schemaUrl: 'https://opentelemetry.io/schemas/1.4.0', + }); + meterProvider.getMeter('meter1', 'v1.0.1', { + schemaUrl: 'https://opentelemetry.io/schemas/1.4.0', + }); // Perform collection. const { resourceMetrics, errors } = await reader.collect(); @@ -95,15 +99,15 @@ describe('MeterProvider', () => { // InstrumentationScope matches from de-duplicated meters. 
assertScopeMetrics(resourceMetrics.scopeMetrics[0], { name: 'meter1', - version: 'v1.0.0' + version: 'v1.0.0', }); assertScopeMetrics(resourceMetrics.scopeMetrics[1], { name: 'meter2', - version: 'v1.0.0' + version: 'v1.0.0', }); assertScopeMetrics(resourceMetrics.scopeMetrics[2], { name: 'meter1', - version: 'v1.0.1' + version: 'v1.0.1', }); assertScopeMetrics(resourceMetrics.scopeMetrics[3], { name: 'meter1', @@ -123,8 +127,8 @@ describe('MeterProvider', () => { name: 'renamed-instrument', description: 'my renamed instrument', instrumentName: 'non-renamed-instrument', - }) - ] + }), + ], }); const reader = new TestMetricReader(); @@ -145,45 +149,54 @@ describe('MeterProvider', () => { // InstrumentationScope matches the only created Meter. assertScopeMetrics(resourceMetrics.scopeMetrics[0], { name: 'meter1', - version: 'v1.0.0' + version: 'v1.0.0', }); // Collected only one Metric. assert.strictEqual(resourceMetrics.scopeMetrics[0].metrics.length, 1); // View updated name and description. - assertMetricData(resourceMetrics.scopeMetrics[0].metrics[0], DataPointType.SUM, { - name: 'renamed-instrument', - type: InstrumentType.COUNTER, - description: 'my renamed instrument' - }); + assertMetricData( + resourceMetrics.scopeMetrics[0].metrics[0], + DataPointType.SUM, + { + name: 'renamed-instrument', + type: InstrumentType.COUNTER, + description: 'my renamed instrument', + } + ); // Only one DataPoint added. - assert.strictEqual(resourceMetrics.scopeMetrics[0].metrics[0].dataPoints.length, 1); + assert.strictEqual( + resourceMetrics.scopeMetrics[0].metrics[0].dataPoints.length, + 1 + ); // DataPoint matches attributes and point. - assertPartialDeepStrictEqual(resourceMetrics.scopeMetrics[0].metrics[0].dataPoints[0], { - // MetricAttributes are still there. - attributes: { - attrib1: 'attrib_value1', - attrib2: 'attrib_value2' - }, - // Value that has been added to the counter. - value: 1 - }); + assertPartialDeepStrictEqual( + resourceMetrics.scopeMetrics[0].metrics[0].dataPoints[0], + { + // MetricAttributes are still there. + attributes: { + attrib1: 'attrib_value1', + attrib2: 'attrib_value2', + }, + // Value that has been added to the counter. + value: 1, + } + ); }); it('with attributeKeys should drop non-listed attributes', async () => { - // Add view to drop all attributes except 'attrib1' const meterProvider = new MeterProvider({ resource: defaultResource, views: [ new View({ attributeKeys: ['attrib1'], - instrumentName: 'non-renamed-instrument' - }) - ] + instrumentName: 'non-renamed-instrument', + }), + ], }); const reader = new TestMetricReader(); @@ -204,30 +217,40 @@ describe('MeterProvider', () => { // InstrumentationScope matches the only created Meter. assertScopeMetrics(resourceMetrics.scopeMetrics[0], { name: 'meter1', - version: 'v1.0.0' + version: 'v1.0.0', }); // Collected only one Metric. assert.strictEqual(resourceMetrics.scopeMetrics[0].metrics.length, 1); // View updated name and description. - assertMetricData(resourceMetrics.scopeMetrics[0].metrics[0], DataPointType.SUM, { - name: 'non-renamed-instrument', - type: InstrumentType.COUNTER, - }); + assertMetricData( + resourceMetrics.scopeMetrics[0].metrics[0], + DataPointType.SUM, + { + name: 'non-renamed-instrument', + type: InstrumentType.COUNTER, + } + ); // Only one DataPoint added. - assert.strictEqual(resourceMetrics.scopeMetrics[0].metrics[0].dataPoints.length, 1); + assert.strictEqual( + resourceMetrics.scopeMetrics[0].metrics[0].dataPoints.length, + 1 + ); // DataPoint matches attributes and point. 
- assertPartialDeepStrictEqual(resourceMetrics.scopeMetrics[0].metrics[0].dataPoints[0], { - // 'attrib_1' is still here but 'attrib_2' is not. - attributes: { - attrib1: 'attrib_value1' - }, - // Value that has been added to the counter. - value: 1 - }); + assertPartialDeepStrictEqual( + resourceMetrics.scopeMetrics[0].metrics[0].dataPoints[0], + { + // 'attrib_1' is still here but 'attrib_2' is not. + attributes: { + attrib1: 'attrib_value1', + }, + // Value that has been added to the counter. + value: 1, + } + ); }); it('with no meter name should apply view to instruments of all meters', async () => { @@ -235,8 +258,11 @@ describe('MeterProvider', () => { const meterProvider = new MeterProvider({ resource: defaultResource, views: [ - new View({ name: 'renamed-instrument', instrumentName: 'test-counter' }) - ] + new View({ + name: 'renamed-instrument', + instrumentName: 'test-counter', + }), + ], }); const reader = new TestMetricReader(); @@ -264,32 +290,40 @@ describe('MeterProvider', () => { // First InstrumentationScope matches the first created Meter. assertScopeMetrics(resourceMetrics.scopeMetrics[0], { name: 'meter1', - version: 'v1.0.0' + version: 'v1.0.0', }); // Collected one Metric on 'meter1' assert.strictEqual(resourceMetrics.scopeMetrics[0].metrics.length, 1); // View updated the name to 'renamed-instrument' and instrument is still a Counter - assertMetricData(resourceMetrics.scopeMetrics[0].metrics[0], DataPointType.SUM, { - name: 'renamed-instrument', - type: InstrumentType.COUNTER, - }); + assertMetricData( + resourceMetrics.scopeMetrics[0].metrics[0], + DataPointType.SUM, + { + name: 'renamed-instrument', + type: InstrumentType.COUNTER, + } + ); // Second InstrumentationScope matches the second created Meter. assertScopeMetrics(resourceMetrics.scopeMetrics[1], { name: 'meter2', - version: 'v1.0.0' + version: 'v1.0.0', }); // Collected one Metric on 'meter2' assert.strictEqual(resourceMetrics.scopeMetrics[1].metrics.length, 1); // View updated the name to 'renamed-instrument' and instrument is still a Counter - assertMetricData(resourceMetrics.scopeMetrics[1].metrics[0], DataPointType.SUM, { - name: 'renamed-instrument', - type: InstrumentType.COUNTER - }); + assertMetricData( + resourceMetrics.scopeMetrics[1].metrics[0], + DataPointType.SUM, + { + name: 'renamed-instrument', + type: InstrumentType.COUNTER, + } + ); }); it('with meter name should apply view to only the selected meter', async () => { @@ -300,9 +334,9 @@ describe('MeterProvider', () => { new View({ name: 'renamed-instrument', instrumentName: 'test-counter', - meterName: 'meter1' - }) - ] + meterName: 'meter1', + }), + ], }); const reader = new TestMetricReader(); @@ -330,32 +364,40 @@ describe('MeterProvider', () => { // First InstrumentationScope matches the first created Meter. assertScopeMetrics(resourceMetrics.scopeMetrics[0], { name: 'meter1', - version: 'v1.0.0' + version: 'v1.0.0', }); // Collected one Metric on 'meter1' assert.strictEqual(resourceMetrics.scopeMetrics[0].metrics.length, 1); // View updated the name to 'renamed-instrument' and instrument is still a Counter - assertMetricData(resourceMetrics.scopeMetrics[0].metrics[0], DataPointType.SUM, { - name: 'renamed-instrument', - type: InstrumentType.COUNTER - }); + assertMetricData( + resourceMetrics.scopeMetrics[0].metrics[0], + DataPointType.SUM, + { + name: 'renamed-instrument', + type: InstrumentType.COUNTER, + } + ); // Second InstrumentationScope matches the second created Meter. 
assertScopeMetrics(resourceMetrics.scopeMetrics[1], { name: 'meter2', - version: 'v1.0.0' + version: 'v1.0.0', }); // Collected one Metric on 'meter2' assert.strictEqual(resourceMetrics.scopeMetrics[1].metrics.length, 1); // No updated name on 'test-counter'. - assertMetricData(resourceMetrics.scopeMetrics[1].metrics[0], DataPointType.SUM, { - name: 'test-counter', - type: InstrumentType.COUNTER - }); + assertMetricData( + resourceMetrics.scopeMetrics[1].metrics[0], + DataPointType.SUM, + { + name: 'test-counter', + type: InstrumentType.COUNTER, + } + ); }); it('with different instrument types does not throw', async () => { @@ -366,14 +408,14 @@ describe('MeterProvider', () => { new View({ name: 'renamed-instrument', instrumentName: 'test-counter', - meterName: 'meter1' + meterName: 'meter1', }), new View({ name: 'renamed-instrument', instrumentName: 'test-histogram', - meterName: 'meter1' - }) - ] + meterName: 'meter1', + }), + ], }); const reader = new TestMetricReader(); meterProvider.addMetricReader(reader); @@ -397,21 +439,29 @@ describe('MeterProvider', () => { // InstrumentationScope matches the only created Meter. assertScopeMetrics(resourceMetrics.scopeMetrics[0], { name: 'meter1', - version: 'v1.0.0' + version: 'v1.0.0', }); // Two metrics are collected ('renamed-instrument'-Counter and 'renamed-instrument'-Histogram) assert.strictEqual(resourceMetrics.scopeMetrics[0].metrics.length, 2); // Both 'renamed-instrument' are still exported with their types. - assertMetricData(resourceMetrics.scopeMetrics[0].metrics[0], DataPointType.SUM, { - name: 'renamed-instrument', - type: InstrumentType.COUNTER - }); - assertMetricData(resourceMetrics.scopeMetrics[0].metrics[1], DataPointType.HISTOGRAM, { - name: 'renamed-instrument', - type: InstrumentType.HISTOGRAM - }); + assertMetricData( + resourceMetrics.scopeMetrics[0].metrics[0], + DataPointType.SUM, + { + name: 'renamed-instrument', + type: InstrumentType.COUNTER, + } + ); + assertMetricData( + resourceMetrics.scopeMetrics[0].metrics[1], + DataPointType.HISTOGRAM, + { + name: 'renamed-instrument', + type: InstrumentType.HISTOGRAM, + } + ); }); }); @@ -431,9 +481,13 @@ describe('MeterProvider', () => { await meterProvider.shutdown(); assert.strictEqual(reader1ShutdownSpy.callCount, 1); - assert.deepStrictEqual(reader1ShutdownSpy.args[0][0], { timeoutMillis: 1234 }); + assert.deepStrictEqual(reader1ShutdownSpy.args[0][0], { + timeoutMillis: 1234, + }); assert.strictEqual(reader2ShutdownSpy.callCount, 1); - assert.deepStrictEqual(reader2ShutdownSpy.args[0][0], { timeoutMillis: 1234 }); + assert.deepStrictEqual(reader2ShutdownSpy.args[0][0], { + timeoutMillis: 1234, + }); }); }); @@ -451,11 +505,19 @@ describe('MeterProvider', () => { await meterProvider.forceFlush({ timeoutMillis: 1234 }); await meterProvider.forceFlush({ timeoutMillis: 5678 }); assert.strictEqual(reader1ForceFlushSpy.callCount, 2); - assert.deepStrictEqual(reader1ForceFlushSpy.args[0][0], { timeoutMillis: 1234 }); - assert.deepStrictEqual(reader1ForceFlushSpy.args[1][0], { timeoutMillis: 5678 }); + assert.deepStrictEqual(reader1ForceFlushSpy.args[0][0], { + timeoutMillis: 1234, + }); + assert.deepStrictEqual(reader1ForceFlushSpy.args[1][0], { + timeoutMillis: 5678, + }); assert.strictEqual(reader2ForceFlushSpy.callCount, 2); - assert.deepStrictEqual(reader2ForceFlushSpy.args[0][0], { timeoutMillis: 1234 }); - assert.deepStrictEqual(reader2ForceFlushSpy.args[1][0], { timeoutMillis: 5678 }); + assert.deepStrictEqual(reader2ForceFlushSpy.args[0][0], { + timeoutMillis: 1234, + 
}); + assert.deepStrictEqual(reader2ForceFlushSpy.args[1][0], { + timeoutMillis: 5678, + }); await meterProvider.shutdown(); await meterProvider.forceFlush(); diff --git a/packages/sdk-metrics/test/ObservableResult.test.ts b/packages/sdk-metrics/test/ObservableResult.test.ts index c425eafec7..f07b3f9f21 100644 --- a/packages/sdk-metrics/test/ObservableResult.test.ts +++ b/packages/sdk-metrics/test/ObservableResult.test.ts @@ -20,15 +20,21 @@ import { InstrumentType } from '../src'; import { ObservableInstrument } from '../src/Instruments'; import { BatchObservableResultImpl, - ObservableResultImpl + ObservableResultImpl, } from '../src/ObservableResult'; import { ObservableRegistry } from '../src/state/ObservableRegistry'; -import { commonAttributes, commonValues, defaultInstrumentDescriptor } from './util'; +import { + commonAttributes, + commonValues, + defaultInstrumentDescriptor, +} from './util'; describe('ObservableResultImpl', () => { describe('observe', () => { it('should observe common values', () => { - const observableResult = new ObservableResultImpl(defaultInstrumentDescriptor); + const observableResult = new ObservableResultImpl( + defaultInstrumentDescriptor + ); for (const value of commonValues) { for (const attributes of commonAttributes) { observableResult.observe(value, attributes); @@ -37,7 +43,9 @@ describe('ObservableResultImpl', () => { }); it('should deduplicate observations', () => { - const observableResult = new ObservableResultImpl(defaultInstrumentDescriptor); + const observableResult = new ObservableResultImpl( + defaultInstrumentDescriptor + ); observableResult.observe(1, {}); observableResult.observe(2, {}); @@ -64,7 +72,11 @@ describe('BatchObservableResultImpl', () => { describe('observe', () => { it('should observe common values', () => { const observableResult = new BatchObservableResultImpl(); - const observable = new ObservableInstrument(defaultInstrumentDescriptor, [], new ObservableRegistry()); + const observable = new ObservableInstrument( + defaultInstrumentDescriptor, + [], + new ObservableRegistry() + ); for (const value of commonValues) { for (const attributes of commonAttributes) { observableResult.observe(observable, value, attributes); @@ -75,8 +87,16 @@ describe('BatchObservableResultImpl', () => { it('should deduplicate observations', () => { const observableResult = new BatchObservableResultImpl(); const observableRegistry = new ObservableRegistry(); - const observable1 = new ObservableInstrument(defaultInstrumentDescriptor, [], observableRegistry); - const observable2 = new ObservableInstrument(defaultInstrumentDescriptor, [], observableRegistry); + const observable1 = new ObservableInstrument( + defaultInstrumentDescriptor, + [], + observableRegistry + ); + const observable2 = new ObservableInstrument( + defaultInstrumentDescriptor, + [], + observableRegistry + ); observableResult.observe(observable1, 1, {}); observableResult.observe(observable1, 2, {}); observableResult.observe(observable2, 4, {}); @@ -93,13 +113,17 @@ describe('BatchObservableResultImpl', () => { it('should trunc value if ValueType is INT', () => { const observableResult = new BatchObservableResultImpl(); - const observable = new ObservableInstrument({ - name: 'test', - description: '', - type: InstrumentType.COUNTER, - unit: '', - valueType: ValueType.INT, - }, [], new ObservableRegistry()); + const observable = new ObservableInstrument( + { + name: 'test', + description: '', + type: InstrumentType.COUNTER, + unit: '', + valueType: ValueType.INT, + }, + [], + new 
ObservableRegistry() + ); observableResult.observe(observable, 1.1, {}); assert.strictEqual(observableResult._buffer.get(observable)?.get({}), 1); diff --git a/packages/sdk-metrics/test/aggregator/Drop.test.ts b/packages/sdk-metrics/test/aggregator/Drop.test.ts index 5f83b69aed..b66ece90ac 100644 --- a/packages/sdk-metrics/test/aggregator/Drop.test.ts +++ b/packages/sdk-metrics/test/aggregator/Drop.test.ts @@ -53,12 +53,15 @@ describe('DropAggregator', () => { const endTime: HrTime = [1, 1]; - assert.strictEqual(aggregator.toMetricData( - defaultInstrumentDescriptor, - AggregationTemporality.CUMULATIVE, - [[{}, undefined]], - endTime, - ), undefined); + assert.strictEqual( + aggregator.toMetricData( + defaultInstrumentDescriptor, + AggregationTemporality.CUMULATIVE, + [[{}, undefined]], + endTime + ), + undefined + ); }); }); }); diff --git a/packages/sdk-metrics/test/aggregator/Histogram.test.ts b/packages/sdk-metrics/test/aggregator/Histogram.test.ts index 3dadaf8b1c..5d03477d3e 100644 --- a/packages/sdk-metrics/test/aggregator/Histogram.test.ts +++ b/packages/sdk-metrics/test/aggregator/Histogram.test.ts @@ -20,16 +20,13 @@ import { AggregationTemporality, InstrumentType, DataPointType, - MetricData + MetricData, } from '../../src'; import { HistogramAccumulation, - HistogramAggregator + HistogramAggregator, } from '../../src/aggregator'; -import { - commonValues, - defaultInstrumentDescriptor -} from '../util'; +import { commonValues, defaultInstrumentDescriptor } from '../util'; describe('HistogramAggregator', () => { describe('createAccumulation', () => { @@ -75,13 +72,13 @@ describe('HistogramAggregator', () => { assert.deepStrictEqual(aggregator.merge(prev, delta).toPointValue(), { buckets: { boundaries: [1, 10, 100], - counts: [4, 0, 0, 0] + counts: [4, 0, 0, 0], }, count: 4, hasMinMax: true, max: -5, min: -30, - sum: -65 + sum: -65, }); }); }); @@ -110,7 +107,7 @@ describe('HistogramAggregator', () => { sum: 13, hasMinMax: false, min: Infinity, - max: -Infinity + max: -Infinity, }); assert.deepStrictEqual(aggregator.diff(prev, curr), expected); @@ -144,17 +141,20 @@ describe('HistogramAggregator', () => { count: 2, sum: 1, min: 0, - max: 1 + max: 1, }, }, ], }; - assert.deepStrictEqual(aggregator.toMetricData( - defaultInstrumentDescriptor, - AggregationTemporality.CUMULATIVE, - [[{}, accumulation]], - endTime, - ), expected); + assert.deepStrictEqual( + aggregator.toMetricData( + defaultInstrumentDescriptor, + AggregationTemporality.CUMULATIVE, + [[{}, accumulation]], + endTime + ), + expected + ); }); it('should transform to expected data with recordMinMax = false', () => { @@ -183,17 +183,20 @@ describe('HistogramAggregator', () => { count: 2, sum: 1, min: undefined, - max: undefined + max: undefined, }, }, ], }; - assert.deepStrictEqual(aggregator.toMetricData( - defaultInstrumentDescriptor, - AggregationTemporality.CUMULATIVE, - [[{}, accumulation]], - endTime, - ), expected); + assert.deepStrictEqual( + aggregator.toMetricData( + defaultInstrumentDescriptor, + AggregationTemporality.CUMULATIVE, + [[{}, accumulation]], + endTime + ), + expected + ); }); function testSum(instrumentType: InstrumentType, expectSum: boolean) { @@ -217,23 +220,30 @@ describe('HistogramAggregator', () => { }, AggregationTemporality.CUMULATIVE, [[{}, accumulation]], - endTime, + endTime ); assert.notStrictEqual(aggregatedData, undefined); - assert.strictEqual(aggregatedData?.dataPoints[0].value.sum, expectSum ? 
5 : undefined); + assert.strictEqual( + aggregatedData?.dataPoints[0].value.sum, + expectSum ? 5 : undefined + ); } describe('should have undefined sum when used with', () => { it('UpDownCounter', () => testSum(InstrumentType.UP_DOWN_COUNTER, false)); - it('ObservableUpDownCounter', () => testSum(InstrumentType.OBSERVABLE_UP_DOWN_COUNTER, false)); - it('ObservableUpDownCounter', () => testSum(InstrumentType.OBSERVABLE_GAUGE, false)); + it('ObservableUpDownCounter', () => + testSum(InstrumentType.OBSERVABLE_UP_DOWN_COUNTER, false)); + it('ObservableUpDownCounter', () => + testSum(InstrumentType.OBSERVABLE_GAUGE, false)); }); describe('should include sum with', () => { it('UpDownCounter', () => testSum(InstrumentType.COUNTER, true)); - it('ObservableUpDownCounter', () => testSum(InstrumentType.HISTOGRAM, true)); - it('ObservableUpDownCounter', () => testSum(InstrumentType.OBSERVABLE_COUNTER, true)); + it('ObservableUpDownCounter', () => + testSum(InstrumentType.HISTOGRAM, true)); + it('ObservableUpDownCounter', () => + testSum(InstrumentType.OBSERVABLE_COUNTER, true)); }); }); }); diff --git a/packages/sdk-metrics/test/aggregator/LastValue.test.ts b/packages/sdk-metrics/test/aggregator/LastValue.test.ts index b309af6b6c..206b4c4acc 100644 --- a/packages/sdk-metrics/test/aggregator/LastValue.test.ts +++ b/packages/sdk-metrics/test/aggregator/LastValue.test.ts @@ -17,7 +17,10 @@ import { HrTime } from '@opentelemetry/api'; import * as assert from 'assert'; import { AggregationTemporality } from '../../src'; -import { LastValueAccumulation, LastValueAggregator } from '../../src/aggregator'; +import { + LastValueAccumulation, + LastValueAggregator, +} from '../../src/aggregator'; import { MetricData, DataPointType } from '../../src/export/MetricData'; import { commonValues, defaultInstrumentDescriptor, sleep } from '../util'; @@ -58,10 +61,20 @@ describe('LastValueAggregator', () => { assert.deepStrictEqual( aggregator.merge(accumulation1, accumulation2), - new LastValueAccumulation(accumulation1.startTime, 4, accumulation1.sampleTime)); + new LastValueAccumulation( + accumulation1.startTime, + 4, + accumulation1.sampleTime + ) + ); assert.deepStrictEqual( aggregator.merge(accumulation2, accumulation1), - new LastValueAccumulation(accumulation2.startTime, 4, accumulation1.sampleTime)); + new LastValueAccumulation( + accumulation2.startTime, + 4, + accumulation1.sampleTime + ) + ); }); }); @@ -92,10 +105,20 @@ describe('LastValueAggregator', () => { assert.deepStrictEqual( aggregator.diff(accumulation1, accumulation2), - new LastValueAccumulation(accumulation2.startTime, 4, accumulation1.sampleTime)); + new LastValueAccumulation( + accumulation2.startTime, + 4, + accumulation1.sampleTime + ) + ); assert.deepStrictEqual( aggregator.diff(accumulation2, accumulation1), - new LastValueAccumulation(accumulation1.startTime, 4, accumulation1.sampleTime)); + new LastValueAccumulation( + accumulation1.startTime, + 4, + accumulation1.sampleTime + ) + ); }); }); @@ -124,12 +147,15 @@ describe('LastValueAggregator', () => { }, ], }; - assert.deepStrictEqual(aggregator.toMetricData( - defaultInstrumentDescriptor, - AggregationTemporality.CUMULATIVE, - [[{}, accumulation]], - endTime, - ), expected); + assert.deepStrictEqual( + aggregator.toMetricData( + defaultInstrumentDescriptor, + AggregationTemporality.CUMULATIVE, + [[{}, accumulation]], + endTime + ), + expected + ); }); }); }); diff --git a/packages/sdk-metrics/test/aggregator/Sum.test.ts b/packages/sdk-metrics/test/aggregator/Sum.test.ts index 
67034ba6ca..a59549ca8f 100644 --- a/packages/sdk-metrics/test/aggregator/Sum.test.ts +++ b/packages/sdk-metrics/test/aggregator/Sum.test.ts @@ -104,12 +104,15 @@ describe('SumAggregator', () => { }, ], }; - assert.deepStrictEqual(aggregator.toMetricData( - defaultInstrumentDescriptor, - AggregationTemporality.CUMULATIVE, - [[{}, accumulation]], - endTime, - ), expected); + assert.deepStrictEqual( + aggregator.toMetricData( + defaultInstrumentDescriptor, + AggregationTemporality.CUMULATIVE, + [[{}, accumulation]], + endTime + ), + expected + ); }); }); }); diff --git a/packages/sdk-metrics/test/export/ConsoleMetricExporter.test.ts b/packages/sdk-metrics/test/export/ConsoleMetricExporter.test.ts index 7cd8048bf9..fe46fa9f71 100644 --- a/packages/sdk-metrics/test/export/ConsoleMetricExporter.test.ts +++ b/packages/sdk-metrics/test/export/ConsoleMetricExporter.test.ts @@ -23,16 +23,16 @@ import { defaultResource } from '../util'; import * as assert from 'assert'; import * as sinon from 'sinon'; import { assertAggregationTemporalitySelector } from './utils'; -import { - DEFAULT_AGGREGATION_TEMPORALITY_SELECTOR -} from '../../src/export/AggregationSelector'; -import { - AggregationTemporality, - InstrumentType -} from '../../src'; - - -async function waitForNumberOfExports(exporter: sinon.SinonSpy<[metrics: ResourceMetrics, resultCallback: (result: ExportResult) => void], void>, numberOfExports: number): Promise { +import { DEFAULT_AGGREGATION_TEMPORALITY_SELECTOR } from '../../src/export/AggregationSelector'; +import { AggregationTemporality, InstrumentType } from '../../src'; + +async function waitForNumberOfExports( + exporter: sinon.SinonSpy< + [metrics: ResourceMetrics, resultCallback: (result: ExportResult) => void], + void + >, + numberOfExports: number +): Promise { if (numberOfExports <= 0) { throw new Error('numberOfExports must be greater than or equal to 0'); } @@ -63,7 +63,7 @@ describe('ConsoleMetricExporter', () => { meterReader = new PeriodicExportingMetricReader({ exporter: exporter, exportIntervalMillis: 100, - exportTimeoutMillis: 100 + exportTimeoutMillis: 100, }); meterProvider.addMetricReader(meterReader); }); @@ -82,7 +82,9 @@ describe('ConsoleMetricExporter', () => { counter.add(10, counterAttribute); counter.add(10, counterAttribute); - const histogram = meter.createHistogram('histogram', { description: 'a histogram' }); + const histogram = meter.createHistogram('histogram', { + description: 'a histogram', + }); histogram.record(10); histogram.record(100); histogram.record(1000); @@ -97,20 +99,27 @@ describe('ConsoleMetricExporter', () => { const consoleMetric = consoleArgs[0]; const keys = Object.keys(consoleMetric).sort().join(','); - const expectedKeys = [ - 'dataPointType', - 'dataPoints', - 'descriptor', - ].join(','); + const expectedKeys = ['dataPointType', 'dataPoints', 'descriptor'].join( + ',' + ); - assert.ok(firstResourceMetric.resource.attributes.resourceKey === 'my-resource', 'resourceKey'); + assert.ok( + firstResourceMetric.resource.attributes.resourceKey === 'my-resource', + 'resourceKey' + ); assert.ok(keys === expectedKeys, 'expectedKeys'); assert.ok(consoleMetric.descriptor.name === 'counter_total', 'name'); - assert.ok(consoleMetric.descriptor.description === 'a test description', 'description'); + assert.ok( + consoleMetric.descriptor.description === 'a test description', + 'description' + ); assert.ok(consoleMetric.descriptor.type === 'COUNTER', 'type'); assert.ok(consoleMetric.descriptor.unit === '', 'unit'); 
assert.ok(consoleMetric.descriptor.valueType === 1, 'valueType'); - assert.ok(consoleMetric.dataPoints[0].attributes.key1 === 'attributeValue1', 'ensure metric attributes exists'); + assert.ok( + consoleMetric.dataPoints[0].attributes.key1 === 'attributeValue1', + 'ensure metric attributes exists' + ); assert.ok(spyExport.calledOnce); }); @@ -119,17 +128,28 @@ describe('ConsoleMetricExporter', () => { describe('constructor', () => { it('with no arguments should select cumulative temporality', () => { const exporter = new ConsoleMetricExporter(); - assertAggregationTemporalitySelector(exporter, DEFAULT_AGGREGATION_TEMPORALITY_SELECTOR); + assertAggregationTemporalitySelector( + exporter, + DEFAULT_AGGREGATION_TEMPORALITY_SELECTOR + ); }); it('with empty options should select cumulative temporality', () => { const exporter = new ConsoleMetricExporter({}); - assertAggregationTemporalitySelector(exporter, DEFAULT_AGGREGATION_TEMPORALITY_SELECTOR); + assertAggregationTemporalitySelector( + exporter, + DEFAULT_AGGREGATION_TEMPORALITY_SELECTOR + ); }); it('with cumulative preference should select cumulative temporality', () => { - const exporter = new ConsoleMetricExporter({ temporalitySelector: _ => AggregationTemporality.CUMULATIVE }); - assertAggregationTemporalitySelector(exporter, _ => AggregationTemporality.CUMULATIVE); + const exporter = new ConsoleMetricExporter({ + temporalitySelector: _ => AggregationTemporality.CUMULATIVE, + }); + assertAggregationTemporalitySelector( + exporter, + _ => AggregationTemporality.CUMULATIVE + ); }); it('with mixed preference should select matching temporality', () => { @@ -146,7 +166,9 @@ describe('ConsoleMetricExporter', () => { return AggregationTemporality.CUMULATIVE; } }; - const exporter = new ConsoleMetricExporter({ temporalitySelector: selector }); + const exporter = new ConsoleMetricExporter({ + temporalitySelector: selector, + }); assertAggregationTemporalitySelector(exporter, selector); }); }); diff --git a/packages/sdk-metrics/test/export/InMemoryMetricExporter.test.ts b/packages/sdk-metrics/test/export/InMemoryMetricExporter.test.ts index cab538fd2d..38a3f98586 100644 --- a/packages/sdk-metrics/test/export/InMemoryMetricExporter.test.ts +++ b/packages/sdk-metrics/test/export/InMemoryMetricExporter.test.ts @@ -24,7 +24,10 @@ import { PeriodicExportingMetricReader } from '../../src/export/PeriodicExportin import { MeterProvider } from '../../src/MeterProvider'; import { defaultResource } from '../util'; -async function waitForNumberOfExports(exporter: InMemoryMetricExporter , numberOfExports: number): Promise { +async function waitForNumberOfExports( + exporter: InMemoryMetricExporter, + numberOfExports: number +): Promise { if (numberOfExports <= 0) { throw new Error('numberOfExports must be greater than or equal to 0'); } @@ -52,7 +55,7 @@ describe('InMemoryMetricExporter', () => { meterReader = new PeriodicExportingMetricReader({ exporter: exporter, exportIntervalMillis: 100, - exportTimeoutMillis: 100 + exportTimeoutMillis: 100, }); meterProvider.addMetricReader(meterReader); }); @@ -69,17 +72,16 @@ describe('InMemoryMetricExporter', () => { }); const resourceMetrics: ResourceMetrics = { resource: resource, - scopeMetrics: - [ - { - scope: { - name: 'mylib', - version: '0.1.0', - schemaUrl: 'http://url.to.schema' - }, - metrics: [], - } - ] + scopeMetrics: [ + { + scope: { + name: 'mylib', + version: '0.1.0', + schemaUrl: 'http://url.to.schema', + }, + metrics: [], + }, + ], }; exporter.export(resourceMetrics, result => { 
assert.ok(result.code === ExportResultCode.FAILED); @@ -117,7 +119,9 @@ describe('InMemoryMetricExporter', () => { counter.add(10, counterAttribute); counter.add(10, counterAttribute); - const histogram = meter.createHistogram('histogram', { description: 'a histogram' }); + const histogram = meter.createHistogram('histogram', { + description: 'a histogram', + }); histogram.record(10); histogram.record(100); histogram.record(1000); diff --git a/packages/sdk-metrics/test/export/MetricReader.test.ts b/packages/sdk-metrics/test/export/MetricReader.test.ts index 32c00c24d4..297622982d 100644 --- a/packages/sdk-metrics/test/export/MetricReader.test.ts +++ b/packages/sdk-metrics/test/export/MetricReader.test.ts @@ -18,22 +18,16 @@ import * as assert from 'assert'; import * as sinon from 'sinon'; import { MeterProvider } from '../../src/MeterProvider'; import { assertRejects } from '../test-utils'; -import { - emptyResourceMetrics, - TestMetricProducer -} from './TestMetricProducer'; +import { emptyResourceMetrics, TestMetricProducer } from './TestMetricProducer'; import { TestMetricReader } from './TestMetricReader'; -import { - Aggregation, - AggregationTemporality -} from '../../src'; +import { Aggregation, AggregationTemporality } from '../../src'; import { DEFAULT_AGGREGATION_SELECTOR, DEFAULT_AGGREGATION_TEMPORALITY_SELECTOR, } from '../../src/export/AggregationSelector'; import { assertAggregationSelector, - assertAggregationTemporalitySelector + assertAggregationTemporalitySelector, } from './utils'; describe('MetricReader', () => { @@ -44,8 +38,14 @@ describe('MetricReader', () => { const meterProvider2 = new MeterProvider(); meterProvider1.addMetricReader(reader); - assert.throws(() => meterProvider1.addMetricReader(reader), /MetricReader can not be bound to a MeterProvider again/); - assert.throws(() => meterProvider2.addMetricReader(reader), /MetricReader can not be bound to a MeterProvider again/); + assert.throws( + () => meterProvider1.addMetricReader(reader), + /MetricReader can not be bound to a MeterProvider again/ + ); + assert.throws( + () => meterProvider2.addMetricReader(reader), + /MetricReader can not be bound to a MeterProvider again/ + ); }); }); @@ -68,7 +68,10 @@ describe('MetricReader', () => { it('should throw on non-initialized instance', async () => { const reader = new TestMetricReader(); - await assertRejects(() => reader.collect(), /MetricReader is not bound to a MetricProducer/); + await assertRejects( + () => reader.collect(), + /MetricReader is not bound to a MetricProducer/ + ); }); it('should return empty on shut-down instance', async () => { @@ -98,13 +101,19 @@ describe('MetricReader', () => { describe('selectAggregation', () => { it('should override default when not provided with a selector', () => { - assertAggregationSelector(new TestMetricReader(), DEFAULT_AGGREGATION_SELECTOR); - assertAggregationSelector(new TestMetricReader({}), DEFAULT_AGGREGATION_SELECTOR); + assertAggregationSelector( + new TestMetricReader(), + DEFAULT_AGGREGATION_SELECTOR + ); + assertAggregationSelector( + new TestMetricReader({}), + DEFAULT_AGGREGATION_SELECTOR + ); }); it('should override default when provided with a selector', () => { const reader = new TestMetricReader({ - aggregationSelector: _instrumentType => Aggregation.Sum() + aggregationSelector: _instrumentType => Aggregation.Sum(), }); assertAggregationSelector(reader, _instrumentType => Aggregation.Sum()); reader.shutdown(); @@ -113,15 +122,25 @@ describe('MetricReader', () => { 
describe('selectAggregationTemporality', () => { it('should override default when not provided with a selector', () => { - assertAggregationTemporalitySelector(new TestMetricReader(), DEFAULT_AGGREGATION_TEMPORALITY_SELECTOR); - assertAggregationTemporalitySelector(new TestMetricReader({}), DEFAULT_AGGREGATION_TEMPORALITY_SELECTOR); + assertAggregationTemporalitySelector( + new TestMetricReader(), + DEFAULT_AGGREGATION_TEMPORALITY_SELECTOR + ); + assertAggregationTemporalitySelector( + new TestMetricReader({}), + DEFAULT_AGGREGATION_TEMPORALITY_SELECTOR + ); }); it('should override default when provided with a selector', () => { const reader = new TestMetricReader({ - aggregationTemporalitySelector: _instrumentType => AggregationTemporality.DELTA + aggregationTemporalitySelector: _instrumentType => + AggregationTemporality.DELTA, }); - assertAggregationTemporalitySelector(reader, _instrumentType => AggregationTemporality.DELTA); + assertAggregationTemporalitySelector( + reader, + _instrumentType => AggregationTemporality.DELTA + ); reader.shutdown(); }); }); diff --git a/packages/sdk-metrics/test/export/PeriodicExportingMetricReader.test.ts b/packages/sdk-metrics/test/export/PeriodicExportingMetricReader.test.ts index 9e3ac30826..d6fa69df9d 100644 --- a/packages/sdk-metrics/test/export/PeriodicExportingMetricReader.test.ts +++ b/packages/sdk-metrics/test/export/PeriodicExportingMetricReader.test.ts @@ -16,31 +16,21 @@ import { PeriodicExportingMetricReader } from '../../src/export/PeriodicExportingMetricReader'; import { AggregationTemporality } from '../../src/export/AggregationTemporality'; -import { - Aggregation, - InstrumentType, - PushMetricExporter -} from '../../src'; +import { Aggregation, InstrumentType, PushMetricExporter } from '../../src'; import { ResourceMetrics } from '../../src/export/MetricData'; import * as assert from 'assert'; import * as sinon from 'sinon'; import { TimeoutError } from '../../src/utils'; -import { - ExportResult, - ExportResultCode -} from '@opentelemetry/core'; +import { ExportResult, ExportResultCode } from '@opentelemetry/core'; import { assertRejects } from '../test-utils'; -import { - emptyResourceMetrics, - TestMetricProducer -} from './TestMetricProducer'; +import { emptyResourceMetrics, TestMetricProducer } from './TestMetricProducer'; import { assertAggregationSelector, - assertAggregationTemporalitySelector + assertAggregationTemporalitySelector, } from './utils'; import { DEFAULT_AGGREGATION_SELECTOR, - DEFAULT_AGGREGATION_TEMPORALITY_SELECTOR + DEFAULT_AGGREGATION_TEMPORALITY_SELECTOR, } from '../../src/export/AggregationSelector'; const MAX_32_BIT_INT = 2 ** 31 - 1; @@ -53,7 +43,10 @@ class TestMetricExporter implements PushMetricExporter { private _batches: ResourceMetrics[] = []; private _shutdown: boolean = false; - export(metrics: ResourceMetrics, resultCallback: (result: ExportResult) => void): void { + export( + metrics: ResourceMetrics, + resultCallback: (result: ExportResult) => void + ): void { this._batches.push(metrics); if (this.throwException) { @@ -61,7 +54,10 @@ class TestMetricExporter implements PushMetricExporter { } setTimeout(() => { if (this.failureResult) { - resultCallback({ code: ExportResultCode.FAILED, error: new Error('some error') }); + resultCallback({ + code: ExportResultCode.FAILED, + error: new Error('some error'), + }); } else { resultCallback({ code: ExportResultCode.SUCCESS }); } @@ -83,7 +79,9 @@ class TestMetricExporter implements PushMetricExporter { await new Promise(resolve => setTimeout(resolve, 
this.forceFlushTime)); } - async waitForNumberOfExports(numberOfExports: number): Promise { + async waitForNumberOfExports( + numberOfExports: number + ): Promise { if (numberOfExports <= 0) { throw new Error('numberOfExports must be greater than or equal to 0'); } @@ -96,7 +94,9 @@ class TestMetricExporter implements PushMetricExporter { } class TestDeltaMetricExporter extends TestMetricExporter { - selectAggregationTemporality(_instrumentType: InstrumentType): AggregationTemporality { + selectAggregationTemporality( + _instrumentType: InstrumentType + ): AggregationTemporality { return AggregationTemporality.DELTA; } } @@ -115,38 +115,53 @@ describe('PeriodicExportingMetricReader', () => { describe('constructor', () => { it('should construct PeriodicExportingMetricReader without exceptions', () => { const exporter = new TestDeltaMetricExporter(); - assert.doesNotThrow(() => new PeriodicExportingMetricReader({ - exporter, - exportIntervalMillis: 4000, - exportTimeoutMillis: 3000 - })); + assert.doesNotThrow( + () => + new PeriodicExportingMetricReader({ + exporter, + exportIntervalMillis: 4000, + exportTimeoutMillis: 3000, + }) + ); }); it('should throw when interval less or equal to 0', () => { const exporter = new TestDeltaMetricExporter(); - assert.throws(() => new PeriodicExportingMetricReader({ - exporter: exporter, - exportIntervalMillis: 0, - exportTimeoutMillis: 0 - }), /exportIntervalMillis must be greater than 0/); + assert.throws( + () => + new PeriodicExportingMetricReader({ + exporter: exporter, + exportIntervalMillis: 0, + exportTimeoutMillis: 0, + }), + /exportIntervalMillis must be greater than 0/ + ); }); it('should throw when timeout less or equal to 0', () => { const exporter = new TestDeltaMetricExporter(); - assert.throws(() => new PeriodicExportingMetricReader({ - exporter: exporter, - exportIntervalMillis: 1, - exportTimeoutMillis: 0 - }), /exportTimeoutMillis must be greater than 0/); + assert.throws( + () => + new PeriodicExportingMetricReader({ + exporter: exporter, + exportIntervalMillis: 1, + exportTimeoutMillis: 0, + }), + /exportTimeoutMillis must be greater than 0/ + ); }); it('should throw when timeout less or equal to interval', () => { const exporter = new TestDeltaMetricExporter(); - assert.throws(() => new PeriodicExportingMetricReader({ - exporter: exporter, - exportIntervalMillis: 100, - exportTimeoutMillis: 200 - }), /exportIntervalMillis must be greater than or equal to exportTimeoutMillis/); + assert.throws( + () => + new PeriodicExportingMetricReader({ + exporter: exporter, + exportIntervalMillis: 100, + exportTimeoutMillis: 200, + }), + /exportIntervalMillis must be greater than or equal to exportTimeoutMillis/ + ); }); it('should not start exporting', async () => { @@ -157,7 +172,7 @@ describe('PeriodicExportingMetricReader', () => { new PeriodicExportingMetricReader({ exporter: exporter, exportIntervalMillis: 1, - exportTimeoutMillis: 1 + exportTimeoutMillis: 1, }); await new Promise(resolve => setTimeout(resolve, 50)); @@ -171,13 +186,16 @@ describe('PeriodicExportingMetricReader', () => { const reader = new PeriodicExportingMetricReader({ exporter: exporter, exportIntervalMillis: 30, - exportTimeoutMillis: 20 + exportTimeoutMillis: 20, }); reader.setMetricProducer(new TestMetricProducer()); const result = await exporter.waitForNumberOfExports(2); - assert.deepStrictEqual(result, [emptyResourceMetrics, emptyResourceMetrics]); + assert.deepStrictEqual(result, [ + emptyResourceMetrics, + emptyResourceMetrics, + ]); await reader.shutdown(); }); 
}); @@ -189,13 +207,16 @@ describe('PeriodicExportingMetricReader', () => { const reader = new PeriodicExportingMetricReader({ exporter: exporter, exportIntervalMillis: 30, - exportTimeoutMillis: 20 + exportTimeoutMillis: 20, }); reader.setMetricProducer(new TestMetricProducer()); const result = await exporter.waitForNumberOfExports(2); - assert.deepStrictEqual(result, [emptyResourceMetrics, emptyResourceMetrics]); + assert.deepStrictEqual(result, [ + emptyResourceMetrics, + emptyResourceMetrics, + ]); exporter.throwException = false; await reader.shutdown(); @@ -207,13 +228,16 @@ describe('PeriodicExportingMetricReader', () => { const reader = new PeriodicExportingMetricReader({ exporter: exporter, exportIntervalMillis: 30, - exportTimeoutMillis: 20 + exportTimeoutMillis: 20, }); reader.setMetricProducer(new TestMetricProducer()); const result = await exporter.waitForNumberOfExports(2); - assert.deepStrictEqual(result, [emptyResourceMetrics, emptyResourceMetrics]); + assert.deepStrictEqual(result, [ + emptyResourceMetrics, + emptyResourceMetrics, + ]); exporter.failureResult = false; await reader.shutdown(); @@ -226,13 +250,16 @@ describe('PeriodicExportingMetricReader', () => { const reader = new PeriodicExportingMetricReader({ exporter: exporter, exportIntervalMillis: 30, - exportTimeoutMillis: 20 + exportTimeoutMillis: 20, }); reader.setMetricProducer(new TestMetricProducer()); const result = await exporter.waitForNumberOfExports(2); - assert.deepStrictEqual(result, [emptyResourceMetrics, emptyResourceMetrics]); + assert.deepStrictEqual(result, [ + emptyResourceMetrics, + emptyResourceMetrics, + ]); exporter.throwException = false; await reader.shutdown(); @@ -251,7 +278,7 @@ describe('PeriodicExportingMetricReader', () => { const reader = new PeriodicExportingMetricReader({ exporter: exporter, exportIntervalMillis: MAX_32_BIT_INT, - exportTimeoutMillis: 80 + exportTimeoutMillis: 80, }); reader.setMetricProducer(new TestMetricProducer()); @@ -271,8 +298,10 @@ describe('PeriodicExportingMetricReader', () => { }); reader.setMetricProducer(new TestMetricProducer()); - await assertRejects(() => reader.forceFlush({ timeoutMillis: 20 }), - TimeoutError); + await assertRejects( + () => reader.forceFlush({ timeoutMillis: 20 }), + TimeoutError + ); await reader.shutdown(); }); @@ -316,7 +345,10 @@ describe('PeriodicExportingMetricReader', () => { exportIntervalMillis: MAX_32_BIT_INT, }); - assertAggregationTemporalitySelector(reader, DEFAULT_AGGREGATION_TEMPORALITY_SELECTOR); + assertAggregationTemporalitySelector( + reader, + DEFAULT_AGGREGATION_TEMPORALITY_SELECTOR + ); reader.shutdown(); }); @@ -328,7 +360,10 @@ describe('PeriodicExportingMetricReader', () => { exportIntervalMillis: MAX_32_BIT_INT, }); - assertAggregationTemporalitySelector(reader, exporter.selectAggregationTemporality); + assertAggregationTemporalitySelector( + reader, + exporter.selectAggregationTemporality + ); reader.shutdown(); }); }); @@ -373,7 +408,7 @@ describe('PeriodicExportingMetricReader', () => { const reader = new PeriodicExportingMetricReader({ exporter: exporter, exportIntervalMillis: MAX_32_BIT_INT, - exportTimeoutMillis: 80 + exportTimeoutMillis: 80, }); reader.setMetricProducer(new TestMetricProducer()); @@ -392,8 +427,10 @@ describe('PeriodicExportingMetricReader', () => { }); reader.setMetricProducer(new TestMetricProducer()); - await assertRejects(() => reader.shutdown({ timeoutMillis: 20 }), - TimeoutError); + await assertRejects( + () => reader.shutdown({ timeoutMillis: 20 }), + TimeoutError + ); }); 
it('called twice should call export shutdown only once', async () => { @@ -403,7 +440,7 @@ describe('PeriodicExportingMetricReader', () => { const reader = new PeriodicExportingMetricReader({ exporter: exporter, exportIntervalMillis: MAX_32_BIT_INT, - exportTimeoutMillis: 80 + exportTimeoutMillis: 80, }); reader.setMetricProducer(new TestMetricProducer()); diff --git a/packages/sdk-metrics/test/export/TestMetricExporter.ts b/packages/sdk-metrics/test/export/TestMetricExporter.ts index 67f33a898d..86b726bb34 100644 --- a/packages/sdk-metrics/test/export/TestMetricExporter.ts +++ b/packages/sdk-metrics/test/export/TestMetricExporter.ts @@ -15,11 +15,18 @@ */ import { ExportResult, ExportResultCode } from '@opentelemetry/core'; -import { AggregationTemporality, PushMetricExporter, ResourceMetrics } from '../../src'; +import { + AggregationTemporality, + PushMetricExporter, + ResourceMetrics, +} from '../../src'; export class TestMetricExporter implements PushMetricExporter { resourceMetricsList: ResourceMetrics[] = []; - export(resourceMetrics: ResourceMetrics, resultCallback: (result: ExportResult) => void): void { + export( + resourceMetrics: ResourceMetrics, + resultCallback: (result: ExportResult) => void + ): void { this.resourceMetricsList.push(resourceMetrics); process.nextTick(() => resultCallback({ code: ExportResultCode.SUCCESS })); } diff --git a/packages/sdk-metrics/test/export/TestMetricProducer.ts b/packages/sdk-metrics/test/export/TestMetricProducer.ts index cb1247ed00..29137d62a4 100644 --- a/packages/sdk-metrics/test/export/TestMetricProducer.ts +++ b/packages/sdk-metrics/test/export/TestMetricProducer.ts @@ -18,7 +18,10 @@ import { CollectionResult } from '../../src/export/MetricData'; import { MetricProducer } from '../../src/export/MetricProducer'; import { defaultResource } from '../util'; -export const emptyResourceMetrics = { resource: defaultResource, scopeMetrics: [] }; +export const emptyResourceMetrics = { + resource: defaultResource, + scopeMetrics: [], +}; export class TestMetricProducer implements MetricProducer { async collect(): Promise { diff --git a/packages/sdk-metrics/test/export/TestMetricReader.ts b/packages/sdk-metrics/test/export/TestMetricReader.ts index 40ae90e153..61727322a4 100644 --- a/packages/sdk-metrics/test/export/TestMetricReader.ts +++ b/packages/sdk-metrics/test/export/TestMetricReader.ts @@ -14,10 +14,7 @@ * limitations under the License. */ -import { - AggregationTemporality, - MetricReader, -} from '../../src'; +import { AggregationTemporality, MetricReader } from '../../src'; import { MetricCollector } from '../../src/state/MetricCollector'; import { MetricReaderOptions } from '../../src/export/MetricReader'; @@ -25,7 +22,6 @@ import { MetricReaderOptions } from '../../src/export/MetricReader'; * A test metric reader that implements no-op onForceFlush() and onShutdown() handlers. 
*/ export class TestMetricReader extends MetricReader { - protected onForceFlush(): Promise { return Promise.resolve(undefined); } diff --git a/packages/sdk-metrics/test/export/utils.ts b/packages/sdk-metrics/test/export/utils.ts index 3ad8543be7..a5decfae41 100644 --- a/packages/sdk-metrics/test/export/utils.ts +++ b/packages/sdk-metrics/test/export/utils.ts @@ -19,7 +19,7 @@ import { AggregationTemporalitySelector, InstrumentType, MetricReader, - PushMetricExporter + PushMetricExporter, } from '../../src'; import * as assert from 'assert'; @@ -29,7 +29,7 @@ const instrumentTypes = [ InstrumentType.UP_DOWN_COUNTER, InstrumentType.OBSERVABLE_UP_DOWN_COUNTER, InstrumentType.HISTOGRAM, - InstrumentType.OBSERVABLE_GAUGE + InstrumentType.OBSERVABLE_GAUGE, ]; /** @@ -37,11 +37,16 @@ const instrumentTypes = [ * @param reader * @param expectedSelector */ -export function assertAggregationSelector(reader: MetricReader | PushMetricExporter, expectedSelector: AggregationSelector) { +export function assertAggregationSelector( + reader: MetricReader | PushMetricExporter, + expectedSelector: AggregationSelector +) { for (const instrumentType of instrumentTypes) { - assert.strictEqual(reader.selectAggregation?.(instrumentType), + assert.strictEqual( + reader.selectAggregation?.(instrumentType), expectedSelector(instrumentType), - `incorrect aggregation selection for ${InstrumentType[instrumentType]}`); + `incorrect aggregation selection for ${InstrumentType[instrumentType]}` + ); } } @@ -50,10 +55,15 @@ export function assertAggregationSelector(reader: MetricReader | PushMetricExpor * @param reader * @param expectedSelector */ -export function assertAggregationTemporalitySelector(reader: MetricReader | PushMetricExporter, expectedSelector: AggregationTemporalitySelector) { +export function assertAggregationTemporalitySelector( + reader: MetricReader | PushMetricExporter, + expectedSelector: AggregationTemporalitySelector +) { for (const instrumentType of instrumentTypes) { - assert.strictEqual(reader.selectAggregationTemporality?.(instrumentType), + assert.strictEqual( + reader.selectAggregationTemporality?.(instrumentType), expectedSelector(instrumentType), - `incorrect aggregation temporality selection for ${InstrumentType[instrumentType]}`); + `incorrect aggregation temporality selection for ${InstrumentType[instrumentType]}` + ); } } diff --git a/packages/sdk-metrics/test/state/AsyncMetricStorage.test.ts b/packages/sdk-metrics/test/state/AsyncMetricStorage.test.ts index 0500d012e8..9fe742ca02 100644 --- a/packages/sdk-metrics/test/state/AsyncMetricStorage.test.ts +++ b/packages/sdk-metrics/test/state/AsyncMetricStorage.test.ts @@ -23,7 +23,12 @@ import { MetricCollectorHandle } from '../../src/state/MetricCollector'; import { AsyncMetricStorage } from '../../src/state/AsyncMetricStorage'; import { NoopAttributesProcessor } from '../../src/view/AttributesProcessor'; import { ObservableRegistry } from '../../src/state/ObservableRegistry'; -import { assertMetricData, assertDataPoint, defaultInstrumentDescriptor, ObservableCallbackDelegate } from '../util'; +import { + assertMetricData, + assertDataPoint, + defaultInstrumentDescriptor, + ObservableCallbackDelegate, +} from '../util'; import { ObservableInstrument } from '../../src/Instruments'; import { HrTime } from '@opentelemetry/api'; @@ -45,7 +50,7 @@ describe('AsyncMetricStorage', () => { const metricStorage = new AsyncMetricStorage( defaultInstrumentDescriptor, new SumAggregator(true), - new NoopAttributesProcessor(), + new 
NoopAttributesProcessor() ); const observable = new ObservableInstrument( defaultInstrumentDescriptor, @@ -66,13 +71,32 @@ describe('AsyncMetricStorage', () => { const metric = metricStorage.collect( deltaCollector, collectors, - collectionTime); + collectionTime + ); assertMetricData(metric, DataPointType.SUM); assert.strictEqual(metric.dataPoints.length, 3); - assertDataPoint(metric.dataPoints[0], { key: '1' }, 1, collectionTime, collectionTime); - assertDataPoint(metric.dataPoints[1], { key: '2' }, 2, collectionTime, collectionTime); - assertDataPoint(metric.dataPoints[2], { key: '3' }, 3, collectionTime, collectionTime); + assertDataPoint( + metric.dataPoints[0], + { key: '1' }, + 1, + collectionTime, + collectionTime + ); + assertDataPoint( + metric.dataPoints[1], + { key: '2' }, + 2, + collectionTime, + collectionTime + ); + assertDataPoint( + metric.dataPoints[2], + { key: '3' }, + 3, + collectionTime, + collectionTime + ); } delegate.setDelegate(observableResult => {}); @@ -83,7 +107,8 @@ describe('AsyncMetricStorage', () => { const metric = metricStorage.collect( deltaCollector, collectors, - collectionTime); + collectionTime + ); assertMetricData(metric, DataPointType.SUM); assert.strictEqual(metric.dataPoints.length, 0); @@ -100,14 +125,33 @@ describe('AsyncMetricStorage', () => { const metric = metricStorage.collect( deltaCollector, collectors, - collectionTime); + collectionTime + ); assertMetricData(metric, DataPointType.SUM); assert.strictEqual(metric.dataPoints.length, 3); // All values were diffed. StartTime is being reset for gaps. - assertDataPoint(metric.dataPoints[0], { key: '1' }, 3, collectionTime, collectionTime); - assertDataPoint(metric.dataPoints[1], { key: '2' }, 3, collectionTime, collectionTime); - assertDataPoint(metric.dataPoints[2], { key: '3' }, 3, collectionTime, collectionTime); + assertDataPoint( + metric.dataPoints[0], + { key: '1' }, + 3, + collectionTime, + collectionTime + ); + assertDataPoint( + metric.dataPoints[1], + { key: '2' }, + 3, + collectionTime, + collectionTime + ); + assertDataPoint( + metric.dataPoints[2], + { key: '3' }, + 3, + collectionTime, + collectionTime + ); } }); @@ -117,7 +161,7 @@ describe('AsyncMetricStorage', () => { const metricStorage = new AsyncMetricStorage( defaultInstrumentDescriptor, new SumAggregator(true), - new NoopAttributesProcessor(), + new NoopAttributesProcessor() ); const observable = new ObservableInstrument( defaultInstrumentDescriptor, @@ -138,11 +182,18 @@ describe('AsyncMetricStorage', () => { const metric = metricStorage.collect( deltaCollector, collectors, - collectionTime); + collectionTime + ); assertMetricData(metric, DataPointType.SUM); assert.strictEqual(metric.dataPoints.length, 1); - assertDataPoint(metric.dataPoints[0], { key: '1' }, 100, collectionTime, collectionTime); + assertDataPoint( + metric.dataPoints[0], + { key: '1' }, + 100, + collectionTime, + collectionTime + ); lastCollectionTime = collectionTime; } @@ -157,11 +208,18 @@ describe('AsyncMetricStorage', () => { const metric = metricStorage.collect( deltaCollector, collectors, - collectionTime); + collectionTime + ); assertMetricData(metric, DataPointType.SUM); assert.strictEqual(metric.dataPoints.length, 1); - assertDataPoint(metric.dataPoints[0], { key: '1' }, 1, lastCollectionTime, collectionTime); + assertDataPoint( + metric.dataPoints[0], + { key: '1' }, + 1, + lastCollectionTime, + collectionTime + ); lastCollectionTime = collectionTime; } @@ -176,11 +234,18 @@ describe('AsyncMetricStorage', () => { const metric = 
metricStorage.collect( deltaCollector, collectors, - collectionTime); + collectionTime + ); assertMetricData(metric, DataPointType.SUM); assert.strictEqual(metric.dataPoints.length, 1); - assertDataPoint(metric.dataPoints[0], { key: '1' }, 49, lastCollectionTime, collectionTime); + assertDataPoint( + metric.dataPoints[0], + { key: '1' }, + 49, + lastCollectionTime, + collectionTime + ); } }); @@ -190,7 +255,7 @@ describe('AsyncMetricStorage', () => { const metricStorage = new AsyncMetricStorage( defaultInstrumentDescriptor, new SumAggregator(false), - new NoopAttributesProcessor(), + new NoopAttributesProcessor() ); const observable = new ObservableInstrument( defaultInstrumentDescriptor, @@ -211,11 +276,18 @@ describe('AsyncMetricStorage', () => { const metric = metricStorage.collect( deltaCollector, collectors, - collectionTime); + collectionTime + ); assertMetricData(metric, DataPointType.SUM); assert.strictEqual(metric.dataPoints.length, 1); - assertDataPoint(metric.dataPoints[0], { key: '1' }, 100, collectionTime, collectionTime); + assertDataPoint( + metric.dataPoints[0], + { key: '1' }, + 100, + collectionTime, + collectionTime + ); lastCollectionTime = collectionTime; } @@ -230,13 +302,19 @@ describe('AsyncMetricStorage', () => { const metric = metricStorage.collect( deltaCollector, collectors, - collectionTime); + collectionTime + ); assertMetricData(metric, DataPointType.SUM); assert.strictEqual(metric.dataPoints.length, 1); - assertDataPoint(metric.dataPoints[0], { key: '1' }, -99, lastCollectionTime, collectionTime); + assertDataPoint( + metric.dataPoints[0], + { key: '1' }, + -99, + lastCollectionTime, + collectionTime + ); lastCollectionTime = collectionTime; - } // Observe a new data point @@ -250,11 +328,18 @@ describe('AsyncMetricStorage', () => { const metric = metricStorage.collect( deltaCollector, collectors, - collectionTime); + collectionTime + ); assertMetricData(metric, DataPointType.SUM); assert.strictEqual(metric.dataPoints.length, 1); - assertDataPoint(metric.dataPoints[0], { key: '1' }, 49, lastCollectionTime, collectionTime); + assertDataPoint( + metric.dataPoints[0], + { key: '1' }, + 49, + lastCollectionTime, + collectionTime + ); } }); }); @@ -267,7 +352,7 @@ describe('AsyncMetricStorage', () => { const metricStorage = new AsyncMetricStorage( defaultInstrumentDescriptor, new SumAggregator(true), - new NoopAttributesProcessor(), + new NoopAttributesProcessor() ); const observable = new ObservableInstrument( defaultInstrumentDescriptor, @@ -289,14 +374,33 @@ describe('AsyncMetricStorage', () => { const metric = metricStorage.collect( cumulativeCollector, collectors, - collectionTime); + collectionTime + ); startTime = collectionTime; assertMetricData(metric, DataPointType.SUM); assert.strictEqual(metric.dataPoints.length, 3); - assertDataPoint(metric.dataPoints[0], { key: '1' }, 1, startTime, collectionTime); - assertDataPoint(metric.dataPoints[1], { key: '2' }, 2, startTime, collectionTime); - assertDataPoint(metric.dataPoints[2], { key: '3' }, 3, startTime, collectionTime); + assertDataPoint( + metric.dataPoints[0], + { key: '1' }, + 1, + startTime, + collectionTime + ); + assertDataPoint( + metric.dataPoints[1], + { key: '2' }, + 2, + startTime, + collectionTime + ); + assertDataPoint( + metric.dataPoints[2], + { key: '3' }, + 3, + startTime, + collectionTime + ); } delegate.setDelegate(observableResult => {}); @@ -307,13 +411,32 @@ describe('AsyncMetricStorage', () => { const metric = metricStorage.collect( cumulativeCollector, collectors, - 
collectionTime); + collectionTime + ); assertMetricData(metric, DataPointType.SUM); assert.strictEqual(metric.dataPoints.length, 3); - assertDataPoint(metric.dataPoints[0], { key: '1' }, 1, startTime, collectionTime); - assertDataPoint(metric.dataPoints[1], { key: '2' }, 2, startTime, collectionTime); - assertDataPoint(metric.dataPoints[2], { key: '3' }, 3, startTime, collectionTime); + assertDataPoint( + metric.dataPoints[0], + { key: '1' }, + 1, + startTime, + collectionTime + ); + assertDataPoint( + metric.dataPoints[1], + { key: '2' }, + 2, + startTime, + collectionTime + ); + assertDataPoint( + metric.dataPoints[2], + { key: '3' }, + 3, + startTime, + collectionTime + ); } delegate.setDelegate(observableResult => { @@ -327,13 +450,32 @@ describe('AsyncMetricStorage', () => { const metric = metricStorage.collect( cumulativeCollector, collectors, - collectionTime); + collectionTime + ); assertMetricData(metric, DataPointType.SUM); assert.strictEqual(metric.dataPoints.length, 3); - assertDataPoint(metric.dataPoints[0], { key: '1' }, 4, startTime, collectionTime); - assertDataPoint(metric.dataPoints[1], { key: '2' }, 5, startTime, collectionTime); - assertDataPoint(metric.dataPoints[2], { key: '3' }, 6, startTime, collectionTime); + assertDataPoint( + metric.dataPoints[0], + { key: '1' }, + 4, + startTime, + collectionTime + ); + assertDataPoint( + metric.dataPoints[1], + { key: '2' }, + 5, + startTime, + collectionTime + ); + assertDataPoint( + metric.dataPoints[2], + { key: '3' }, + 6, + startTime, + collectionTime + ); } }); @@ -343,7 +485,7 @@ describe('AsyncMetricStorage', () => { const metricStorage = new AsyncMetricStorage( defaultInstrumentDescriptor, new SumAggregator(true), - new NoopAttributesProcessor(), + new NoopAttributesProcessor() ); const observable = new ObservableInstrument( defaultInstrumentDescriptor, @@ -364,12 +506,19 @@ describe('AsyncMetricStorage', () => { const metric = metricStorage.collect( cumulativeCollector, collectors, - collectionTime); + collectionTime + ); startTime = collectionTime; assertMetricData(metric, DataPointType.SUM); assert.strictEqual(metric.dataPoints.length, 1); - assertDataPoint(metric.dataPoints[0], { key: '1' }, 100, startTime, collectionTime); + assertDataPoint( + metric.dataPoints[0], + { key: '1' }, + 100, + startTime, + collectionTime + ); } // Observe a drop on the metric @@ -383,12 +532,19 @@ describe('AsyncMetricStorage', () => { const metric = metricStorage.collect( cumulativeCollector, collectors, - collectionTime); + collectionTime + ); assertMetricData(metric, DataPointType.SUM); assert.strictEqual(metric.dataPoints.length, 1); // The startTime should be reset. 
- assertDataPoint(metric.dataPoints[0], { key: '1' }, 1, collectionTime, collectionTime); + assertDataPoint( + metric.dataPoints[0], + { key: '1' }, + 1, + collectionTime, + collectionTime + ); startTime = collectionTime; } @@ -403,11 +559,18 @@ describe('AsyncMetricStorage', () => { const metric = metricStorage.collect( cumulativeCollector, collectors, - collectionTime); + collectionTime + ); assertMetricData(metric, DataPointType.SUM); assert.strictEqual(metric.dataPoints.length, 1); - assertDataPoint(metric.dataPoints[0], { key: '1' }, 50, startTime, collectionTime); + assertDataPoint( + metric.dataPoints[0], + { key: '1' }, + 50, + startTime, + collectionTime + ); } }); @@ -417,7 +580,7 @@ describe('AsyncMetricStorage', () => { const metricStorage = new AsyncMetricStorage( defaultInstrumentDescriptor, new SumAggregator(false), - new NoopAttributesProcessor(), + new NoopAttributesProcessor() ); const observable = new ObservableInstrument( defaultInstrumentDescriptor, @@ -438,12 +601,19 @@ describe('AsyncMetricStorage', () => { const metric = metricStorage.collect( cumulativeCollector, collectors, - collectionTime); + collectionTime + ); startTime = collectionTime; assertMetricData(metric, DataPointType.SUM); assert.strictEqual(metric.dataPoints.length, 1); - assertDataPoint(metric.dataPoints[0], { key: '1' }, 100, startTime, collectionTime); + assertDataPoint( + metric.dataPoints[0], + { key: '1' }, + 100, + startTime, + collectionTime + ); } // Observe a drop on the metric @@ -457,12 +627,19 @@ describe('AsyncMetricStorage', () => { const metric = metricStorage.collect( cumulativeCollector, collectors, - collectionTime); + collectionTime + ); assertMetricData(metric, DataPointType.SUM); assert.strictEqual(metric.dataPoints.length, 1); // No reset on the value or the startTime - assertDataPoint(metric.dataPoints[0], { key: '1' }, 1, startTime, collectionTime); + assertDataPoint( + metric.dataPoints[0], + { key: '1' }, + 1, + startTime, + collectionTime + ); } // Observe a new data point @@ -476,11 +653,18 @@ describe('AsyncMetricStorage', () => { const metric = metricStorage.collect( cumulativeCollector, collectors, - collectionTime); + collectionTime + ); assertMetricData(metric, DataPointType.SUM); assert.strictEqual(metric.dataPoints.length, 1); - assertDataPoint(metric.dataPoints[0], { key: '1' }, 50, startTime, collectionTime); + assertDataPoint( + metric.dataPoints[0], + { key: '1' }, + 50, + startTime, + collectionTime + ); } }); }); diff --git a/packages/sdk-metrics/test/state/DeltaMetricProcessor.test.ts b/packages/sdk-metrics/test/state/DeltaMetricProcessor.test.ts index 2bff4bec71..d2d69f8432 100644 --- a/packages/sdk-metrics/test/state/DeltaMetricProcessor.test.ts +++ b/packages/sdk-metrics/test/state/DeltaMetricProcessor.test.ts @@ -28,7 +28,12 @@ describe('DeltaMetricProcessor', () => { for (const value of commonValues) { for (const attributes of commonAttributes) { - metricProcessor.record(value, attributes, api.context.active(), [0, 0]); + metricProcessor.record( + value, + attributes, + api.context.active(), + [0, 0] + ); } } }); @@ -38,7 +43,12 @@ describe('DeltaMetricProcessor', () => { for (const value of commonValues) { for (const attributes of commonAttributes) { - metricProcessor.record(value, attributes, api.context.active(), [0, 0]); + metricProcessor.record( + value, + attributes, + api.context.active(), + [0, 0] + ); } } }); diff --git a/packages/sdk-metrics/test/state/MeterSharedState.test.ts b/packages/sdk-metrics/test/state/MeterSharedState.test.ts index 
21f2dce7fd..55f7fe5386 100644 --- a/packages/sdk-metrics/test/state/MeterSharedState.test.ts +++ b/packages/sdk-metrics/test/state/MeterSharedState.test.ts @@ -22,10 +22,18 @@ import { View, Aggregation, MetricReader, - InstrumentType + InstrumentType, } from '../../src'; -import { assertMetricData, defaultInstrumentationScope, defaultResource, sleep } from '../util'; -import { TestDeltaMetricReader, TestMetricReader } from '../export/TestMetricReader'; +import { + assertMetricData, + defaultInstrumentationScope, + defaultResource, + sleep, +} from '../util'; +import { + TestDeltaMetricReader, + TestMetricReader, +} from '../export/TestMetricReader'; import { MeterSharedState } from '../../src/state/MeterSharedState'; import { CollectionResult } from '../../src/export/MetricData'; import { Meter } from '../../src/Meter'; @@ -47,7 +55,9 @@ describe('MeterSharedState', () => { return { meter, - meterSharedState: meterProvider['_sharedState'].getMeterSharedState({ name: 'test-meter' }), + meterSharedState: meterProvider['_sharedState'].getMeterSharedState({ + name: 'test-meter', + }), collectors: Array.from(meterProvider['_sharedState'].metricCollectors), }; } @@ -59,15 +69,20 @@ describe('MeterSharedState', () => { }, }); const { meter, meterSharedState, collectors } = setupMeter( - [ new View({ instrumentName: 'test-counter' }) ], - [reader], + [new View({ instrumentName: 'test-counter' })], + [reader] ); meter.createCounter('test-counter'); - const metricStorages = meterSharedState.metricStorageRegistry.getStorages(collectors[0]); + const metricStorages = meterSharedState.metricStorageRegistry.getStorages( + collectors[0] + ); assert.strictEqual(metricStorages.length, 1); - assert.strictEqual(metricStorages[0].getInstrumentDescriptor().name, 'test-counter'); + assert.strictEqual( + metricStorages[0].getInstrumentDescriptor().name, + 'test-counter' + ); }); it('should register metric storages with views', () => { @@ -77,15 +92,20 @@ describe('MeterSharedState', () => { }, }); const { meter, meterSharedState, collectors } = setupMeter( - [ new View({ instrumentName: 'test-counter' }) ], - [reader], + [new View({ instrumentName: 'test-counter' })], + [reader] ); meter.createCounter('test-counter'); - const metricStorages = meterSharedState.metricStorageRegistry.getStorages(collectors[0]); + const metricStorages = meterSharedState.metricStorageRegistry.getStorages( + collectors[0] + ); assert.strictEqual(metricStorages.length, 1); - assert.strictEqual(metricStorages[0].getInstrumentDescriptor().name, 'test-counter'); + assert.strictEqual( + metricStorages[0].getInstrumentDescriptor().name, + 'test-counter' + ); }); it('should register metric storages with the collector', () => { @@ -94,20 +114,28 @@ describe('MeterSharedState', () => { return Aggregation.Drop(); }, }); - const readerAggregationSelectorSpy = sinon.spy(reader, 'selectAggregation'); + const readerAggregationSelectorSpy = sinon.spy( + reader, + 'selectAggregation' + ); const { meter, meterSharedState, collectors } = setupMeter( - [], /** no views registered */ - [reader], + [] /** no views registered */, + [reader] ); meter.createCounter('test-counter'); - const metricStorages = meterSharedState.metricStorageRegistry.getStorages(collectors[0]); + const metricStorages = meterSharedState.metricStorageRegistry.getStorages( + collectors[0] + ); // Should select aggregation with the metric reader. 
assert.strictEqual(readerAggregationSelectorSpy.callCount, 1); assert.strictEqual(metricStorages.length, 1); - assert.strictEqual(metricStorages[0].getInstrumentDescriptor().name, 'test-counter'); + assert.strictEqual( + metricStorages[0].getInstrumentDescriptor().name, + 'test-counter' + ); }); it('should register metric storages with collectors', () => { @@ -123,28 +151,44 @@ describe('MeterSharedState', () => { }); const { meter, meterSharedState, collectors } = setupMeter( - [], /** no views registered */ - [reader, reader2], + [] /** no views registered */, + [reader, reader2] ); meter.createCounter('test-counter'); - const metricStorages = meterSharedState.metricStorageRegistry.getStorages(collectors[0]); - const metricStorages2 = meterSharedState.metricStorageRegistry.getStorages(collectors[1]); + const metricStorages = meterSharedState.metricStorageRegistry.getStorages( + collectors[0] + ); + const metricStorages2 = + meterSharedState.metricStorageRegistry.getStorages(collectors[1]); // Should select aggregation with the metric reader. assert.strictEqual(metricStorages.length, 1); - assert.strictEqual(metricStorages[0].getInstrumentDescriptor().name, 'test-counter'); + assert.strictEqual( + metricStorages[0].getInstrumentDescriptor().name, + 'test-counter' + ); assert.strictEqual(metricStorages2.length, 1); - assert.strictEqual(metricStorages2[0].getInstrumentDescriptor().name, 'test-counter'); + assert.strictEqual( + metricStorages2[0].getInstrumentDescriptor().name, + 'test-counter' + ); - assert.notStrictEqual(metricStorages[0], metricStorages2[0], 'should create a distinct metric storage for each metric reader'); + assert.notStrictEqual( + metricStorages[0], + metricStorages2[0], + 'should create a distinct metric storage for each metric reader' + ); }); }); describe('collect', () => { function setupInstruments(views?: View[]) { - const meterProvider = new MeterProvider({ resource: defaultResource, views: views }); + const meterProvider = new MeterProvider({ + resource: defaultResource, + views: views, + }); const cumulativeReader = new TestMetricReader(); meterProvider.addMetricReader(cumulativeReader); @@ -156,12 +200,22 @@ describe('MeterSharedState', () => { const metricCollectors = [cumulativeCollector, deltaCollector]; - const meter = meterProvider.getMeter(defaultInstrumentationScope.name, defaultInstrumentationScope.version, { - schemaUrl: defaultInstrumentationScope.schemaUrl, - }) as Meter; + const meter = meterProvider.getMeter( + defaultInstrumentationScope.name, + defaultInstrumentationScope.version, + { + schemaUrl: defaultInstrumentationScope.schemaUrl, + } + ) as Meter; const meterSharedState = meter['_meterSharedState'] as MeterSharedState; - return { metricCollectors, cumulativeCollector, deltaCollector, meter, meterSharedState }; + return { + metricCollectors, + cumulativeCollector, + deltaCollector, + meter, + meterSharedState, + }; } it('should collect sync metrics', async () => { @@ -173,22 +227,28 @@ describe('MeterSharedState', () => { /** collect metrics */ counter.add(1); - await Promise.all(metricCollectors.map(async collector => { - const { resourceMetrics, errors } = await collector.collect(); - assert.strictEqual(errors.length, 0); - assert.strictEqual(resourceMetrics.scopeMetrics.length, 1); - assert.strictEqual(resourceMetrics.scopeMetrics[0].metrics.length, 1); - assertMetricData(resourceMetrics.scopeMetrics[0].metrics[0], DataPointType.SUM, { - name: 'test', - }); - })); + await Promise.all( + metricCollectors.map(async collector => { + const { 
resourceMetrics, errors } = await collector.collect(); + assert.strictEqual(errors.length, 0); + assert.strictEqual(resourceMetrics.scopeMetrics.length, 1); + assert.strictEqual(resourceMetrics.scopeMetrics[0].metrics.length, 1); + assertMetricData( + resourceMetrics.scopeMetrics[0].metrics[0], + DataPointType.SUM, + { + name: 'test', + } + ); + }) + ); }); it('should collect sync metrics with views', async () => { /** preparing test instrumentations */ const { metricCollectors, meter } = setupInstruments([ new View({ name: 'foo', instrumentName: 'test' }), - new View({ name: 'bar', instrumentName: 'test' }) + new View({ name: 'bar', instrumentName: 'test' }), ]); /** creating metric events */ @@ -196,18 +256,28 @@ describe('MeterSharedState', () => { /** collect metrics */ counter.add(1); - await Promise.all(metricCollectors.map(async collector => { - const { resourceMetrics, errors } = await collector.collect(); - assert.strictEqual(errors.length, 0); - assert.strictEqual(resourceMetrics.scopeMetrics.length, 1); - assert.strictEqual(resourceMetrics.scopeMetrics[0].metrics.length, 2); - assertMetricData(resourceMetrics.scopeMetrics[0].metrics[0], DataPointType.SUM, { - name: 'foo', - }); - assertMetricData(resourceMetrics.scopeMetrics[0].metrics[1], DataPointType.SUM, { - name: 'bar', - }); - })); + await Promise.all( + metricCollectors.map(async collector => { + const { resourceMetrics, errors } = await collector.collect(); + assert.strictEqual(errors.length, 0); + assert.strictEqual(resourceMetrics.scopeMetrics.length, 1); + assert.strictEqual(resourceMetrics.scopeMetrics[0].metrics.length, 2); + assertMetricData( + resourceMetrics.scopeMetrics[0].metrics[0], + DataPointType.SUM, + { + name: 'foo', + } + ); + assertMetricData( + resourceMetrics.scopeMetrics[0].metrics[1], + DataPointType.SUM, + { + name: 'bar', + } + ); + }) + ); }); it('should collect async metrics with callbacks', async () => { @@ -247,7 +317,7 @@ describe('MeterSharedState', () => { /** preparing test instrumentations */ const { metricCollectors, meter } = setupInstruments([ new View({ name: 'foo', instrumentName: 'test' }), - new View({ name: 'bar', instrumentName: 'test' }) + new View({ name: 'bar', instrumentName: 'test' }), ]); /** creating metric events */ @@ -266,18 +336,28 @@ describe('MeterSharedState', () => { assert.strictEqual(errors.length, 0); assert.strictEqual(resourceMetrics.scopeMetrics.length, 1); assert.strictEqual(resourceMetrics.scopeMetrics[0].metrics.length, 2); - assertMetricData(resourceMetrics.scopeMetrics[0].metrics[0], DataPointType.SUM, { - name: 'foo' - }); - assertMetricData(resourceMetrics.scopeMetrics[0].metrics[1], DataPointType.SUM, { - name: 'bar' - }); + assertMetricData( + resourceMetrics.scopeMetrics[0].metrics[0], + DataPointType.SUM, + { + name: 'foo', + } + ); + assertMetricData( + resourceMetrics.scopeMetrics[0].metrics[1], + DataPointType.SUM, + { + name: 'bar', + } + ); } /** collect metrics */ await Promise.all([ // initiate collection concurrently. - ...metricCollectors.map(collector => collector.collect().then(verifyResult)), + ...metricCollectors.map(collector => + collector.collect().then(verifyResult) + ), sleep(1).then(() => metricCollectors[0].collect().then(verifyResult)), ]); /** @@ -288,7 +368,9 @@ describe('MeterSharedState', () => { /** collect metrics */ await Promise.all([ // initiate collection concurrently. 
- ...metricCollectors.map(collector => collector.collect().then(verifyResult)), + ...metricCollectors.map(collector => + collector.collect().then(verifyResult) + ), sleep(1).then(() => metricCollectors[0].collect().then(verifyResult)), ]); assert.strictEqual(observableCalledCount, 6); diff --git a/packages/sdk-metrics/test/state/MetricCollector.test.ts b/packages/sdk-metrics/test/state/MetricCollector.test.ts index 6f316a11ed..3c70b35759 100644 --- a/packages/sdk-metrics/test/state/MetricCollector.test.ts +++ b/packages/sdk-metrics/test/state/MetricCollector.test.ts @@ -29,7 +29,10 @@ import { ObservableCallbackDelegate, BatchObservableCallbackDelegate, } from '../util'; -import { TestDeltaMetricReader, TestMetricReader } from '../export/TestMetricReader'; +import { + TestDeltaMetricReader, + TestMetricReader, +} from '../export/TestMetricReader'; describe('MetricCollector', () => { afterEach(() => { @@ -38,10 +41,14 @@ describe('MetricCollector', () => { describe('constructor', () => { it('should construct MetricCollector without exceptions', () => { - const meterProviderSharedState = new MeterProviderSharedState(defaultResource); - const readers = [ new TestMetricReader(), new TestDeltaMetricReader() ]; + const meterProviderSharedState = new MeterProviderSharedState( + defaultResource + ); + const readers = [new TestMetricReader(), new TestDeltaMetricReader()]; for (const reader of readers) { - assert.doesNotThrow(() => new MetricCollector(meterProviderSharedState, reader)); + assert.doesNotThrow( + () => new MetricCollector(meterProviderSharedState, reader) + ); } }); }); @@ -54,9 +61,13 @@ describe('MetricCollector', () => { meterProvider.addMetricReader(reader); const metricCollector = reader.getMetricCollector(); - const meter = meterProvider.getMeter(defaultInstrumentationScope.name, defaultInstrumentationScope.version, { - schemaUrl: defaultInstrumentationScope.schemaUrl, - }); + const meter = meterProvider.getMeter( + defaultInstrumentationScope.name, + defaultInstrumentationScope.version, + { + schemaUrl: defaultInstrumentationScope.schemaUrl, + } + ); return { metricCollector, meter }; } @@ -83,7 +94,7 @@ describe('MetricCollector', () => { /** checking batch[0] */ const metricData1 = metrics[0]; assertMetricData(metricData1, DataPointType.SUM, { - name: 'counter1' + name: 'counter1', }); assert.strictEqual(metricData1.dataPoints.length, 2); assertDataPoint(metricData1.dataPoints[0], {}, 1); @@ -92,7 +103,7 @@ describe('MetricCollector', () => { /** checking batch[1] */ const metricData2 = metrics[1]; assertMetricData(metricData2, DataPointType.SUM, { - name: 'counter2' + name: 'counter2', }); assert.strictEqual(metricData2.dataPoints.length, 1); assertDataPoint(metricData2.dataPoints[0], {}, 3); @@ -116,7 +127,10 @@ describe('MetricCollector', () => { const delegate2 = new BatchObservableCallbackDelegate(); const observableCounter2 = meter.createObservableCounter('observable2'); const observableCounter3 = meter.createObservableCounter('observable3'); - meter.addBatchObservableCallback(delegate2.getCallback(), [ observableCounter2, observableCounter3 ]); + meter.addBatchObservableCallback(delegate2.getCallback(), [ + observableCounter2, + observableCounter3, + ]); delegate2.setDelegate(observableResult => { observableResult.observe(observableCounter2, 3, {}); observableResult.observe(observableCounter2, 4, { foo: 'bar' }); @@ -132,7 +146,7 @@ describe('MetricCollector', () => { /** checking batch[0] */ const metricData1 = metrics[0]; assertMetricData(metricData1, 
DataPointType.SUM, { - name: 'observable1' + name: 'observable1', }); assert.strictEqual(metricData1.dataPoints.length, 2); assertDataPoint(metricData1.dataPoints[0], {}, 1); @@ -141,7 +155,7 @@ describe('MetricCollector', () => { /** checking batch[1] */ const metricData2 = metrics[1]; assertMetricData(metricData2, DataPointType.SUM, { - name: 'observable2' + name: 'observable2', }); assert.strictEqual(metricData2.dataPoints.length, 2); assertDataPoint(metricData2.dataPoints[0], {}, 3); @@ -150,7 +164,7 @@ describe('MetricCollector', () => { /** checking batch[2] */ const metricData3 = metrics[2]; assertMetricData(metricData3, DataPointType.SUM, { - name: 'observable3' + name: 'observable3', }); assert.strictEqual(metricData3.dataPoints.length, 0); }); @@ -195,13 +209,13 @@ describe('MetricCollector', () => { /** observer1 */ assertMetricData(metrics[0], DataPointType.SUM, { - name: 'observer1' + name: 'observer1', }); assert.strictEqual(metrics[0].dataPoints.length, 0); /** observer2 */ assertMetricData(metrics[1], DataPointType.SUM, { - name: 'observer2' + name: 'observer2', }); assert.strictEqual(metrics[1].dataPoints.length, 1); } @@ -225,14 +239,14 @@ describe('MetricCollector', () => { /** observer1 */ assertMetricData(metrics[0], DataPointType.SUM, { - name: 'observer1' + name: 'observer1', }); assert.strictEqual(metrics[0].dataPoints.length, 1); assertDataPoint(metrics[0].dataPoints[0], {}, 100); /** observer2 */ assertMetricData(metrics[1], DataPointType.SUM, { - name: 'observer2' + name: 'observer2', }); assert.strictEqual(metrics[1].dataPoints.length, 1); } @@ -262,13 +276,13 @@ describe('MetricCollector', () => { /** counter1 data points are collected */ assertMetricData(metrics[0], DataPointType.SUM, { - name: 'counter1' + name: 'counter1', }); assert.strictEqual(metrics[0].dataPoints.length, 1); /** observer1 data points are not collected */ assertMetricData(metrics[1], DataPointType.SUM, { - name: 'observer1' + name: 'observer1', }); assert.strictEqual(metrics[1].dataPoints.length, 0); }); @@ -283,7 +297,9 @@ describe('MetricCollector', () => { /** observer1 is an abnormal observer */ const observableCounter1 = meter.createObservableCounter('observer1'); const delegate1 = new BatchObservableCallbackDelegate(); - meter.addBatchObservableCallback(delegate1.getCallback(), [ observableCounter1 ]); + meter.addBatchObservableCallback(delegate1.getCallback(), [ + observableCounter1, + ]); delegate1.setDelegate(_observableResult => { return new Promise(() => { /** promise never settles */ @@ -293,7 +309,9 @@ describe('MetricCollector', () => { /** observer2 is a normal observer */ const observableCounter2 = meter.createObservableCounter('observer2'); const delegate2 = new BatchObservableCallbackDelegate(); - meter.addBatchObservableCallback(delegate2.getCallback(), [ observableCounter2 ]); + meter.addBatchObservableCallback(delegate2.getCallback(), [ + observableCounter2, + ]); delegate2.setDelegate(observableResult => { observableResult.observe(observableCounter2, 1, {}); }); @@ -313,13 +331,13 @@ describe('MetricCollector', () => { /** observer1 */ assertMetricData(metrics[0], DataPointType.SUM, { - name: 'observer1' + name: 'observer1', }); assert.strictEqual(metrics[0].dataPoints.length, 0); /** observer2 */ assertMetricData(metrics[1], DataPointType.SUM, { - name: 'observer2' + name: 'observer2', }); assert.strictEqual(metrics[1].dataPoints.length, 1); } @@ -343,14 +361,14 @@ describe('MetricCollector', () => { /** observer1 */ assertMetricData(metrics[0], DataPointType.SUM, { 
- name: 'observer1' + name: 'observer1', }); assert.strictEqual(metrics[0].dataPoints.length, 1); assertDataPoint(metrics[0].dataPoints[0], {}, 100); /** observer2 */ assertMetricData(metrics[1], DataPointType.SUM, { - name: 'observer2' + name: 'observer2', }); assert.strictEqual(metrics[1].dataPoints.length, 1); } @@ -367,7 +385,9 @@ describe('MetricCollector', () => { /** observer1 is an abnormal observer */ const observableCounter1 = meter.createObservableCounter('observer1'); const delegate1 = new BatchObservableCallbackDelegate(); - meter.addBatchObservableCallback(delegate1.getCallback(), [ observableCounter1 ]); + meter.addBatchObservableCallback(delegate1.getCallback(), [ + observableCounter1, + ]); delegate1.setDelegate(_observableResult => { throw new Error('foobar'); }); @@ -382,13 +402,13 @@ describe('MetricCollector', () => { /** counter1 data points are collected */ assertMetricData(metrics[0], DataPointType.SUM, { - name: 'counter1' + name: 'counter1', }); assert.strictEqual(metrics[0].dataPoints.length, 1); /** observer1 data points are not collected */ assertMetricData(metrics[1], DataPointType.SUM, { - name: 'observer1' + name: 'observer1', }); assert.strictEqual(metrics[1].dataPoints.length, 0); }); diff --git a/packages/sdk-metrics/test/state/MetricStorageRegistry.test.ts b/packages/sdk-metrics/test/state/MetricStorageRegistry.test.ts index 53ffa5a75a..62d305537b 100644 --- a/packages/sdk-metrics/test/state/MetricStorageRegistry.test.ts +++ b/packages/sdk-metrics/test/state/MetricStorageRegistry.test.ts @@ -25,22 +25,26 @@ import * as assert from 'assert'; import * as sinon from 'sinon'; import { getDescriptionResolutionRecipe, - getTypeConflictResolutionRecipe, getUnitConflictResolutionRecipe, - getValueTypeConflictResolutionRecipe + getTypeConflictResolutionRecipe, + getUnitConflictResolutionRecipe, + getValueTypeConflictResolutionRecipe, } from '../../src/view/RegistrationConflicts'; class TestMetricStorage extends MetricStorage { collect( collector: MetricCollectorHandle, collectors: MetricCollectorHandle[], - collectionTime: HrTime, + collectionTime: HrTime ): Maybe { return undefined; } } describe('MetricStorageRegistry', () => { - let spyLoggerWarn: sinon.SinonStub<[message: string, ...args: unknown[]], void>; + let spyLoggerWarn: sinon.SinonStub< + [message: string, ...args: unknown[]], + void + >; beforeEach(() => { spyLoggerWarn = sinon.stub(diag, 'warn'); @@ -69,7 +73,7 @@ describe('MetricStorageRegistry', () => { type: InstrumentType.COUNTER, description: 'description', unit: '1', - valueType: ValueType.DOUBLE + valueType: ValueType.DOUBLE, }); registry.register(storage); @@ -88,14 +92,14 @@ describe('MetricStorageRegistry', () => { type: InstrumentType.COUNTER, description: 'description', unit: '1', - valueType: ValueType.DOUBLE + valueType: ValueType.DOUBLE, }); const storage2 = new TestMetricStorage({ name: 'instrument2', type: InstrumentType.COUNTER, description: 'description', unit: '1', - valueType: ValueType.DOUBLE + valueType: ValueType.DOUBLE, }); registry.registerForCollector(collectorHandle, storage); @@ -103,7 +107,10 @@ describe('MetricStorageRegistry', () => { registry.registerForCollector(collectorHandle2, storage2); assert.deepStrictEqual(registry.getStorages(collectorHandle), [storage]); - assert.deepStrictEqual(registry.getStorages(collectorHandle2), [storage, storage2]); + assert.deepStrictEqual(registry.getStorages(collectorHandle2), [ + storage, + storage2, + ]); }); }); @@ -118,11 +125,17 @@ describe('MetricStorageRegistry', () => { 
const storage = new TestMetricStorage(existingDescriptor); const otherStorage = new TestMetricStorage(otherDescriptor); - assert.strictEqual(registry.findOrUpdateCompatibleStorage(existingDescriptor), null); + assert.strictEqual( + registry.findOrUpdateCompatibleStorage(existingDescriptor), + null + ); registry.register(storage); assertLogNotCalled(); - assert.strictEqual(registry.findOrUpdateCompatibleStorage(otherDescriptor), null); + assert.strictEqual( + registry.findOrUpdateCompatibleStorage(otherDescriptor), + null + ); // warned assertLogCalledOnce(); assertFirstLogContains(expectedLog); @@ -139,7 +152,7 @@ describe('MetricStorageRegistry', () => { type: InstrumentType.COUNTER, description: 'description', unit: '1', - valueType: ValueType.DOUBLE + valueType: ValueType.DOUBLE, }; const otherDescriptor = { @@ -147,12 +160,14 @@ describe('MetricStorageRegistry', () => { type: InstrumentType.UP_DOWN_COUNTER, description: 'description', unit: '1', - valueType: ValueType.DOUBLE + valueType: ValueType.DOUBLE, }; - testConflictingRegistration(existingDescriptor, + testConflictingRegistration( + existingDescriptor, otherDescriptor, - getTypeConflictResolutionRecipe(existingDescriptor, otherDescriptor)); + getTypeConflictResolutionRecipe(existingDescriptor, otherDescriptor) + ); }); it('warn when instrument with same name and different value type is already registered', () => { @@ -161,7 +176,7 @@ describe('MetricStorageRegistry', () => { type: InstrumentType.COUNTER, description: 'description', unit: '1', - valueType: ValueType.DOUBLE + valueType: ValueType.DOUBLE, }; const otherDescriptor = { @@ -169,12 +184,17 @@ describe('MetricStorageRegistry', () => { type: InstrumentType.COUNTER, description: 'description', unit: '1', - valueType: ValueType.INT + valueType: ValueType.INT, }; - testConflictingRegistration(existingDescriptor, + testConflictingRegistration( + existingDescriptor, otherDescriptor, - getValueTypeConflictResolutionRecipe(existingDescriptor, otherDescriptor)); + getValueTypeConflictResolutionRecipe( + existingDescriptor, + otherDescriptor + ) + ); }); it('warn when instrument with same name and different unit is already registered', () => { @@ -183,7 +203,7 @@ describe('MetricStorageRegistry', () => { type: InstrumentType.COUNTER, description: 'description', unit: '1', - valueType: ValueType.DOUBLE + valueType: ValueType.DOUBLE, }; const otherDescriptor = { @@ -191,12 +211,14 @@ describe('MetricStorageRegistry', () => { type: InstrumentType.COUNTER, description: 'description', unit: 'ms', - valueType: ValueType.DOUBLE + valueType: ValueType.DOUBLE, }; - testConflictingRegistration(existingDescriptor, + testConflictingRegistration( + existingDescriptor, otherDescriptor, - getUnitConflictResolutionRecipe(existingDescriptor, otherDescriptor)); + getUnitConflictResolutionRecipe(existingDescriptor, otherDescriptor) + ); }); it('warn when instrument with same name and different description is already registered', () => { @@ -205,7 +227,7 @@ describe('MetricStorageRegistry', () => { type: InstrumentType.COUNTER, description: 'description', unit: '1', - valueType: ValueType.DOUBLE + valueType: ValueType.DOUBLE, }; const otherDescriptor = { @@ -213,7 +235,7 @@ describe('MetricStorageRegistry', () => { type: InstrumentType.COUNTER, description: 'longer description', unit: '1', - valueType: ValueType.DOUBLE + valueType: ValueType.DOUBLE, }; const registry = new MetricStorageRegistry(); @@ -222,17 +244,28 @@ describe('MetricStorageRegistry', () => { const otherStorage = new 
TestMetricStorage(otherDescriptor); // register the first storage. - assert.strictEqual(registry.findOrUpdateCompatibleStorage(existingDescriptor), null); + assert.strictEqual( + registry.findOrUpdateCompatibleStorage(existingDescriptor), + null + ); registry.register(storage); // register the second storage. - assert.strictEqual(registry.findOrUpdateCompatibleStorage(otherDescriptor), storage); + assert.strictEqual( + registry.findOrUpdateCompatibleStorage(otherDescriptor), + storage + ); // original storage now has the updated (longer) description. - assert.strictEqual(otherStorage.getInstrumentDescriptor().description, otherDescriptor.description); + assert.strictEqual( + otherStorage.getInstrumentDescriptor().description, + otherDescriptor.description + ); // log called exactly once assertLogCalledOnce(); // added resolution recipe to the log - assertFirstLogContains(getDescriptionResolutionRecipe(existingDescriptor, otherDescriptor)); + assertFirstLogContains( + getDescriptionResolutionRecipe(existingDescriptor, otherDescriptor) + ); }); it('should return the existing instrument if a compatible async instrument is already registered', () => { @@ -242,13 +275,16 @@ describe('MetricStorageRegistry', () => { type: InstrumentType.OBSERVABLE_COUNTER, description: 'description', unit: '1', - valueType: ValueType.DOUBLE + valueType: ValueType.DOUBLE, }; const storage = new TestMetricStorage(descriptor); registry.register(storage); - assert.strictEqual(registry.findOrUpdateCompatibleStorage(descriptor), storage); + assert.strictEqual( + registry.findOrUpdateCompatibleStorage(descriptor), + storage + ); }); it('should return the existing instrument if a compatible sync instrument is already registered', () => { @@ -258,13 +294,16 @@ describe('MetricStorageRegistry', () => { type: InstrumentType.COUNTER, description: 'description', unit: '1', - valueType: ValueType.DOUBLE + valueType: ValueType.DOUBLE, }; const storage = new TestMetricStorage(descriptor); registry.register(storage); - assert.strictEqual(registry.findOrUpdateCompatibleStorage(descriptor), storage); + assert.strictEqual( + registry.findOrUpdateCompatibleStorage(descriptor), + storage + ); }); function assertLogNotCalled() { @@ -276,7 +315,10 @@ describe('MetricStorageRegistry', () => { } function assertFirstLogContains(expectedString: string) { - assert.ok(spyLoggerWarn.args[0].includes(expectedString), 'Logs did not include: ' + expectedString); + assert.ok( + spyLoggerWarn.args[0].includes(expectedString), + 'Logs did not include: ' + expectedString + ); } }); @@ -287,7 +329,7 @@ describe('MetricStorageRegistry', () => { type: InstrumentType.COUNTER, description: 'description', unit: '1', - valueType: ValueType.DOUBLE + valueType: ValueType.DOUBLE, }; const otherDescriptor = { @@ -295,7 +337,7 @@ describe('MetricStorageRegistry', () => { type: InstrumentType.UP_DOWN_COUNTER, description: 'description', unit: '1', - valueType: ValueType.DOUBLE + valueType: ValueType.DOUBLE, }; const registry = new MetricStorageRegistry(); @@ -303,11 +345,23 @@ describe('MetricStorageRegistry', () => { const storage = new TestMetricStorage(existingDescriptor); const otherStorage = new TestMetricStorage(otherDescriptor); - assert.strictEqual(registry.findOrUpdateCompatibleCollectorStorage(collectorHandle, existingDescriptor), null); + assert.strictEqual( + registry.findOrUpdateCompatibleCollectorStorage( + collectorHandle, + existingDescriptor + ), + null + ); registry.registerForCollector(collectorHandle, storage); // Should not return an 
existing metric storage. - assert.strictEqual(registry.findOrUpdateCompatibleCollectorStorage(collectorHandle, otherDescriptor), null); + assert.strictEqual( + registry.findOrUpdateCompatibleCollectorStorage( + collectorHandle, + otherDescriptor + ), + null + ); registry.registerForCollector(collectorHandle, otherStorage); // registered both storages @@ -321,16 +375,28 @@ describe('MetricStorageRegistry', () => { type: InstrumentType.COUNTER, description: 'description', unit: '1', - valueType: ValueType.DOUBLE + valueType: ValueType.DOUBLE, }; const registry = new MetricStorageRegistry(); const storage = new TestMetricStorage(descriptor); - assert.strictEqual(registry.findOrUpdateCompatibleCollectorStorage(collectorHandle, descriptor), null); + assert.strictEqual( + registry.findOrUpdateCompatibleCollectorStorage( + collectorHandle, + descriptor + ), + null + ); registry.registerForCollector(collectorHandle, storage); - assert.strictEqual(registry.findOrUpdateCompatibleCollectorStorage(collectorHandle2, descriptor), null); + assert.strictEqual( + registry.findOrUpdateCompatibleCollectorStorage( + collectorHandle2, + descriptor + ), + null + ); registry.registerForCollector(collectorHandle2, storage); // registered the storage for each collector diff --git a/packages/sdk-metrics/test/state/MultiWritableMetricStorage.test.ts b/packages/sdk-metrics/test/state/MultiWritableMetricStorage.test.ts index 1ccae07723..f2ee109f13 100644 --- a/packages/sdk-metrics/test/state/MultiWritableMetricStorage.test.ts +++ b/packages/sdk-metrics/test/state/MultiWritableMetricStorage.test.ts @@ -20,7 +20,12 @@ import { hrTime } from '@opentelemetry/core'; import * as assert from 'assert'; import { MultiMetricStorage } from '../../src/state/MultiWritableMetricStorage'; import { WritableMetricStorage } from '../../src/state/WritableMetricStorage'; -import { assertMeasurementEqual, commonAttributes, commonValues, Measurement } from '../util'; +import { + assertMeasurementEqual, + commonAttributes, + commonValues, + Measurement, +} from '../util'; describe('MultiMetricStorage', () => { describe('record', () => { @@ -37,14 +42,21 @@ describe('MultiMetricStorage', () => { it('record with multiple backing storages', () => { class TestWritableMetricStorage implements WritableMetricStorage { records: Measurement[] = []; - record(value: number, attributes: MetricAttributes, context: api.Context): void { + record( + value: number, + attributes: MetricAttributes, + context: api.Context + ): void { this.records.push({ value, attributes, context }); } } const backingStorage1 = new TestWritableMetricStorage(); const backingStorage2 = new TestWritableMetricStorage(); - const metricStorage = new MultiMetricStorage([backingStorage1, backingStorage2]); + const metricStorage = new MultiMetricStorage([ + backingStorage1, + backingStorage2, + ]); const expectedMeasurements: Measurement[] = []; for (const value of commonValues) { @@ -55,8 +67,14 @@ describe('MultiMetricStorage', () => { } } - assert.strictEqual(backingStorage1.records.length, expectedMeasurements.length); - assert.strictEqual(backingStorage2.records.length, expectedMeasurements.length); + assert.strictEqual( + backingStorage1.records.length, + expectedMeasurements.length + ); + assert.strictEqual( + backingStorage2.records.length, + expectedMeasurements.length + ); for (const [idx, expected] of expectedMeasurements.entries()) { assertMeasurementEqual(backingStorage1.records[idx], expected); assertMeasurementEqual(backingStorage2.records[idx], expected); diff --git 
a/packages/sdk-metrics/test/state/ObservableRegistry.test.ts b/packages/sdk-metrics/test/state/ObservableRegistry.test.ts index 7be5b0b96b..8486042f88 100644 --- a/packages/sdk-metrics/test/state/ObservableRegistry.test.ts +++ b/packages/sdk-metrics/test/state/ObservableRegistry.test.ts @@ -28,8 +28,16 @@ describe('ObservableRegistry', () => { beforeEach(() => { observableRegistry = new ObservableRegistry(); - instrument1 = new ObservableInstrument(defaultInstrumentDescriptor, [], observableRegistry); - instrument2 = new ObservableInstrument(defaultInstrumentDescriptor, [], observableRegistry); + instrument1 = new ObservableInstrument( + defaultInstrumentDescriptor, + [], + observableRegistry + ); + instrument2 = new ObservableInstrument( + defaultInstrumentDescriptor, + [], + observableRegistry + ); }); describe('addCallback', () => { @@ -38,11 +46,23 @@ describe('ObservableRegistry', () => { observableRegistry.addCallback(callback2, instrument1); assert.strictEqual(observableRegistry['_callbacks'].length, 2); - assert.strictEqual(observableRegistry['_callbacks'][0].callback, callback1); - assert.strictEqual(observableRegistry['_callbacks'][0].instrument, instrument1); - - assert.strictEqual(observableRegistry['_callbacks'][1].callback, callback2); - assert.strictEqual(observableRegistry['_callbacks'][1].instrument, instrument1); + assert.strictEqual( + observableRegistry['_callbacks'][0].callback, + callback1 + ); + assert.strictEqual( + observableRegistry['_callbacks'][0].instrument, + instrument1 + ); + + assert.strictEqual( + observableRegistry['_callbacks'][1].callback, + callback2 + ); + assert.strictEqual( + observableRegistry['_callbacks'][1].instrument, + instrument1 + ); }); it('should not add duplicated callbacks', () => { @@ -50,8 +70,14 @@ describe('ObservableRegistry', () => { observableRegistry.addCallback(callback1, instrument1); assert.strictEqual(observableRegistry['_callbacks'].length, 1); - assert.strictEqual(observableRegistry['_callbacks'][0].callback, callback1); - assert.strictEqual(observableRegistry['_callbacks'][0].instrument, instrument1); + assert.strictEqual( + observableRegistry['_callbacks'][0].callback, + callback1 + ); + assert.strictEqual( + observableRegistry['_callbacks'][0].instrument, + instrument1 + ); }); }); @@ -76,8 +102,14 @@ describe('ObservableRegistry', () => { observableRegistry.addBatchCallback(callback2, [instrument1]); // duplicated pairs. 
- observableRegistry.addBatchCallback(callback1, [instrument1, instrument2]); - observableRegistry.addBatchCallback(callback1, [instrument1, instrument2]); + observableRegistry.addBatchCallback(callback1, [ + instrument1, + instrument2, + ]); + observableRegistry.addBatchCallback(callback1, [ + instrument1, + instrument2, + ]); assert.strictEqual(observableRegistry['_batchCallbacks'].length, 3); }); @@ -85,7 +117,12 @@ describe('ObservableRegistry', () => { it('should ignore callback without associated instruments', () => { observableRegistry.addBatchCallback(callback1, []); // eslint-disable-next-line no-sparse-arrays - observableRegistry.addBatchCallback(callback1, [1, /* hole */, undefined, 2] as unknown as ObservableInstrument[]); + observableRegistry.addBatchCallback(callback1, [ + 1 /* hole */, + , + undefined, + 2, + ] as unknown as ObservableInstrument[]); assert.strictEqual(observableRegistry['_batchCallbacks'].length, 0); }); @@ -95,15 +132,24 @@ describe('ObservableRegistry', () => { it('should remove callback with associated instruments', () => { observableRegistry.addBatchCallback(callback1, [instrument1]); observableRegistry.addBatchCallback(callback2, [instrument1]); - observableRegistry.addBatchCallback(callback1, [instrument1, instrument2]); + observableRegistry.addBatchCallback(callback1, [ + instrument1, + instrument2, + ]); assert.strictEqual(observableRegistry['_batchCallbacks'].length, 3); observableRegistry.removeBatchCallback(callback1, [instrument1]); assert.strictEqual(observableRegistry['_batchCallbacks'].length, 2); // remove twice - observableRegistry.removeBatchCallback(callback1, [instrument1, instrument2]); - observableRegistry.removeBatchCallback(callback1, [instrument1, instrument2]); + observableRegistry.removeBatchCallback(callback1, [ + instrument1, + instrument2, + ]); + observableRegistry.removeBatchCallback(callback1, [ + instrument1, + instrument2, + ]); assert.strictEqual(observableRegistry['_batchCallbacks'].length, 1); }); }); diff --git a/packages/sdk-metrics/test/state/SyncMetricStorage.test.ts b/packages/sdk-metrics/test/state/SyncMetricStorage.test.ts index 13f0b91492..8e568be19e 100644 --- a/packages/sdk-metrics/test/state/SyncMetricStorage.test.ts +++ b/packages/sdk-metrics/test/state/SyncMetricStorage.test.ts @@ -23,7 +23,13 @@ import { DataPointType } from '../../src/export/MetricData'; import { MetricCollectorHandle } from '../../src/state/MetricCollector'; import { SyncMetricStorage } from '../../src/state/SyncMetricStorage'; import { NoopAttributesProcessor } from '../../src/view/AttributesProcessor'; -import { assertMetricData, assertDataPoint, commonAttributes, commonValues, defaultInstrumentDescriptor } from '../util'; +import { + assertMetricData, + assertDataPoint, + commonAttributes, + commonValues, + defaultInstrumentDescriptor, +} from '../util'; const deltaCollector: MetricCollectorHandle = { selectAggregationTemporality: () => AggregationTemporality.DELTA, @@ -66,7 +72,8 @@ describe('SyncMetricStorage', () => { const metric = metricStorage.collect( deltaCollector, collectors, - [3, 3]); + [3, 3] + ); assertMetricData(metric, DataPointType.SUM); assert.strictEqual(metric.dataPoints.length, 1); @@ -78,7 +85,8 @@ describe('SyncMetricStorage', () => { const metric = metricStorage.collect( deltaCollector, collectors, - [4, 4]); + [4, 4] + ); assertMetricData(metric, DataPointType.SUM); assert.strictEqual(metric.dataPoints.length, 0); @@ -89,7 +97,8 @@ describe('SyncMetricStorage', () => { const metric = metricStorage.collect( 
deltaCollector, [deltaCollector], - [6, 6]); + [6, 6] + ); assertMetricData(metric, DataPointType.SUM); assert.strictEqual(metric.dataPoints.length, 1); @@ -113,7 +122,8 @@ describe('SyncMetricStorage', () => { const metric = metricStorage.collect( cumulativeCollector, collectors, - [3, 3]); + [3, 3] + ); assertMetricData(metric, DataPointType.SUM); assert.strictEqual(metric.dataPoints.length, 1); @@ -125,7 +135,8 @@ describe('SyncMetricStorage', () => { const metric = metricStorage.collect( cumulativeCollector, collectors, - [4, 4]); + [4, 4] + ); assertMetricData(metric, DataPointType.SUM); assert.strictEqual(metric.dataPoints.length, 1); @@ -137,7 +148,8 @@ describe('SyncMetricStorage', () => { const metric = metricStorage.collect( cumulativeCollector, collectors, - [6, 6]); + [6, 6] + ); assertMetricData(metric, DataPointType.SUM); assert.strictEqual(metric.dataPoints.length, 1); diff --git a/packages/sdk-metrics/test/state/TemporalMetricProcessor.test.ts b/packages/sdk-metrics/test/state/TemporalMetricProcessor.test.ts index 27bc2f1bbf..8d0f165cd0 100644 --- a/packages/sdk-metrics/test/state/TemporalMetricProcessor.test.ts +++ b/packages/sdk-metrics/test/state/TemporalMetricProcessor.test.ts @@ -23,7 +23,11 @@ import { DataPointType } from '../../src/export/MetricData'; import { DeltaMetricProcessor } from '../../src/state/DeltaMetricProcessor'; import { MetricCollectorHandle } from '../../src/state/MetricCollector'; import { TemporalMetricProcessor } from '../../src/state/TemporalMetricProcessor'; -import { assertMetricData, assertDataPoint, defaultInstrumentDescriptor } from '../util'; +import { + assertMetricData, + assertDataPoint, + defaultInstrumentDescriptor, +} from '../util'; const deltaCollector1: MetricCollectorHandle = { selectAggregationTemporality: () => AggregationTemporality.DELTA, @@ -44,7 +48,7 @@ describe('TemporalMetricProcessor', () => { describe('buildMetrics', () => { describe('single delta collector', () => { - const collectors = [ deltaCollector1 ]; + const collectors = [deltaCollector1]; it('should build delta recording metrics', () => { const spy = sinon.spy(deltaCollector1, 'selectAggregationTemporality'); @@ -60,12 +64,15 @@ describe('TemporalMetricProcessor', () => { collectors, defaultInstrumentDescriptor, deltaMetricStorage.collect(), - [2, 2]); + [2, 2] + ); - assertMetricData(metric, + assertMetricData( + metric, DataPointType.SUM, defaultInstrumentDescriptor, - AggregationTemporality.DELTA); + AggregationTemporality.DELTA + ); assert.strictEqual(metric.dataPoints.length, 1); assertDataPoint(metric.dataPoints[0], {}, 1, [1, 1], [2, 2]); } @@ -77,12 +84,15 @@ describe('TemporalMetricProcessor', () => { collectors, defaultInstrumentDescriptor, deltaMetricStorage.collect(), - [4, 4]); + [4, 4] + ); - assertMetricData(metric, + assertMetricData( + metric, DataPointType.SUM, defaultInstrumentDescriptor, - AggregationTemporality.DELTA); + AggregationTemporality.DELTA + ); assert.strictEqual(metric.dataPoints.length, 1); // Time span: (lastCollectionTime, collectionTime) assertDataPoint(metric.dataPoints[0], {}, 2, [2, 2], [4, 4]); @@ -94,12 +104,15 @@ describe('TemporalMetricProcessor', () => { collectors, defaultInstrumentDescriptor, deltaMetricStorage.collect(), - [5, 5]); + [5, 5] + ); - assertMetricData(metric, + assertMetricData( + metric, DataPointType.SUM, defaultInstrumentDescriptor, - AggregationTemporality.DELTA); + AggregationTemporality.DELTA + ); assert.strictEqual(metric.dataPoints.length, 0); } @@ -109,7 +122,7 @@ 
describe('TemporalMetricProcessor', () => { }); describe('two delta collector', () => { - const collectors = [ deltaCollector1, deltaCollector2 ]; + const collectors = [deltaCollector1, deltaCollector2]; it('should build delta recording metrics', () => { const aggregator = new SumAggregator(true); @@ -123,12 +136,15 @@ describe('TemporalMetricProcessor', () => { collectors, defaultInstrumentDescriptor, deltaMetricStorage.collect(), - [2, 2]); + [2, 2] + ); - assertMetricData(metric, + assertMetricData( + metric, DataPointType.SUM, defaultInstrumentDescriptor, - AggregationTemporality.DELTA); + AggregationTemporality.DELTA + ); assert.strictEqual(metric.dataPoints.length, 1); assertDataPoint(metric.dataPoints[0], {}, 1, [1, 1], [2, 2]); } @@ -139,12 +155,15 @@ describe('TemporalMetricProcessor', () => { collectors, defaultInstrumentDescriptor, deltaMetricStorage.collect(), - [3, 3]); + [3, 3] + ); - assertMetricData(metric, + assertMetricData( + metric, DataPointType.SUM, defaultInstrumentDescriptor, - AggregationTemporality.DELTA); + AggregationTemporality.DELTA + ); assert.strictEqual(metric.dataPoints.length, 1); assertDataPoint(metric.dataPoints[0], {}, 1, [1, 1], [3, 3]); } @@ -152,9 +171,12 @@ describe('TemporalMetricProcessor', () => { }); describe('single cumulative collector', () => { - const collectors = [ cumulativeCollector1 ]; + const collectors = [cumulativeCollector1]; it('should build delta recording metrics', () => { - const spy = sinon.spy(cumulativeCollector1, 'selectAggregationTemporality'); + const spy = sinon.spy( + cumulativeCollector1, + 'selectAggregationTemporality' + ); const aggregator = new SumAggregator(true); const deltaMetricStorage = new DeltaMetricProcessor(aggregator); @@ -167,12 +189,15 @@ describe('TemporalMetricProcessor', () => { collectors, defaultInstrumentDescriptor, deltaMetricStorage.collect(), - [2, 2]); + [2, 2] + ); - assertMetricData(metric, + assertMetricData( + metric, DataPointType.SUM, defaultInstrumentDescriptor, - AggregationTemporality.CUMULATIVE); + AggregationTemporality.CUMULATIVE + ); assert.strictEqual(metric.dataPoints.length, 1); assertDataPoint(metric.dataPoints[0], {}, 1, [1, 1], [2, 2]); } @@ -184,12 +209,15 @@ describe('TemporalMetricProcessor', () => { collectors, defaultInstrumentDescriptor, deltaMetricStorage.collect(), - [4, 4]); + [4, 4] + ); - assertMetricData(metric, + assertMetricData( + metric, DataPointType.SUM, defaultInstrumentDescriptor, - AggregationTemporality.CUMULATIVE); + AggregationTemporality.CUMULATIVE + ); assert.strictEqual(metric.dataPoints.length, 1); assertDataPoint(metric.dataPoints[0], {}, 3, [1, 1], [4, 4]); } @@ -200,7 +228,7 @@ describe('TemporalMetricProcessor', () => { }); describe('cumulative collector with delta collector', () => { - const collectors = [ deltaCollector1, cumulativeCollector1 ]; + const collectors = [deltaCollector1, cumulativeCollector1]; it('should build delta recording metrics', () => { const aggregator = new SumAggregator(true); const deltaMetricStorage = new DeltaMetricProcessor(aggregator); @@ -213,12 +241,15 @@ describe('TemporalMetricProcessor', () => { collectors, defaultInstrumentDescriptor, deltaMetricStorage.collect(), - [2, 2]); + [2, 2] + ); - assertMetricData(metric, + assertMetricData( + metric, DataPointType.SUM, defaultInstrumentDescriptor, - AggregationTemporality.CUMULATIVE); + AggregationTemporality.CUMULATIVE + ); assert.strictEqual(metric.dataPoints.length, 1); assertDataPoint(metric.dataPoints[0], {}, 1, [1, 1], [2, 2]); } @@ -230,12 +261,15 @@ 
describe('TemporalMetricProcessor', () => {
           collectors,
           defaultInstrumentDescriptor,
           deltaMetricStorage.collect(),
-          [4, 4]);
+          [4, 4]
+        );
 
-        assertMetricData(metric,
+        assertMetricData(
+          metric,
           DataPointType.SUM,
           defaultInstrumentDescriptor,
-          AggregationTemporality.DELTA);
+          AggregationTemporality.DELTA
+        );
         assert.strictEqual(metric.dataPoints.length, 1);
         assertDataPoint(metric.dataPoints[0], {}, 3, [1, 1], [4, 4]);
       }
@@ -245,12 +279,15 @@ describe('TemporalMetricProcessor', () => {
           collectors,
           defaultInstrumentDescriptor,
           deltaMetricStorage.collect(),
-          [5, 5]);
+          [5, 5]
+        );
 
-        assertMetricData(metric,
+        assertMetricData(
+          metric,
           DataPointType.SUM,
           defaultInstrumentDescriptor,
-          AggregationTemporality.CUMULATIVE);
+          AggregationTemporality.CUMULATIVE
+        );
         assert.strictEqual(metric.dataPoints.length, 1);
         assertDataPoint(metric.dataPoints[0], {}, 3, [1, 1], [5, 5]);
       }
diff --git a/packages/sdk-metrics/test/test-utils.ts b/packages/sdk-metrics/test/test-utils.ts
index 82533608cd..97d63a8e6c 100644
--- a/packages/sdk-metrics/test/test-utils.ts
+++ b/packages/sdk-metrics/test/test-utils.ts
@@ -21,13 +21,16 @@
 import * as assert from 'assert';
 
 interface ErrorLikeConstructor {
-  new(): Error;
+  new (): Error;
 }
 
 /**
  * Node.js v8.x and browser compatible `assert.rejects`.
  */
-export async function assertRejects(actual: any, expected: RegExp | ErrorLikeConstructor) {
+export async function assertRejects(
+  actual: any,
+  expected: RegExp | ErrorLikeConstructor
+) {
   let rejected;
   try {
     if (typeof actual === 'function') {
diff --git a/packages/sdk-metrics/test/util.ts b/packages/sdk-metrics/test/util.ts
index 7227698a93..fa7a54b07f 100644
--- a/packages/sdk-metrics/test/util.ts
+++ b/packages/sdk-metrics/test/util.ts
@@ -24,12 +24,15 @@ import {
 import { InstrumentationScope } from '@opentelemetry/core';
 import { Resource } from '@opentelemetry/resources';
 import * as assert from 'assert';
-import { InstrumentDescriptor, InstrumentType } from '../src/InstrumentDescriptor';
+import {
+  InstrumentDescriptor,
+  InstrumentType,
+} from '../src/InstrumentDescriptor';
 import {
   MetricData,
   DataPoint,
   DataPointType,
-  ScopeMetrics
+  ScopeMetrics,
 } from '../src/export/MetricData';
 import { isNotNullish } from '../src/utils';
 import { HrTime } from '@opentelemetry/api';
@@ -39,13 +42,15 @@ import { AggregationTemporality } from '../src/export/AggregationTemporality';
 export type Measurement = {
   value: number;
   // TODO: use common attributes
-  attributes: MetricAttributes
+  attributes: MetricAttributes;
   context?: Context;
 };
 
-export const defaultResource = Resource.default().merge(new Resource({
-  resourceKey: 'my-resource',
-}));
+export const defaultResource = Resource.default().merge(
+  new Resource({
+    resourceKey: 'my-resource',
+  })
+);
 
 export const defaultInstrumentDescriptor: InstrumentDescriptor = {
   name: 'default_metric',
@@ -58,13 +63,18 @@ export const defaultInstrumentDescriptor: InstrumentDescriptor = {
 export const defaultInstrumentationScope: InstrumentationScope = {
   name: 'default',
   version: '1.0.0',
-  schemaUrl: 'https://opentelemetry.io/schemas/1.7.0'
+  schemaUrl: 'https://opentelemetry.io/schemas/1.7.0',
 };
 
 export const commonValues: number[] = [1, -1, 1.0, Infinity, -Infinity, NaN];
-export const commonAttributes: MetricAttributes[] = [{}, { 1: '1' }, { a: '2' }, new (class Foo {
-  a = '1';
-})];
+export const commonAttributes: MetricAttributes[] = [
+  {},
+  { 1: '1' },
+  { a: '2' },
+  new (class Foo {
+    a = '1';
+  })(),
+];
 
 export const sleep = (time: number) =>
   new Promise(resolve => {
@@ -84,7 +94,7 @@ export function assertMetricData(
   actual: unknown,
   dataPointType?: DataPointType,
   instrumentDescriptor: Partial<InstrumentDescriptor> | null = defaultInstrumentDescriptor,
-  aggregationTemporality?: AggregationTemporality,
+  aggregationTemporality?: AggregationTemporality
 ): asserts actual is MetricData {
   const it = actual as MetricData;
   if (instrumentDescriptor != null) {
@@ -106,13 +116,17 @@ export function assertDataPoint(
   actual: unknown,
   attributes: MetricAttributes,
   point: Histogram | number,
   startTime?: HrTime,
-  endTime?: HrTime,
+  endTime?: HrTime
 ): asserts actual is DataPoint {
   const it = actual as DataPoint;
   assert.deepStrictEqual(it.attributes, attributes);
   assert.deepStrictEqual(it.value, point);
   if (startTime) {
-    assert.deepStrictEqual(it.startTime, startTime, 'startTime should be equal');
+    assert.deepStrictEqual(
+      it.startTime,
+      startTime,
+      'startTime should be equal'
+    );
   } else {
     assert(Array.isArray(it.startTime));
     assert.strictEqual(it.startTime.length, 2, 'startTime should be equal');
@@ -125,25 +139,39 @@ export function assertDataPoint(
   }
 }
 
-export function assertMeasurementEqual(actual: unknown, expected: Measurement): asserts actual is Measurement {
+export function assertMeasurementEqual(
+  actual: unknown,
+  expected: Measurement
+): asserts actual is Measurement {
   // NOTE: Node.js v8 assert.strictEquals treat two NaN as different values.
   if (Number.isNaN(expected.value)) {
     assert(Number.isNaN((actual as Measurement).value));
   } else {
     assert.strictEqual((actual as Measurement).value, expected.value);
   }
-  assert.deepStrictEqual((actual as Measurement).attributes, expected.attributes);
+  assert.deepStrictEqual(
+    (actual as Measurement).attributes,
+    expected.attributes
+  );
   assert.deepStrictEqual((actual as Measurement).context, expected.context);
 }
 
-export function assertPartialDeepStrictEqual<T>(actual: unknown, expected: T, message?: string): asserts actual is T {
+export function assertPartialDeepStrictEqual<T>(
+  actual: unknown,
+  expected: T,
+  message?: string
+): asserts actual is T {
   assert.strictEqual(typeof actual, typeof expected, message);
   if (typeof expected !== 'object' && typeof expected !== 'function') {
     return;
   }
 
   const ownNames = Object.getOwnPropertyNames(expected);
   for (const ownName of ownNames) {
-    assert.deepStrictEqual((actual as any)[ownName], (expected as any)[ownName], `${ownName} not equals: ${message ?? ''}`);
+    assert.deepStrictEqual(
+      (actual as any)[ownName],
+      (expected as any)[ownName],
+      `${ownName} not equals: ${message ?? ''}`
+    );
   }
 }
diff --git a/packages/sdk-metrics/test/utils.test.ts b/packages/sdk-metrics/test/utils.test.ts
index 2bf818088a..04633a14ba 100644
--- a/packages/sdk-metrics/test/utils.test.ts
+++ b/packages/sdk-metrics/test/utils.test.ts
@@ -38,18 +38,25 @@ describe('utils', () => {
   describe('hashAttributes', () => {
     it('should hash all types of attribute values', () => {
       const cases: [MetricAttributes, string][] = [
-        [{ 'string': 'bar' }, '[["string","bar"]]'],
-        [{ 'number': 1 }, '[["number",1]]'],
-        [{ 'false': false, 'true': true }, '[["false",false],["true",true]]'],
-        [{ 'arrayOfString': ['foo','bar'] }, '[["arrayOfString",["foo","bar"]]]'],
-        [{ 'arrayOfNumber': [1,2] }, '[["arrayOfNumber",[1,2]]]'],
-        [{ 'arrayOfBool': [false,true] }, '[["arrayOfBool",[false,true]]]'],
-        [{ 'undefined': undefined }, '[["undefined",null]]'],
-        [{ 'arrayOfHoles': [undefined, null] }, '[["arrayOfHoles",[null,null]]]'],
+        [{ string: 'bar' }, '[["string","bar"]]'],
+        [{ number: 1 }, '[["number",1]]'],
+        [{ false: false, true: true }, '[["false",false],["true",true]]'],
+        [
+          { arrayOfString: ['foo', 'bar'] },
+          '[["arrayOfString",["foo","bar"]]]',
+        ],
+        [{ arrayOfNumber: [1, 2] }, '[["arrayOfNumber",[1,2]]]'],
+        [{ arrayOfBool: [false, true] }, '[["arrayOfBool",[false,true]]]'],
+        [{ undefined: undefined }, '[["undefined",null]]'],
+        [{ arrayOfHoles: [undefined, null] }, '[["arrayOfHoles",[null,null]]]'],
       ];
 
       for (const [idx, it] of cases.entries()) {
-        assert.strictEqual(hashAttributes(it[0]), it[1], `cases[${idx}] failed`);
+        assert.strictEqual(
+          hashAttributes(it[0]),
+          it[1],
+          `cases[${idx}] failed`
+        );
       }
     });
   });
diff --git a/packages/sdk-metrics/test/view/Aggregation.test.ts b/packages/sdk-metrics/test/view/Aggregation.test.ts
index 1ca9332e44..e4ef18f52c 100644
--- a/packages/sdk-metrics/test/view/Aggregation.test.ts
+++ b/packages/sdk-metrics/test/view/Aggregation.test.ts
@@ -22,7 +22,10 @@ import {
   LastValueAggregator,
   SumAggregator,
 } from '../../src/aggregator';
-import { InstrumentDescriptor, InstrumentType } from '../../src/InstrumentDescriptor';
+import {
+  InstrumentDescriptor,
+  InstrumentType,
+} from '../../src/InstrumentDescriptor';
 import {
   Aggregation,
   DefaultAggregation,
@@ -35,22 +38,23 @@ import {
 import { defaultInstrumentDescriptor } from '../util';
 
 interface AggregationConstructor {
-  new(...args: any[]): Aggregation;
+  new (...args: any[]): Aggregation;
 }
 
 interface AggregatorConstructor {
-  new(...args: any[]): Aggregator;
+  new (...args: any[]): Aggregator;
 }
 
 describe('Aggregation', () => {
   it('static aggregations', () => {
-    const staticMembers: [keyof typeof Aggregation, AggregationConstructor][] = [
-      ['Drop', DropAggregation],
-      ['Sum', SumAggregation],
-      ['LastValue', LastValueAggregation],
-      ['Histogram', HistogramAggregation],
-      ['Default', DefaultAggregation],
-    ];
+    const staticMembers: [keyof typeof Aggregation, AggregationConstructor][] =
+      [
+        ['Drop', DropAggregation],
+        ['Sum', SumAggregation],
+        ['LastValue', LastValueAggregation],
+        ['Histogram', HistogramAggregation],
+        ['Default', DefaultAggregation],
+      ];
 
     for (const [key, type] of staticMembers) {
      const aggregation = (Aggregation[key] as () => Aggregation)();
@@ -65,19 +69,58 @@ describe('DefaultAggregation', () => {
   it('should create aggregators for instrument descriptors', () => {
     // https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/metrics/sdk.md#default-aggregation
     const expectations: [InstrumentDescriptor, AggregatorConstructor][] = [
-      [{ ...defaultInstrumentDescriptor, type: InstrumentType.COUNTER }, SumAggregator],
-      [{ ...defaultInstrumentDescriptor, type: InstrumentType.OBSERVABLE_COUNTER }, SumAggregator],
-      [{ ...defaultInstrumentDescriptor, type: InstrumentType.UP_DOWN_COUNTER }, SumAggregator],
-      [{ ...defaultInstrumentDescriptor, type: InstrumentType.OBSERVABLE_UP_DOWN_COUNTER }, SumAggregator],
-      [{ ...defaultInstrumentDescriptor, type: InstrumentType.OBSERVABLE_GAUGE }, LastValueAggregator],
-      [{ ...defaultInstrumentDescriptor, type: InstrumentType.HISTOGRAM }, HistogramAggregator],
+      [
+        { ...defaultInstrumentDescriptor, type: InstrumentType.COUNTER },
+        SumAggregator,
+      ],
+      [
+        {
+          ...defaultInstrumentDescriptor,
+          type: InstrumentType.OBSERVABLE_COUNTER,
+        },
+        SumAggregator,
+      ],
+      [
+        {
+          ...defaultInstrumentDescriptor,
+          type: InstrumentType.UP_DOWN_COUNTER,
+        },
+        SumAggregator,
+      ],
+      [
+        {
+          ...defaultInstrumentDescriptor,
+          type: InstrumentType.OBSERVABLE_UP_DOWN_COUNTER,
+        },
+        SumAggregator,
+      ],
+      [
+        {
+          ...defaultInstrumentDescriptor,
+          type: InstrumentType.OBSERVABLE_GAUGE,
+        },
+        LastValueAggregator,
+      ],
+      [
+        { ...defaultInstrumentDescriptor, type: InstrumentType.HISTOGRAM },
+        HistogramAggregator,
+      ],
       // unknown instrument type
-      [{ ...defaultInstrumentDescriptor, type: -1 as unknown as InstrumentType }, DropAggregator],
+      [
+        {
+          ...defaultInstrumentDescriptor,
+          type: -1 as unknown as InstrumentType,
+        },
+        DropAggregator,
+      ],
     ];
 
     const aggregation = new DefaultAggregation();
     for (const [instrumentDescriptor, type] of expectations) {
-      assert(aggregation.createAggregator(instrumentDescriptor) instanceof type, `${InstrumentType[instrumentDescriptor.type]}`);
+      assert(
+        aggregation.createAggregator(instrumentDescriptor) instanceof type,
+        `${InstrumentType[instrumentDescriptor.type]}`
+      );
     }
   });
 });
@@ -86,9 +129,14 @@ describe('DefaultAggregation', () => {
 describe('HistogramAggregator', () => {
   describe('createAggregator', () => {
     it('should create histogram aggregators with boundaries', () => {
-      const aggregator = new HistogramAggregation().createAggregator(defaultInstrumentDescriptor);
+      const aggregator = new HistogramAggregation().createAggregator(
+        defaultInstrumentDescriptor
+      );
       assert(aggregator instanceof HistogramAggregator);
-      assert.deepStrictEqual(aggregator['_boundaries'], [0, 5, 10, 25, 50, 75, 100, 250, 500, 1000]);
+      assert.deepStrictEqual(
+        aggregator['_boundaries'],
+        [0, 5, 10, 25, 50, 75, 100, 250, 500, 1000]
+      );
     });
   });
 });
@@ -117,36 +165,48 @@ describe('ExplicitBucketHistogramAggregation', () => {
 
   describe('createAggregator', () => {
     it('should create histogram aggregators with boundaries', () => {
-      const aggregator1 = new ExplicitBucketHistogramAggregation([100, 10, 1]).createAggregator(defaultInstrumentDescriptor);
+      const aggregator1 = new ExplicitBucketHistogramAggregation([
+        100, 10, 1,
+      ]).createAggregator(defaultInstrumentDescriptor);
       assert(aggregator1 instanceof HistogramAggregator);
       assert.deepStrictEqual(aggregator1['_boundaries'], [1, 10, 100]);
 
-      const aggregator2 = new ExplicitBucketHistogramAggregation(
-        [-Infinity, -Infinity, 10, 100, 1000, Infinity, Infinity]
-      ).createAggregator(defaultInstrumentDescriptor);
+      const aggregator2 = new ExplicitBucketHistogramAggregation([
+        -Infinity,
+        -Infinity,
+        10,
+        100,
+        1000,
+        Infinity,
+        Infinity,
+      ]).createAggregator(defaultInstrumentDescriptor);
       assert(aggregator2 instanceof HistogramAggregator);
       assert.deepStrictEqual(aggregator2['_boundaries'], [10, 100, 1000]);
     });
 
    describe('should create histogram aggregators', () => {
       it('with min/max recording by default', () => {
-        const aggregator = new ExplicitBucketHistogramAggregation([100, 10, 1])
-          .createAggregator(defaultInstrumentDescriptor);
+        const aggregator = new ExplicitBucketHistogramAggregation([
+          100, 10, 1,
+        ]).createAggregator(defaultInstrumentDescriptor);
         assert.deepStrictEqual(aggregator['_recordMinMax'], true);
       });
 
       it('with min/max recording when _recordMinMax is set to true', () => {
-        const aggregator = new ExplicitBucketHistogramAggregation([100, 10, 1], true)
-          .createAggregator(defaultInstrumentDescriptor);
+        const aggregator = new ExplicitBucketHistogramAggregation(
+          [100, 10, 1],
+          true
+        ).createAggregator(defaultInstrumentDescriptor);
         assert.deepStrictEqual(aggregator['_recordMinMax'], true);
       });
 
       it('without min/max recording when _recordMinMax is set to true', () => {
-        const aggregator = new ExplicitBucketHistogramAggregation([100, 10, 1], false)
-          .createAggregator(defaultInstrumentDescriptor);
+        const aggregator = new ExplicitBucketHistogramAggregation(
+          [100, 10, 1],
+          false
+        ).createAggregator(defaultInstrumentDescriptor);
         assert.deepStrictEqual(aggregator['_recordMinMax'], false);
       });
-
   });
 });
 });
diff --git a/packages/sdk-metrics/test/view/AttributesProcessor.test.ts b/packages/sdk-metrics/test/view/AttributesProcessor.test.ts
index 1a0db429e5..8d38efacdf 100644
--- a/packages/sdk-metrics/test/view/AttributesProcessor.test.ts
+++ b/packages/sdk-metrics/test/view/AttributesProcessor.test.ts
@@ -35,21 +35,24 @@ describe('NoopAttributesProcessor', () => {
 
 describe('FilteringAttributesProcessor', () => {
   it('should not add keys when attributes do not exist', () => {
     const processor = new FilteringAttributesProcessor(['foo', 'bar']);
-    assert.deepStrictEqual(
-      processor.process({}, context.active()), {});
+    assert.deepStrictEqual(processor.process({}, context.active()), {});
   });
 
   it('should only keep allowed attributes', () => {
     const processor = new FilteringAttributesProcessor(['foo', 'bar']);
     assert.deepStrictEqual(
-      processor.process({
-        foo: 'fooValue',
-        bar: 'barValue',
-        baz: 'bazValue'
-      }, context.active()),
+      processor.process(
+        {
+          foo: 'fooValue',
+          bar: 'barValue',
+          baz: 'bazValue',
+        },
+        context.active()
+      ),
       {
         foo: 'fooValue',
-        bar: 'barValue'
-      });
+        bar: 'barValue',
+      }
+    );
   });
 });
diff --git a/packages/sdk-metrics/test/view/Predicate.test.ts b/packages/sdk-metrics/test/view/Predicate.test.ts
index 608ff20a17..34e6dc364d 100644
--- a/packages/sdk-metrics/test/view/Predicate.test.ts
+++ b/packages/sdk-metrics/test/view/Predicate.test.ts
@@ -59,7 +59,10 @@ describe('PatternPredicate', () => {
 
   describe('escapePattern', () => {
     it('should escape regexp elements', () => {
-      assert.strictEqual(PatternPredicate.escapePattern('^$\\.+?()[]{}|'), '^\\^\\$\\\\\\.\\+\\?\\(\\)\\[\\]\\{\\}\\|$');
+      assert.strictEqual(
+        PatternPredicate.escapePattern('^$\\.+?()[]{}|'),
+        '^\\^\\$\\\\\\.\\+\\?\\(\\)\\[\\]\\{\\}\\|$'
+      );
       assert.strictEqual(PatternPredicate.escapePattern('*'), '^.*$');
       assert.strictEqual(PatternPredicate.escapePattern('foobar'), '^foobar$');
       assert.strictEqual(PatternPredicate.escapePattern('foo*'), '^foo.*$');
diff --git a/packages/sdk-metrics/test/view/View.test.ts b/packages/sdk-metrics/test/view/View.test.ts
index d6ad05da8b..921928cf1f 100644
--- a/packages/sdk-metrics/test/view/View.test.ts
+++ b/packages/sdk-metrics/test/view/View.test.ts
@@ -17,24 +17,34 @@
 import * as assert from 'assert';
 import { AttributesProcessor } from '../../src/view/AttributesProcessor';
 import { View } from '../../src/view/View';
-import { InstrumentType, Aggregation, ExplicitBucketHistogramAggregation } from '../../src';
+import {
+  InstrumentType,
+  Aggregation,
+  ExplicitBucketHistogramAggregation,
+} from '../../src';
 
 describe('View', () => {
   describe('constructor', () => {
     it('should construct default view with no view arguments provided', () => {
       {
-        const view = new View({instrumentName: '*'});
+        const view = new View({ instrumentName: '*' });
         assert.strictEqual(view.name, undefined);
         assert.strictEqual(view.description, undefined);
         assert.strictEqual(view.aggregation, Aggregation.Default());
-        assert.strictEqual(view.attributesProcessor, AttributesProcessor.Noop());
+        assert.strictEqual(
+          view.attributesProcessor,
+          AttributesProcessor.Noop()
+        );
       }
 
       {
-        const view = new View({meterName: '*'});
+        const view = new View({ meterName: '*' });
         assert.strictEqual(view.name, undefined);
         assert.strictEqual(view.description, undefined);
         assert.strictEqual(view.aggregation, Aggregation.Default());
-        assert.strictEqual(view.attributesProcessor, AttributesProcessor.Noop());
+        assert.strictEqual(
+          view.attributesProcessor,
+          AttributesProcessor.Noop()
+        );
       }
     });
 
@@ -48,36 +58,51 @@ describe('View', () => {
       // would implicitly rename all instruments to description
       assert.throws(() => new View({ description: 'description' }));
       // would implicitly change all instruments to use histogram aggregation
-      assert.throws(() => new View({
-        aggregation: new ExplicitBucketHistogramAggregation([1, 100])
-      }));
+      assert.throws(
+        () =>
+          new View({
+            aggregation: new ExplicitBucketHistogramAggregation([1, 100]),
+          })
+      );
    });
 
     it('with named view and no instrument selector should throw', () => {
-      assert.throws(() => new View({
-        name: 'named-view'
-      }));
+      assert.throws(
+        () =>
+          new View({
+            name: 'named-view',
+          })
+      );
     });
 
     it('with named view and instrument wildcard should throw', () => {
       // Throws with wildcard character only.
-      assert.throws(() => new View({
-        name: 'renamed-instrument',
-        instrumentName: '*'
-      }));
+      assert.throws(
+        () =>
+          new View({
+            name: 'renamed-instrument',
+            instrumentName: '*',
+          })
+      );
 
       // Throws with wildcard character in instrument name.
-      assert.throws(() => new View({
-        name: 'renamed-instrument',
-        instrumentName: 'instrument.name.*'
-      }));
+      assert.throws(
+        () =>
+          new View({
+            name: 'renamed-instrument',
+            instrumentName: 'instrument.name.*',
+          })
+      );
     });
 
     it('with named view and instrument type selector should throw', () => {
-      assert.throws(() => new View({
-        name: 'renamed-instrument',
-        instrumentType: InstrumentType.COUNTER
-      }));
+      assert.throws(
+        () =>
+          new View({
+            name: 'renamed-instrument',
+            instrumentType: InstrumentType.COUNTER,
+          })
+      );
     });
   });
 });
diff --git a/packages/sdk-metrics/test/view/ViewRegistry.test.ts b/packages/sdk-metrics/test/view/ViewRegistry.test.ts
index 68f9f27072..aea2ebe3cf 100644
--- a/packages/sdk-metrics/test/view/ViewRegistry.test.ts
+++ b/packages/sdk-metrics/test/view/ViewRegistry.test.ts
@@ -17,10 +17,12 @@
 import * as assert from 'assert';
 import { InstrumentType } from '../../src';
 import { ViewRegistry } from '../../src/view/ViewRegistry';
-import { defaultInstrumentationScope, defaultInstrumentDescriptor } from '../util';
+import {
+  defaultInstrumentationScope,
+  defaultInstrumentDescriptor,
+} from '../util';
 import { View } from '../../src';
-
 
 describe('ViewRegistry', () => {
   describe('findViews', () => {
     describe('InstrumentSelector', () => {
@@ -30,20 +32,26 @@ describe('ViewRegistry', () => {
         registry.addView(new View({ name: 'bar', instrumentName: 'bar' }));
 
         {
-          const views = registry.findViews({
-            ...defaultInstrumentDescriptor,
-            name: 'foo'
-          }, defaultInstrumentationScope);
+          const views = registry.findViews(
+            {
+              ...defaultInstrumentDescriptor,
+              name: 'foo',
+            },
+            defaultInstrumentationScope
+          );
 
           assert.strictEqual(views.length, 1);
           assert.strictEqual(views[0].name, 'foo');
         }
 
         {
-          const views = registry.findViews({
-            ...defaultInstrumentDescriptor,
-            name: 'bar'
-          }, defaultInstrumentationScope);
+          const views = registry.findViews(
+            {
+              ...defaultInstrumentDescriptor,
+              name: 'bar',
+            },
+            defaultInstrumentationScope
+          );
 
           assert.strictEqual(views.length, 1);
           assert.strictEqual(views[0].name, 'bar');
@@ -52,32 +60,42 @@ describe('ViewRegistry', () => {
 
       it('should match view with instrument type', () => {
         const registry = new ViewRegistry();
-        registry.addView(new View({
-          name: 'counter',
-          instrumentName: 'default_metric',
-          instrumentType: InstrumentType.COUNTER
-        }));
-        registry.addView(new View({
-          name: 'histogram',
-          instrumentName: 'default_metric',
-          instrumentType: InstrumentType.HISTOGRAM
-        }));
+        registry.addView(
+          new View({
+            name: 'counter',
+            instrumentName: 'default_metric',
+            instrumentType: InstrumentType.COUNTER,
+          })
+        );
+        registry.addView(
+          new View({
+            name: 'histogram',
+            instrumentName: 'default_metric',
+            instrumentType: InstrumentType.HISTOGRAM,
+          })
+        );
 
         {
-          const views = registry.findViews({
-            ...defaultInstrumentDescriptor,
-            type: InstrumentType.COUNTER
-          }, defaultInstrumentationScope);
+          const views = registry.findViews(
+            {
+              ...defaultInstrumentDescriptor,
+              type: InstrumentType.COUNTER,
+            },
+            defaultInstrumentationScope
+          );
 
           assert.strictEqual(views.length, 1);
           assert.strictEqual(views[0].name, 'counter');
         }
 
         {
-          const views = registry.findViews({
-            ...defaultInstrumentDescriptor,
-            type: InstrumentType.HISTOGRAM
-          }, defaultInstrumentationScope);
+          const views = registry.findViews(
+            {
+              ...defaultInstrumentDescriptor,
+              type: InstrumentType.HISTOGRAM,
+            },
+            defaultInstrumentationScope
+          );
 
           assert.strictEqual(views.length, 1);
           assert.strictEqual(views[0].name, 'histogram');
@@ -88,8 +106,20 @@ describe('ViewRegistry', () => {
     describe('MeterSelector', () => {
       it('should match view with meter name', () => {
         const registry = new ViewRegistry();
-        registry.addView(new View({ name: 'foo', instrumentName: 'default_metric', meterName: 'foo' }));
-        registry.addView(new View({ name: 'bar', instrumentName: 'default_metric', meterName: 'bar' }));
+        registry.addView(
+          new View({
+            name: 'foo',
+            instrumentName: 'default_metric',
+            meterName: 'foo',
+          })
+        );
+        registry.addView(
+          new View({
+            name: 'bar',
+            instrumentName: 'default_metric',
+            meterName: 'bar',
+          })
+        );
 
         {
           const views = registry.findViews(defaultInstrumentDescriptor, {
@@ -104,7 +134,7 @@ describe('ViewRegistry', () => {
         {
           const views = registry.findViews(defaultInstrumentDescriptor, {
             ...defaultInstrumentationScope,
-            name: 'bar'
+            name: 'bar',
           });
 
           assert.strictEqual(views.length, 1);
diff --git a/prettier.config.js b/prettier.config.js
new file mode 100644
index 0000000000..ab9553d533
--- /dev/null
+++ b/prettier.config.js
@@ -0,0 +1,8 @@
+module.exports = {
+  arrowParens: 'avoid',
+  printWidth: 80,
+  trailingComma: 'es5',
+  tabWidth: 2,
+  semi: true,
+  singleQuote: true,
+};

From 91f94a81bca6fc713d971c37003b3866dcfe6edb Mon Sep 17 00:00:00 2001
From: Marc Pichler
Date: Sun, 4 Dec 2022 17:11:51 +0100
Subject: [PATCH 2/2] docs(otlp-*-exporter-base): add internal use note (#3453)

---
 experimental/packages/otlp-exporter-base/README.md       | 2 ++
 experimental/packages/otlp-grpc-exporter-base/README.md  | 2 ++
 experimental/packages/otlp-proto-exporter-base/README.md | 2 ++
 3 files changed, 6 insertions(+)

diff --git a/experimental/packages/otlp-exporter-base/README.md b/experimental/packages/otlp-exporter-base/README.md
index 85f96aecd8..2ec8baf1e8 100644
--- a/experimental/packages/otlp-exporter-base/README.md
+++ b/experimental/packages/otlp-exporter-base/README.md
@@ -3,6 +3,8 @@
 [![NPM Published Version][npm-img]][npm-url]
 [![Apache License][license-image]][license-image]
 
+**Note: This package is intended for internal use only.**
+
 **Note: This is an experimental package under active development. New releases may include breaking changes.**
 
 This module provides a base exporter for web and node to be used with [opentelemetry-collector][opentelemetry-collector-url].
diff --git a/experimental/packages/otlp-grpc-exporter-base/README.md b/experimental/packages/otlp-grpc-exporter-base/README.md
index d49ece2a36..4ae30015fa 100644
--- a/experimental/packages/otlp-grpc-exporter-base/README.md
+++ b/experimental/packages/otlp-grpc-exporter-base/README.md
@@ -3,6 +3,8 @@
 [![NPM Published Version][npm-img]][npm-url]
 [![Apache License][license-image]][license-image]
 
+**Note: This package is intended for internal use only.**
+
 **Note: This is an experimental package under active development. New releases may include breaking changes.**
 
 This module provides a gRPC exporter base for Node.js (browsers not supported) to be used with [opentelemetry-collector][opentelemetry-collector-url].
diff --git a/experimental/packages/otlp-proto-exporter-base/README.md b/experimental/packages/otlp-proto-exporter-base/README.md
index 84cd880dcd..fa4cd9032d 100644
--- a/experimental/packages/otlp-proto-exporter-base/README.md
+++ b/experimental/packages/otlp-proto-exporter-base/README.md
@@ -3,6 +3,8 @@
 [![NPM Published Version][npm-img]][npm-url]
 [![Apache License][license-image]][license-image]
 
+**Note: This package is intended for internal use only.**
+
 **Note: This is an experimental package under active development. New releases may include breaking changes.**
 
 This module provides a OTLP-http/protobuf exporter base for Node.js (browsers not supported) to be used with [opentelemetry-collector][opentelemetry-collector-url].