diff --git a/consumer/pdata/metric.go b/consumer/pdata/metric.go index b9b808d8c7b..696c04bf751 100644 --- a/consumer/pdata/metric.go +++ b/consumer/pdata/metric.go @@ -15,30 +15,10 @@ package pdata import ( - otlpmetrics "go.opentelemetry.io/collector/internal/data/opentelemetry-proto-gen/metrics/v1" + otlpcommon "go.opentelemetry.io/collector/internal/data/opentelemetry-proto-gen/common/v1" + otlpresource "go.opentelemetry.io/collector/internal/data/opentelemetry-proto-gen/resource/v1" ) -type MetricType otlpmetrics.MetricDescriptor_Type - -const ( - MetricTypeInvalid = MetricType(otlpmetrics.MetricDescriptor_INVALID_TYPE) - MetricTypeInt64 = MetricType(otlpmetrics.MetricDescriptor_INT64) - MetricTypeDouble = MetricType(otlpmetrics.MetricDescriptor_DOUBLE) - MetricTypeMonotonicInt64 = MetricType(otlpmetrics.MetricDescriptor_MONOTONIC_INT64) - MetricTypeMonotonicDouble = MetricType(otlpmetrics.MetricDescriptor_MONOTONIC_DOUBLE) - MetricTypeHistogram = MetricType(otlpmetrics.MetricDescriptor_HISTOGRAM) - MetricTypeSummary = MetricType(otlpmetrics.MetricDescriptor_SUMMARY) -) - -func (mt MetricType) String() string { - return otlpmetrics.MetricDescriptor_Type(mt).String() -} - -// InternalNewMetricsResourceSlice is a helper -func InternalNewMetricsResourceSlice(orig *[]*otlpmetrics.ResourceMetrics) ResourceMetricsSlice { - return newResourceMetricsSlice(orig) -} - // Metrics is an opaque interface that allows transition to the new internal Metrics data, but also facilitate the // transition to the new components especially for traces. // @@ -49,3 +29,18 @@ func InternalNewMetricsResourceSlice(orig *[]*otlpmetrics.ResourceMetrics) Resou type Metrics struct { InternalOpaque interface{} } + +// DeprecatedNewResource temporary public function. +func DeprecatedNewResource(orig **otlpresource.Resource) Resource { + return newResource(orig) +} + +// DeprecatedNewInstrumentationLibrary temporary public function. +func DeprecatedNewInstrumentationLibrary(orig **otlpcommon.InstrumentationLibrary) InstrumentationLibrary { + return newInstrumentationLibrary(orig) +} + +// DeprecatedNewStringMap temporary public function. +func DeprecatedNewStringMap(orig *[]*otlpcommon.StringKeyValue) StringMap { + return newStringMap(orig) +} diff --git a/consumer/pdata/metric_test.go b/consumer/pdata/metric_test.go deleted file mode 100644 index c2223f486a8..00000000000 --- a/consumer/pdata/metric_test.go +++ /dev/null @@ -1,59 +0,0 @@ -// Copyright The OpenTelemetry Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
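The DeprecatedNew* functions added above exist because constructors such as newResource, newInstrumentationLibrary, and newStringMap are unexported from the pdata package; once the old metric wrappers move to internal/dataold (later in this diff) they need these temporary exported entry points to keep building the shared pdata accessor types around the gogoproto structs. A minimal sketch of the intended call pattern, assuming it lives inside internal/dataold; the wrapResource helper itself is hypothetical and only mirrors what the relocated generated code does:

    package dataold

    import (
        "go.opentelemetry.io/collector/consumer/pdata"
        otlpmetrics "go.opentelemetry.io/collector/internal/data/opentelemetry-proto-gen/metrics/v1old"
    )

    // wrapResource wraps the Resource field of an old-style OTLP ResourceMetrics
    // in the shared pdata.Resource accessor. Outside the pdata package this has
    // to go through the temporary DeprecatedNewResource constructor, because
    // newResource is unexported from pdata.
    func wrapResource(rm *otlpmetrics.ResourceMetrics) pdata.Resource {
        return pdata.DeprecatedNewResource(&rm.Resource)
    }
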
- -package pdata - -import ( - "testing" - - gogoproto "github.com/gogo/protobuf/proto" - "github.com/stretchr/testify/assert" - goproto "google.golang.org/protobuf/proto" - "google.golang.org/protobuf/types/known/emptypb" - - otlpmetrics "go.opentelemetry.io/collector/internal/data/opentelemetry-proto-gen/metrics/v1" -) - -func TestResourceMetricsWireCompatibility(t *testing.T) { - // This test verifies that OTLP ProtoBufs generated using goproto lib in - // opentelemetry-proto repository OTLP ProtoBufs generated using gogoproto lib in - // this repository are wire compatible. - - // Generate ResourceMetrics as pdata struct. - pdataRM := generateTestResourceMetrics() - - // Marshal its underlying ProtoBuf to wire. - wire1, err := gogoproto.Marshal(*pdataRM.orig) - assert.NoError(t, err) - assert.NotNil(t, wire1) - - // Unmarshal from the wire to OTLP Protobuf in goproto's representation. - var goprotoMessage emptypb.Empty - err = goproto.Unmarshal(wire1, &goprotoMessage) - assert.NoError(t, err) - - // Marshal to the wire again. - wire2, err := goproto.Marshal(&goprotoMessage) - assert.NoError(t, err) - assert.NotNil(t, wire2) - - // Unmarshal from the wire into gogoproto's representation. - var gogoprotoRM otlpmetrics.ResourceMetrics - err = gogoproto.Unmarshal(wire2, &gogoprotoRM) - assert.NoError(t, err) - - // Now compare that the original and final ProtoBuf messages are the same. - // This proves that goproto and gogoproto marshaling/unmarshaling are wire compatible. - assert.True(t, gogoproto.Equal(*pdataRM.orig, &gogoprotoRM)) -} diff --git a/consumer/pdatautil/pdatautil.go b/consumer/pdatautil/pdatautil.go index 2f25f93d207..6b14ce1263f 100644 --- a/consumer/pdatautil/pdatautil.go +++ b/consumer/pdatautil/pdatautil.go @@ -24,7 +24,7 @@ import ( "go.opentelemetry.io/collector/consumer/consumerdata" "go.opentelemetry.io/collector/consumer/pdata" - "go.opentelemetry.io/collector/internal/data" + "go.opentelemetry.io/collector/internal/dataold" "go.opentelemetry.io/collector/translator/internaldata" ) @@ -35,7 +35,7 @@ func MetricsToMetricsData(md pdata.Metrics) []consumerdata.MetricsData { if cmd, ok := md.InternalOpaque.([]consumerdata.MetricsData); ok { return cmd } - if ims, ok := md.InternalOpaque.(data.MetricData); ok { + if ims, ok := md.InternalOpaque.(dataold.MetricData); ok { return internaldata.MetricDataToOC(ims) } panic("Unsupported metrics type.") @@ -51,8 +51,8 @@ func MetricsFromMetricsData(ocmds []consumerdata.MetricsData) pdata.Metrics { // MetricsToInternalMetrics returns the `data.MetricData` representation of the `pdata.Metrics`. // // This is a temporary function that will be removed when the new internal pdata.Metrics will be finalized. -func MetricsToInternalMetrics(md pdata.Metrics) data.MetricData { - if ims, ok := md.InternalOpaque.(data.MetricData); ok { +func MetricsToInternalMetrics(md pdata.Metrics) dataold.MetricData { + if ims, ok := md.InternalOpaque.(dataold.MetricData); ok { return ims } if cmd, ok := md.InternalOpaque.([]consumerdata.MetricsData); ok { @@ -64,7 +64,7 @@ func MetricsToInternalMetrics(md pdata.Metrics) data.MetricData { // MetricsFromMetricsData returns the `pdata.Metrics` representation of the `data.MetricData`. // // This is a temporary function that will be removed when the new internal pdata.Metrics will be finalized. 
-func MetricsFromInternalMetrics(md data.MetricData) pdata.Metrics { +func MetricsFromInternalMetrics(md dataold.MetricData) pdata.Metrics { return pdata.Metrics{InternalOpaque: md} } @@ -72,7 +72,7 @@ func MetricsFromInternalMetrics(md data.MetricData) pdata.Metrics { // // This is a temporary function that will be removed when the new internal pdata.Metrics will be finalized. func CloneMetrics(md pdata.Metrics) pdata.Metrics { - if ims, ok := md.InternalOpaque.(data.MetricData); ok { + if ims, ok := md.InternalOpaque.(dataold.MetricData); ok { return pdata.Metrics{InternalOpaque: ims.Clone()} } if ocmds, ok := md.InternalOpaque.([]consumerdata.MetricsData); ok { @@ -86,7 +86,7 @@ func CloneMetrics(md pdata.Metrics) pdata.Metrics { } func MetricCount(md pdata.Metrics) int { - if ims, ok := md.InternalOpaque.(data.MetricData); ok { + if ims, ok := md.InternalOpaque.(dataold.MetricData); ok { return ims.MetricCount() } if ocmds, ok := md.InternalOpaque.([]consumerdata.MetricsData); ok { @@ -100,7 +100,7 @@ func MetricCount(md pdata.Metrics) int { } func MetricAndDataPointCount(md pdata.Metrics) (int, int) { - if ims, ok := md.InternalOpaque.(data.MetricData); ok { + if ims, ok := md.InternalOpaque.(dataold.MetricData); ok { return ims.MetricAndDataPointCount() } if ocmds, ok := md.InternalOpaque.([]consumerdata.MetricsData); ok { diff --git a/consumer/pdatautil/pdatautil_test.go b/consumer/pdatautil/pdatautil_test.go index d8eb3257be2..db7cf6f41c5 100644 --- a/consumer/pdatautil/pdatautil_test.go +++ b/consumer/pdatautil/pdatautil_test.go @@ -24,11 +24,11 @@ import ( "go.opentelemetry.io/collector/consumer/consumerdata" "go.opentelemetry.io/collector/consumer/pdata" - "go.opentelemetry.io/collector/internal/data/testdata" + "go.opentelemetry.io/collector/internal/dataold/testdataold" ) func TestMetricCount(t *testing.T) { - metrics := pdata.Metrics{InternalOpaque: testdata.GenerateMetricDataTwoMetrics()} + metrics := pdata.Metrics{InternalOpaque: testdataold.GenerateMetricDataTwoMetrics()} assert.Equal(t, 2, MetricCount(metrics)) metrics = pdata.Metrics{InternalOpaque: []consumerdata.MetricsData{ @@ -50,7 +50,7 @@ func TestMetricCount(t *testing.T) { } func TestMetricAndDataPointCount(t *testing.T) { - metrics := pdata.Metrics{InternalOpaque: testdata.GenerateMetricDataTwoMetrics()} + metrics := pdata.Metrics{InternalOpaque: testdataold.GenerateMetricDataTwoMetrics()} metricsCount, dataPointsCount := MetricAndDataPointCount(metrics) assert.Equal(t, 2, metricsCount) assert.Equal(t, 4, dataPointsCount) diff --git a/exporter/exporterhelper/metricshelper_test.go b/exporter/exporterhelper/metricshelper_test.go index 990e92226f3..936d30d0fc5 100644 --- a/exporter/exporterhelper/metricshelper_test.go +++ b/exporter/exporterhelper/metricshelper_test.go @@ -28,6 +28,7 @@ import ( "go.opentelemetry.io/collector/consumer/pdata" "go.opentelemetry.io/collector/consumer/pdatautil" "go.opentelemetry.io/collector/internal/data/testdata" + "go.opentelemetry.io/collector/internal/dataold/testdataold" "go.opentelemetry.io/collector/obsreport" "go.opentelemetry.io/collector/obsreport/obsreporttest" ) @@ -46,7 +47,7 @@ var ( ) func TestMetricsRequest(t *testing.T) { - mr := newMetricsRequest(context.Background(), pdatautil.MetricsFromInternalMetrics(testdata.GenerateMetricDataEmpty()), nil) + mr := newMetricsRequest(context.Background(), pdatautil.MetricsFromInternalMetrics(testdataold.GenerateMetricDataEmpty()), nil) partialErr := consumererror.PartialTracesError(errors.New("some error"), 
testdata.GenerateTraceDataOneSpan()) assert.Same(t, mr, mr.onPartialError(partialErr.(consumererror.PartialError))) @@ -66,7 +67,7 @@ func TestMetricsExporter_NilPushMetricsData(t *testing.T) { } func TestMetricsExporter_Default(t *testing.T) { - md := testdata.GenerateMetricDataEmpty() + md := testdataold.GenerateMetricDataEmpty() me, err := NewMetricsExporter(fakeMetricsExporterConfig, newPushMetricsData(0, nil)) assert.NotNil(t, me) assert.NoError(t, err) @@ -76,7 +77,7 @@ func TestMetricsExporter_Default(t *testing.T) { } func TestMetricsExporter_Default_ReturnError(t *testing.T) { - md := testdata.GenerateMetricDataEmpty() + md := testdataold.GenerateMetricDataEmpty() want := errors.New("my_error") me, err := NewMetricsExporter(fakeMetricsExporterConfig, newPushMetricsData(0, want)) require.Nil(t, err) @@ -165,7 +166,7 @@ func checkRecordedMetricsForMetricsExporter(t *testing.T, me component.MetricsEx require.NoError(t, err) defer doneFn() - md := testdata.GenerateMetricDataTwoMetrics() + md := testdataold.GenerateMetricDataTwoMetrics() const numBatches = 7 for i := 0; i < numBatches; i++ { require.Equal(t, wantError, me.ConsumeMetrics(context.Background(), pdatautil.MetricsFromInternalMetrics(md))) @@ -181,7 +182,7 @@ func checkRecordedMetricsForMetricsExporter(t *testing.T, me component.MetricsEx } func generateMetricsTraffic(t *testing.T, me component.MetricsExporter, numRequests int, wantError error) { - md := testdata.GenerateMetricDataOneMetricOneDataPoint() + md := testdataold.GenerateMetricDataOneMetricOneDataPoint() ctx, span := trace.StartSpan(context.Background(), fakeMetricsParentSpanName, trace.WithSampler(trace.AlwaysSample())) defer span.End() for i := 0; i < numRequests; i++ { diff --git a/exporter/exportertest/nop_exporter_test.go b/exporter/exportertest/nop_exporter_test.go index c985184833c..5ea1e5ad25a 100644 --- a/exporter/exportertest/nop_exporter_test.go +++ b/exporter/exportertest/nop_exporter_test.go @@ -21,7 +21,7 @@ import ( "go.opentelemetry.io/collector/consumer/pdata" "go.opentelemetry.io/collector/consumer/pdatautil" - "go.opentelemetry.io/collector/internal/data" + "go.opentelemetry.io/collector/internal/dataold" ) func TestNopTraceExporter(t *testing.T) { @@ -34,7 +34,7 @@ func TestNopTraceExporter(t *testing.T) { func TestNopMetricsExporter(t *testing.T) { nme := NewNopMetricsExporter() require.NoError(t, nme.Start(context.Background(), nil)) - require.NoError(t, nme.ConsumeMetrics(context.Background(), pdatautil.MetricsFromInternalMetrics(data.NewMetricData()))) + require.NoError(t, nme.ConsumeMetrics(context.Background(), pdatautil.MetricsFromInternalMetrics(dataold.NewMetricData()))) require.NoError(t, nme.Shutdown(context.Background())) } diff --git a/exporter/exportertest/sink_exporter_test.go b/exporter/exportertest/sink_exporter_test.go index 127c2f7490f..6348d74d3cc 100644 --- a/exporter/exportertest/sink_exporter_test.go +++ b/exporter/exportertest/sink_exporter_test.go @@ -25,6 +25,7 @@ import ( "go.opentelemetry.io/collector/consumer/pdata" "go.opentelemetry.io/collector/consumer/pdatautil" "go.opentelemetry.io/collector/internal/data/testdata" + "go.opentelemetry.io/collector/internal/dataold/testdataold" ) func TestSinkTraceExporter(t *testing.T) { @@ -58,7 +59,7 @@ func TestSinkTraceExporter_Error(t *testing.T) { func TestSinkMetricsExporter(t *testing.T) { sink := new(SinkMetricsExporter) require.NoError(t, sink.Start(context.Background(), componenttest.NewNopHost())) - md := testdata.GenerateMetricDataOneMetric() + md := 
testdataold.GenerateMetricDataOneMetric() want := make([]pdata.Metrics, 0, 7) for i := 0; i < 7; i++ { require.NoError(t, sink.ConsumeMetrics(context.Background(), pdatautil.MetricsFromInternalMetrics(md))) @@ -76,7 +77,7 @@ func TestSinkMetricsExporter_Error(t *testing.T) { sink := new(SinkMetricsExporter) require.NoError(t, sink.Start(context.Background(), componenttest.NewNopHost())) sink.SetConsumeMetricsError(errors.New("my error")) - md := testdata.GenerateMetricDataOneMetric() + md := testdataold.GenerateMetricDataOneMetric() require.Error(t, sink.ConsumeMetrics(context.Background(), pdatautil.MetricsFromInternalMetrics(md))) assert.Len(t, sink.AllMetrics(), 0) assert.Equal(t, 0, sink.MetricsCount()) diff --git a/exporter/fileexporter/file_exporter.go b/exporter/fileexporter/file_exporter.go index e8d57028e51..88f06acda37 100644 --- a/exporter/fileexporter/file_exporter.go +++ b/exporter/fileexporter/file_exporter.go @@ -25,10 +25,10 @@ import ( "go.opentelemetry.io/collector/component" "go.opentelemetry.io/collector/consumer/pdata" "go.opentelemetry.io/collector/consumer/pdatautil" - "go.opentelemetry.io/collector/internal/data" otlplogs "go.opentelemetry.io/collector/internal/data/opentelemetry-proto-gen/collector/logs/v1" otlpmetrics "go.opentelemetry.io/collector/internal/data/opentelemetry-proto-gen/collector/metrics/v1" otlptrace "go.opentelemetry.io/collector/internal/data/opentelemetry-proto-gen/collector/trace/v1" + "go.opentelemetry.io/collector/internal/dataold" ) // Marshaler configuration used for marhsaling Protobuf to JSON. Use default config. @@ -50,7 +50,7 @@ func (e *fileExporter) ConsumeTraces(_ context.Context, td pdata.Traces) error { func (e *fileExporter) ConsumeMetrics(_ context.Context, md pdata.Metrics) error { request := otlpmetrics.ExportMetricsServiceRequest{ - ResourceMetrics: data.MetricDataToOtlp(pdatautil.MetricsToInternalMetrics(md)), + ResourceMetrics: dataold.MetricDataToOtlp(pdatautil.MetricsToInternalMetrics(md)), } return exportMessageAsLine(e, &request) } diff --git a/exporter/fileexporter/file_exporter_test.go b/exporter/fileexporter/file_exporter_test.go index 15d91f0ab28..85f6d3f244f 100644 --- a/exporter/fileexporter/file_exporter_test.go +++ b/exporter/fileexporter/file_exporter_test.go @@ -24,7 +24,6 @@ import ( "go.opentelemetry.io/collector/consumer/pdata" "go.opentelemetry.io/collector/consumer/pdatautil" - "go.opentelemetry.io/collector/internal/data" collectorlogs "go.opentelemetry.io/collector/internal/data/opentelemetry-proto-gen/collector/logs/v1" collectormetrics "go.opentelemetry.io/collector/internal/data/opentelemetry-proto-gen/collector/metrics/v1" collectortrace "go.opentelemetry.io/collector/internal/data/opentelemetry-proto-gen/collector/trace/v1" @@ -32,6 +31,8 @@ import ( logspb "go.opentelemetry.io/collector/internal/data/opentelemetry-proto-gen/logs/v1" otresourcepb "go.opentelemetry.io/collector/internal/data/opentelemetry-proto-gen/resource/v1" "go.opentelemetry.io/collector/internal/data/testdata" + "go.opentelemetry.io/collector/internal/dataold" + "go.opentelemetry.io/collector/internal/dataold/testdataold" "go.opentelemetry.io/collector/testutil" ) @@ -57,7 +58,7 @@ func TestFileMetricsExporterNoErrors(t *testing.T) { lme := &fileExporter{file: mf} require.NotNil(t, lme) - md := pdatautil.MetricsFromInternalMetrics(testdata.GenerateMetricDataTwoMetrics()) + md := pdatautil.MetricsFromInternalMetrics(testdataold.GenerateMetricDataTwoMetrics()) assert.NoError(t, lme.ConsumeMetrics(context.Background(), md)) 
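The exporter and test updates in this diff all follow the same conversion chain: a dataold.MetricData is wrapped into pdata.Metrics via pdatautil.MetricsFromInternalMetrics (stored in InternalOpaque), and the exporter unwraps it again before building the OTLP request, as the fileExporter.ConsumeMetrics change above shows. A condensed sketch of that round trip, assuming it compiles inside the collector module (dataold and testdataold are internal packages); the buildRequest helper and its package are hypothetical:

    package example

    import (
        "go.opentelemetry.io/collector/consumer/pdatautil"
        otlpmetrics "go.opentelemetry.io/collector/internal/data/opentelemetry-proto-gen/collector/metrics/v1"
        "go.opentelemetry.io/collector/internal/dataold"
        "go.opentelemetry.io/collector/internal/dataold/testdataold"
    )

    func buildRequest() *otlpmetrics.ExportMetricsServiceRequest {
        md := testdataold.GenerateMetricDataTwoMetrics()    // dataold.MetricData test fixture
        metrics := pdatautil.MetricsFromInternalMetrics(md) // wraps it as pdata.Metrics{InternalOpaque: md}
        imd := pdatautil.MetricsToInternalMetrics(metrics)  // type-asserts InternalOpaque back to dataold.MetricData
        return &otlpmetrics.ExportMetricsServiceRequest{
            ResourceMetrics: dataold.MetricDataToOtlp(imd), // same call the file and OTLP exporters now make
        }
    }
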
assert.NoError(t, lme.Shutdown(context.Background())) @@ -65,7 +66,7 @@ func TestFileMetricsExporterNoErrors(t *testing.T) { var j collectormetrics.ExportMetricsServiceRequest assert.NoError(t, unmarshaler.Unmarshal(mf, &j)) - assert.EqualValues(t, data.MetricDataToOtlp(pdatautil.MetricsToInternalMetrics(md)), j.ResourceMetrics) + assert.EqualValues(t, dataold.MetricDataToOtlp(pdatautil.MetricsToInternalMetrics(md)), j.ResourceMetrics) } func TestFileLogsExporterNoErrors(t *testing.T) { diff --git a/exporter/loggingexporter/logging_exporter.go b/exporter/loggingexporter/logging_exporter.go index 7a52f0bd875..6fa214851c4 100644 --- a/exporter/loggingexporter/logging_exporter.go +++ b/exporter/loggingexporter/logging_exporter.go @@ -29,6 +29,7 @@ import ( "go.opentelemetry.io/collector/consumer/pdata" "go.opentelemetry.io/collector/consumer/pdatautil" "go.opentelemetry.io/collector/exporter/exporterhelper" + "go.opentelemetry.io/collector/internal/dataold" ) type logDataBuffer struct { @@ -73,7 +74,7 @@ func (b *logDataBuffer) logInstrumentationLibrary(il pdata.InstrumentationLibrar il.Version()) } -func (b *logDataBuffer) logMetricDescriptor(md pdata.MetricDescriptor) { +func (b *logDataBuffer) logMetricDescriptor(md dataold.MetricDescriptor) { if md.IsNil() { return } @@ -85,31 +86,31 @@ func (b *logDataBuffer) logMetricDescriptor(md pdata.MetricDescriptor) { b.logEntry(" -> Type: %s", md.Type().String()) } -func (b *logDataBuffer) logMetricDataPoints(m pdata.Metric) { +func (b *logDataBuffer) logMetricDataPoints(m dataold.Metric) { md := m.MetricDescriptor() if md.IsNil() { return } switch md.Type() { - case pdata.MetricTypeInvalid: + case dataold.MetricTypeInvalid: return - case pdata.MetricTypeInt64: + case dataold.MetricTypeInt64: b.logInt64DataPoints(m.Int64DataPoints()) - case pdata.MetricTypeDouble: + case dataold.MetricTypeDouble: b.logDoubleDataPoints(m.DoubleDataPoints()) - case pdata.MetricTypeMonotonicInt64: + case dataold.MetricTypeMonotonicInt64: b.logInt64DataPoints(m.Int64DataPoints()) - case pdata.MetricTypeMonotonicDouble: + case dataold.MetricTypeMonotonicDouble: b.logDoubleDataPoints(m.DoubleDataPoints()) - case pdata.MetricTypeHistogram: + case dataold.MetricTypeHistogram: b.logHistogramDataPoints(m.HistogramDataPoints()) - case pdata.MetricTypeSummary: + case dataold.MetricTypeSummary: b.logSummaryDataPoints(m.SummaryDataPoints()) } } -func (b *logDataBuffer) logInt64DataPoints(ps pdata.Int64DataPointSlice) { +func (b *logDataBuffer) logInt64DataPoints(ps dataold.Int64DataPointSlice) { for i := 0; i < ps.Len(); i++ { p := ps.At(i) if p.IsNil() { @@ -125,7 +126,7 @@ func (b *logDataBuffer) logInt64DataPoints(ps pdata.Int64DataPointSlice) { } } -func (b *logDataBuffer) logDoubleDataPoints(ps pdata.DoubleDataPointSlice) { +func (b *logDataBuffer) logDoubleDataPoints(ps dataold.DoubleDataPointSlice) { for i := 0; i < ps.Len(); i++ { p := ps.At(i) if p.IsNil() { @@ -141,7 +142,7 @@ func (b *logDataBuffer) logDoubleDataPoints(ps pdata.DoubleDataPointSlice) { } } -func (b *logDataBuffer) logHistogramDataPoints(ps pdata.HistogramDataPointSlice) { +func (b *logDataBuffer) logHistogramDataPoints(ps dataold.HistogramDataPointSlice) { for i := 0; i < ps.Len(); i++ { p := ps.At(i) if p.IsNil() { @@ -177,7 +178,7 @@ func (b *logDataBuffer) logHistogramDataPoints(ps pdata.HistogramDataPointSlice) } } -func (b *logDataBuffer) logSummaryDataPoints(ps pdata.SummaryDataPointSlice) { +func (b *logDataBuffer) logSummaryDataPoints(ps dataold.SummaryDataPointSlice) { for i := 0; i < 
ps.Len(); i++ { p := ps.At(i) if p.IsNil() { diff --git a/exporter/loggingexporter/logging_exporter_test.go b/exporter/loggingexporter/logging_exporter_test.go index 15e8502a154..4af556f9f93 100644 --- a/exporter/loggingexporter/logging_exporter_test.go +++ b/exporter/loggingexporter/logging_exporter_test.go @@ -24,6 +24,7 @@ import ( "go.opentelemetry.io/collector/config/configmodels" "go.opentelemetry.io/collector/consumer/pdatautil" "go.opentelemetry.io/collector/internal/data/testdata" + "go.opentelemetry.io/collector/internal/dataold/testdataold" ) func TestLoggingTraceExporterNoErrors(t *testing.T) { @@ -45,15 +46,15 @@ func TestLoggingMetricsExporterNoErrors(t *testing.T) { require.NotNil(t, lme) assert.NoError(t, err) - assert.NoError(t, lme.ConsumeMetrics(context.Background(), pdatautil.MetricsFromInternalMetrics(testdata.GenerateMetricDataEmpty()))) - assert.NoError(t, lme.ConsumeMetrics(context.Background(), pdatautil.MetricsFromInternalMetrics(testdata.GenerateMetricDataOneEmptyOneNilResourceMetrics()))) - assert.NoError(t, lme.ConsumeMetrics(context.Background(), pdatautil.MetricsFromInternalMetrics(testdata.GenerateMetricDataOneEmptyOneNilInstrumentationLibrary()))) - assert.NoError(t, lme.ConsumeMetrics(context.Background(), pdatautil.MetricsFromInternalMetrics(testdata.GenerateMetricDataOneMetricOneNil()))) - assert.NoError(t, lme.ConsumeMetrics(context.Background(), pdatautil.MetricsFromInternalMetrics(testdata.GenerateMetricDataWithCountersHistogramAndSummary()))) - assert.NoError(t, lme.ConsumeMetrics(context.Background(), pdatautil.MetricsFromInternalMetrics(testdata.GenerateMetricDataAllTypesNilDataPoint()))) - assert.NoError(t, lme.ConsumeMetrics(context.Background(), pdatautil.MetricsFromInternalMetrics(testdata.GenerateMetricDataAllTypesEmptyDataPoint()))) - assert.NoError(t, lme.ConsumeMetrics(context.Background(), pdatautil.MetricsFromInternalMetrics(testdata.GenerateMetricDataNilMetricDescriptor()))) - assert.NoError(t, lme.ConsumeMetrics(context.Background(), pdatautil.MetricsFromInternalMetrics(testdata.GenerateMetricDataMetricTypeInvalid()))) + assert.NoError(t, lme.ConsumeMetrics(context.Background(), pdatautil.MetricsFromInternalMetrics(testdataold.GenerateMetricDataEmpty()))) + assert.NoError(t, lme.ConsumeMetrics(context.Background(), pdatautil.MetricsFromInternalMetrics(testdataold.GenerateMetricDataOneEmptyOneNilResourceMetrics()))) + assert.NoError(t, lme.ConsumeMetrics(context.Background(), pdatautil.MetricsFromInternalMetrics(testdataold.GenerateMetricDataOneEmptyOneNilInstrumentationLibrary()))) + assert.NoError(t, lme.ConsumeMetrics(context.Background(), pdatautil.MetricsFromInternalMetrics(testdataold.GenerateMetricDataOneMetricOneNil()))) + assert.NoError(t, lme.ConsumeMetrics(context.Background(), pdatautil.MetricsFromInternalMetrics(testdataold.GenerateMetricDataWithCountersHistogramAndSummary()))) + assert.NoError(t, lme.ConsumeMetrics(context.Background(), pdatautil.MetricsFromInternalMetrics(testdataold.GenerateMetricDataAllTypesNilDataPoint()))) + assert.NoError(t, lme.ConsumeMetrics(context.Background(), pdatautil.MetricsFromInternalMetrics(testdataold.GenerateMetricDataAllTypesEmptyDataPoint()))) + assert.NoError(t, lme.ConsumeMetrics(context.Background(), pdatautil.MetricsFromInternalMetrics(testdataold.GenerateMetricDataNilMetricDescriptor()))) + assert.NoError(t, lme.ConsumeMetrics(context.Background(), pdatautil.MetricsFromInternalMetrics(testdataold.GenerateMetricDataMetricTypeInvalid()))) assert.NoError(t, 
lme.Shutdown(context.Background())) } diff --git a/exporter/opencensusexporter/opencensus_test.go b/exporter/opencensusexporter/opencensus_test.go index 7064b5ad7f6..9fa17831d9c 100644 --- a/exporter/opencensusexporter/opencensus_test.go +++ b/exporter/opencensusexporter/opencensus_test.go @@ -30,6 +30,7 @@ import ( "go.opentelemetry.io/collector/consumer/pdatautil" "go.opentelemetry.io/collector/exporter/exportertest" "go.opentelemetry.io/collector/internal/data/testdata" + "go.opentelemetry.io/collector/internal/dataold/testdataold" "go.opentelemetry.io/collector/receiver/opencensusreceiver" "go.opentelemetry.io/collector/testutil" ) @@ -165,7 +166,7 @@ func TestSendMetrics(t *testing.T) { assert.NoError(t, exp.Shutdown(context.Background())) }) - md := testdata.GenerateMetricDataOneMetric() + md := testdataold.GenerateMetricDataOneMetric() assert.NoError(t, exp.ConsumeMetrics(context.Background(), pdatautil.MetricsFromInternalMetrics(md))) testutil.WaitFor(t, func() bool { return len(sink.AllMetrics()) == 1 @@ -206,7 +207,7 @@ func TestSendMetrics_NoBackend(t *testing.T) { assert.NoError(t, exp.Shutdown(context.Background())) }) - md := pdatautil.MetricsFromInternalMetrics(testdata.GenerateMetricDataOneMetric()) + md := pdatautil.MetricsFromInternalMetrics(testdataold.GenerateMetricDataOneMetric()) for i := 0; i < 10000; i++ { assert.Error(t, exp.ConsumeMetrics(context.Background(), md)) } @@ -228,6 +229,6 @@ func TestSendMetrics_AfterStop(t *testing.T) { require.NoError(t, exp.Start(context.Background(), host)) assert.NoError(t, exp.Shutdown(context.Background())) - md := pdatautil.MetricsFromInternalMetrics(testdata.GenerateMetricDataOneMetric()) + md := pdatautil.MetricsFromInternalMetrics(testdataold.GenerateMetricDataOneMetric()) assert.Error(t, exp.ConsumeMetrics(context.Background(), md)) } diff --git a/exporter/otlpexporter/otlp.go b/exporter/otlpexporter/otlp.go index cfa3bc805b4..0d8e6b680a2 100644 --- a/exporter/otlpexporter/otlp.go +++ b/exporter/otlpexporter/otlp.go @@ -31,10 +31,10 @@ import ( "go.opentelemetry.io/collector/consumer/pdata" "go.opentelemetry.io/collector/consumer/pdatautil" "go.opentelemetry.io/collector/exporter/exporterhelper" - "go.opentelemetry.io/collector/internal/data" otlplogs "go.opentelemetry.io/collector/internal/data/opentelemetry-proto-gen/collector/logs/v1" otlpmetrics "go.opentelemetry.io/collector/internal/data/opentelemetry-proto-gen/collector/metrics/v1" otlptrace "go.opentelemetry.io/collector/internal/data/opentelemetry-proto-gen/collector/trace/v1" + "go.opentelemetry.io/collector/internal/dataold" ) type exporterImp struct { @@ -92,7 +92,7 @@ func (e *exporterImp) pushTraceData(ctx context.Context, td pdata.Traces) (int, func (e *exporterImp) pushMetricsData(ctx context.Context, md pdata.Metrics) (int, error) { imd := pdatautil.MetricsToInternalMetrics(md) request := &otlpmetrics.ExportMetricsServiceRequest{ - ResourceMetrics: data.MetricDataToOtlp(imd), + ResourceMetrics: dataold.MetricDataToOtlp(imd), } err := e.w.exportMetrics(ctx, request) diff --git a/exporter/otlpexporter/otlp_test.go b/exporter/otlpexporter/otlp_test.go index 1dc34d20d5e..5cbbcaaa275 100644 --- a/exporter/otlpexporter/otlp_test.go +++ b/exporter/otlpexporter/otlp_test.go @@ -37,6 +37,7 @@ import ( otlpmetrics "go.opentelemetry.io/collector/internal/data/opentelemetry-proto-gen/collector/metrics/v1" otlptraces "go.opentelemetry.io/collector/internal/data/opentelemetry-proto-gen/collector/trace/v1" "go.opentelemetry.io/collector/internal/data/testdata" + 
"go.opentelemetry.io/collector/internal/dataold/testdataold" "go.opentelemetry.io/collector/obsreport" "go.opentelemetry.io/collector/testutil" ) @@ -272,7 +273,7 @@ func TestSendMetrics(t *testing.T) { assert.EqualValues(t, 0, atomic.LoadInt32(&rcv.requestCount)) // Send empty trace. - md := pdatautil.MetricsFromInternalMetrics(testdata.GenerateMetricDataEmpty()) + md := pdatautil.MetricsFromInternalMetrics(testdataold.GenerateMetricDataEmpty()) assert.NoError(t, exp.ConsumeMetrics(context.Background(), md)) // Wait until it is received. @@ -284,10 +285,10 @@ func TestSendMetrics(t *testing.T) { assert.EqualValues(t, 0, atomic.LoadInt32(&rcv.totalItems)) // A trace with 2 spans. - md = pdatautil.MetricsFromInternalMetrics(testdata.GenerateMetricDataTwoMetrics()) + md = pdatautil.MetricsFromInternalMetrics(testdataold.GenerateMetricDataTwoMetrics()) expectedOTLPReq := &otlpmetrics.ExportMetricsServiceRequest{ - ResourceMetrics: testdata.GenerateMetricOtlpTwoMetrics(), + ResourceMetrics: testdataold.GenerateMetricOtlpTwoMetrics(), } err = exp.ConsumeMetrics(context.Background(), md) diff --git a/exporter/prometheusremotewriteexporter/helper.go b/exporter/prometheusremotewriteexporter/helper.go index 0caff6d1d6d..fafdc3fc2f9 100644 --- a/exporter/prometheusremotewriteexporter/helper.go +++ b/exporter/prometheusremotewriteexporter/helper.go @@ -23,7 +23,7 @@ import ( "github.com/prometheus/prometheus/prompb" common "go.opentelemetry.io/collector/internal/data/opentelemetry-proto-gen/common/v1" - otlp "go.opentelemetry.io/collector/internal/data/opentelemetry-proto-gen/metrics/v1" + otlp "go.opentelemetry.io/collector/internal/data/opentelemetry-proto-gen/metrics/v1old" ) const ( diff --git a/exporter/prometheusremotewriteexporter/helper_test.go b/exporter/prometheusremotewriteexporter/helper_test.go index 8a2e598a1fc..f3ccaa777e4 100644 --- a/exporter/prometheusremotewriteexporter/helper_test.go +++ b/exporter/prometheusremotewriteexporter/helper_test.go @@ -22,7 +22,7 @@ import ( "github.com/stretchr/testify/assert" common "go.opentelemetry.io/collector/internal/data/opentelemetry-proto-gen/common/v1" - otlp "go.opentelemetry.io/collector/internal/data/opentelemetry-proto-gen/metrics/v1" + otlp "go.opentelemetry.io/collector/internal/data/opentelemetry-proto-gen/metrics/v1old" ) // Test_validateMetrics checks validateMetrics return true if a type and temporality combination is valid, false diff --git a/exporter/prometheusremotewriteexporter/testutil_test.go b/exporter/prometheusremotewriteexporter/testutil_test.go index 51099ba474b..d35afe54c1d 100644 --- a/exporter/prometheusremotewriteexporter/testutil_test.go +++ b/exporter/prometheusremotewriteexporter/testutil_test.go @@ -20,7 +20,7 @@ import ( "github.com/prometheus/prometheus/prompb" commonpb "go.opentelemetry.io/collector/internal/data/opentelemetry-proto-gen/common/v1" - otlp "go.opentelemetry.io/collector/internal/data/opentelemetry-proto-gen/metrics/v1" + otlp "go.opentelemetry.io/collector/internal/data/opentelemetry-proto-gen/metrics/v1old" ) type combination struct { diff --git a/internal/data/opentelemetry-proto-gen/collector/metrics/v1/metrics_service.pb.go b/internal/data/opentelemetry-proto-gen/collector/metrics/v1/metrics_service.pb.go index 95ac23f43a7..3cf40ed8909 100644 --- a/internal/data/opentelemetry-proto-gen/collector/metrics/v1/metrics_service.pb.go +++ b/internal/data/opentelemetry-proto-gen/collector/metrics/v1/metrics_service.pb.go @@ -15,7 +15,7 @@ import ( codes "google.golang.org/grpc/codes" status 
"google.golang.org/grpc/status" - v1 "go.opentelemetry.io/collector/internal/data/opentelemetry-proto-gen/metrics/v1" + v1 "go.opentelemetry.io/collector/internal/data/opentelemetry-proto-gen/metrics/v1old" ) // Reference imports to suppress errors if they are not otherwise used. diff --git a/internal/data/opentelemetry-proto-gen/metrics/v1/metrics.pb.go b/internal/data/opentelemetry-proto-gen/metrics/v1old/metrics.pb.go similarity index 99% rename from internal/data/opentelemetry-proto-gen/metrics/v1/metrics.pb.go rename to internal/data/opentelemetry-proto-gen/metrics/v1old/metrics.pb.go index 7340ce0274b..eeb7c6cf010 100644 --- a/internal/data/opentelemetry-proto-gen/metrics/v1/metrics.pb.go +++ b/internal/data/opentelemetry-proto-gen/metrics/v1old/metrics.pb.go @@ -1,7 +1,7 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. // source: opentelemetry/proto/metrics/v1/metrics.proto -package v1 +package v1old import ( encoding_binary "encoding/binary" diff --git a/internal/data/testdata/common.go b/internal/data/testdata/common.go index f17fc02e60f..72d03336d13 100644 --- a/internal/data/testdata/common.go +++ b/internal/data/testdata/common.go @@ -104,102 +104,3 @@ func generateOtlpSpanLinkAttributes() []*otlpcommon.KeyValue { }, } } - -func initMetricLabels1(dest pdata.StringMap) { - dest.InitFromMap(map[string]string{TestLabelKey1: TestLabelValue1}) -} - -func generateOtlpMetricLabels1() []*otlpcommon.StringKeyValue { - return []*otlpcommon.StringKeyValue{ - { - Key: TestLabelKey1, - Value: TestLabelValue1, - }, - } -} - -func initMetricLabelValue1(dest pdata.StringMap) { - dest.InitFromMap(map[string]string{TestLabelKey: TestLabelValue1}) -} - -func generateOtlpMetricLabelValue1() []*otlpcommon.StringKeyValue { - return []*otlpcommon.StringKeyValue{ - { - Key: TestLabelKey, - Value: TestLabelValue1, - }, - } -} - -func initMetricLabels12(dest pdata.StringMap) { - dest.InitFromMap(map[string]string{TestLabelKey1: TestLabelValue1, TestLabelKey2: TestLabelValue2}).Sort() -} - -func generateOtlpMetricLabels12() []*otlpcommon.StringKeyValue { - return []*otlpcommon.StringKeyValue{ - { - Key: TestLabelKey1, - Value: TestLabelValue1, - }, - { - Key: TestLabelKey2, - Value: TestLabelValue2, - }, - } -} - -func initMetricLabels13(dest pdata.StringMap) { - dest.InitFromMap(map[string]string{TestLabelKey1: TestLabelValue1, TestLabelKey3: TestLabelValue3}).Sort() -} - -func generateOtlpMetricLabels13() []*otlpcommon.StringKeyValue { - return []*otlpcommon.StringKeyValue{ - { - Key: TestLabelKey1, - Value: TestLabelValue1, - }, - { - Key: TestLabelKey3, - Value: TestLabelValue3, - }, - } -} - -func initMetricLabels2(dest pdata.StringMap) { - dest.InitFromMap(map[string]string{TestLabelKey2: TestLabelValue2}) -} - -func generateOtlpMetricLabels2() []*otlpcommon.StringKeyValue { - return []*otlpcommon.StringKeyValue{ - { - Key: TestLabelKey2, - Value: TestLabelValue2, - }, - } -} - -func initMetricLabelValue2(dest pdata.StringMap) { - dest.InitFromMap(map[string]string{TestLabelKey: TestLabelValue2}) -} - -func generateOtlpMetricLabelValue2() []*otlpcommon.StringKeyValue { - return []*otlpcommon.StringKeyValue{ - { - Key: TestLabelKey, - Value: TestLabelValue2, - }, - } -} - -func initMetricAttachment(dest pdata.StringMap) { - dest.InitFromMap(map[string]string{TestAttachmentKey: TestAttachmentValue}) -} - -func generateOtlpMetricAttachment() []*otlpcommon.StringKeyValue { - return []*otlpcommon.StringKeyValue{ - { - Key: TestAttachmentKey, - Value: TestAttachmentValue, - }, - } -} diff --git 
a/consumer/pdata/generated_metrics.go b/internal/dataold/generated_metrics.go similarity index 96% rename from consumer/pdata/generated_metrics.go rename to internal/dataold/generated_metrics.go index 43882af39cd..cdb9d59a041 100644 --- a/consumer/pdata/generated_metrics.go +++ b/internal/dataold/generated_metrics.go @@ -15,10 +15,11 @@ // Code generated by "cmd/pdatagen/main.go". DO NOT EDIT. // To regenerate this file run "go run cmd/pdatagen/main.go". -package pdata +package dataold import ( - otlpmetrics "go.opentelemetry.io/collector/internal/data/opentelemetry-proto-gen/metrics/v1" + "go.opentelemetry.io/collector/consumer/pdata" + otlpmetrics "go.opentelemetry.io/collector/internal/data/opentelemetry-proto-gen/metrics/v1old" ) // ResourceMetricsSlice logically represents a slice of ResourceMetrics. @@ -188,8 +189,8 @@ func (ms ResourceMetrics) IsNil() bool { // Empty initialized ResourceMetrics will return "nil" Resource. // // Important: This causes a runtime error if IsNil() returns "true". -func (ms ResourceMetrics) Resource() Resource { - return newResource(&(*ms.orig).Resource) +func (ms ResourceMetrics) Resource() pdata.Resource { + return pdata.DeprecatedNewResource(&(*ms.orig).Resource) } // InstrumentationLibraryMetrics returns the InstrumentationLibraryMetrics associated with this ResourceMetrics. @@ -379,8 +380,8 @@ func (ms InstrumentationLibraryMetrics) IsNil() bool { // Empty initialized InstrumentationLibraryMetrics will return "nil" InstrumentationLibrary. // // Important: This causes a runtime error if IsNil() returns "true". -func (ms InstrumentationLibraryMetrics) InstrumentationLibrary() InstrumentationLibrary { - return newInstrumentationLibrary(&(*ms.orig).InstrumentationLibrary) +func (ms InstrumentationLibraryMetrics) InstrumentationLibrary() pdata.InstrumentationLibrary { + return pdata.DeprecatedNewInstrumentationLibrary(&(*ms.orig).InstrumentationLibrary) } // Metrics returns the Metrics associated with this InstrumentationLibraryMetrics. @@ -892,35 +893,35 @@ func (ms Int64DataPoint) IsNil() bool { // LabelsMap returns the Labels associated with this Int64DataPoint. // // Important: This causes a runtime error if IsNil() returns "true". -func (ms Int64DataPoint) LabelsMap() StringMap { - return newStringMap(&(*ms.orig).Labels) +func (ms Int64DataPoint) LabelsMap() pdata.StringMap { + return pdata.DeprecatedNewStringMap(&(*ms.orig).Labels) } // StartTime returns the starttime associated with this Int64DataPoint. // // Important: This causes a runtime error if IsNil() returns "true". -func (ms Int64DataPoint) StartTime() TimestampUnixNano { - return TimestampUnixNano((*ms.orig).StartTimeUnixNano) +func (ms Int64DataPoint) StartTime() pdata.TimestampUnixNano { + return pdata.TimestampUnixNano((*ms.orig).StartTimeUnixNano) } // SetStartTime replaces the starttime associated with this Int64DataPoint. // // Important: This causes a runtime error if IsNil() returns "true". -func (ms Int64DataPoint) SetStartTime(v TimestampUnixNano) { +func (ms Int64DataPoint) SetStartTime(v pdata.TimestampUnixNano) { (*ms.orig).StartTimeUnixNano = uint64(v) } // Timestamp returns the timestamp associated with this Int64DataPoint. // // Important: This causes a runtime error if IsNil() returns "true". 
-func (ms Int64DataPoint) Timestamp() TimestampUnixNano { - return TimestampUnixNano((*ms.orig).TimeUnixNano) +func (ms Int64DataPoint) Timestamp() pdata.TimestampUnixNano { + return pdata.TimestampUnixNano((*ms.orig).TimeUnixNano) } // SetTimestamp replaces the timestamp associated with this Int64DataPoint. // // Important: This causes a runtime error if IsNil() returns "true". -func (ms Int64DataPoint) SetTimestamp(v TimestampUnixNano) { +func (ms Int64DataPoint) SetTimestamp(v pdata.TimestampUnixNano) { (*ms.orig).TimeUnixNano = uint64(v) } @@ -1117,35 +1118,35 @@ func (ms DoubleDataPoint) IsNil() bool { // LabelsMap returns the Labels associated with this DoubleDataPoint. // // Important: This causes a runtime error if IsNil() returns "true". -func (ms DoubleDataPoint) LabelsMap() StringMap { - return newStringMap(&(*ms.orig).Labels) +func (ms DoubleDataPoint) LabelsMap() pdata.StringMap { + return pdata.DeprecatedNewStringMap(&(*ms.orig).Labels) } // StartTime returns the starttime associated with this DoubleDataPoint. // // Important: This causes a runtime error if IsNil() returns "true". -func (ms DoubleDataPoint) StartTime() TimestampUnixNano { - return TimestampUnixNano((*ms.orig).StartTimeUnixNano) +func (ms DoubleDataPoint) StartTime() pdata.TimestampUnixNano { + return pdata.TimestampUnixNano((*ms.orig).StartTimeUnixNano) } // SetStartTime replaces the starttime associated with this DoubleDataPoint. // // Important: This causes a runtime error if IsNil() returns "true". -func (ms DoubleDataPoint) SetStartTime(v TimestampUnixNano) { +func (ms DoubleDataPoint) SetStartTime(v pdata.TimestampUnixNano) { (*ms.orig).StartTimeUnixNano = uint64(v) } // Timestamp returns the timestamp associated with this DoubleDataPoint. // // Important: This causes a runtime error if IsNil() returns "true". -func (ms DoubleDataPoint) Timestamp() TimestampUnixNano { - return TimestampUnixNano((*ms.orig).TimeUnixNano) +func (ms DoubleDataPoint) Timestamp() pdata.TimestampUnixNano { + return pdata.TimestampUnixNano((*ms.orig).TimeUnixNano) } // SetTimestamp replaces the timestamp associated with this DoubleDataPoint. // // Important: This causes a runtime error if IsNil() returns "true". -func (ms DoubleDataPoint) SetTimestamp(v TimestampUnixNano) { +func (ms DoubleDataPoint) SetTimestamp(v pdata.TimestampUnixNano) { (*ms.orig).TimeUnixNano = uint64(v) } @@ -1342,35 +1343,35 @@ func (ms HistogramDataPoint) IsNil() bool { // LabelsMap returns the Labels associated with this HistogramDataPoint. // // Important: This causes a runtime error if IsNil() returns "true". -func (ms HistogramDataPoint) LabelsMap() StringMap { - return newStringMap(&(*ms.orig).Labels) +func (ms HistogramDataPoint) LabelsMap() pdata.StringMap { + return pdata.DeprecatedNewStringMap(&(*ms.orig).Labels) } // StartTime returns the starttime associated with this HistogramDataPoint. // // Important: This causes a runtime error if IsNil() returns "true". -func (ms HistogramDataPoint) StartTime() TimestampUnixNano { - return TimestampUnixNano((*ms.orig).StartTimeUnixNano) +func (ms HistogramDataPoint) StartTime() pdata.TimestampUnixNano { + return pdata.TimestampUnixNano((*ms.orig).StartTimeUnixNano) } // SetStartTime replaces the starttime associated with this HistogramDataPoint. // // Important: This causes a runtime error if IsNil() returns "true". 
-func (ms HistogramDataPoint) SetStartTime(v TimestampUnixNano) { +func (ms HistogramDataPoint) SetStartTime(v pdata.TimestampUnixNano) { (*ms.orig).StartTimeUnixNano = uint64(v) } // Timestamp returns the timestamp associated with this HistogramDataPoint. // // Important: This causes a runtime error if IsNil() returns "true". -func (ms HistogramDataPoint) Timestamp() TimestampUnixNano { - return TimestampUnixNano((*ms.orig).TimeUnixNano) +func (ms HistogramDataPoint) Timestamp() pdata.TimestampUnixNano { + return pdata.TimestampUnixNano((*ms.orig).TimeUnixNano) } // SetTimestamp replaces the timestamp associated with this HistogramDataPoint. // // Important: This causes a runtime error if IsNil() returns "true". -func (ms HistogramDataPoint) SetTimestamp(v TimestampUnixNano) { +func (ms HistogramDataPoint) SetTimestamp(v pdata.TimestampUnixNano) { (*ms.orig).TimeUnixNano = uint64(v) } @@ -1681,14 +1682,14 @@ func (ms HistogramBucketExemplar) IsNil() bool { // Timestamp returns the timestamp associated with this HistogramBucketExemplar. // // Important: This causes a runtime error if IsNil() returns "true". -func (ms HistogramBucketExemplar) Timestamp() TimestampUnixNano { - return TimestampUnixNano((*ms.orig).TimeUnixNano) +func (ms HistogramBucketExemplar) Timestamp() pdata.TimestampUnixNano { + return pdata.TimestampUnixNano((*ms.orig).TimeUnixNano) } // SetTimestamp replaces the timestamp associated with this HistogramBucketExemplar. // // Important: This causes a runtime error if IsNil() returns "true". -func (ms HistogramBucketExemplar) SetTimestamp(v TimestampUnixNano) { +func (ms HistogramBucketExemplar) SetTimestamp(v pdata.TimestampUnixNano) { (*ms.orig).TimeUnixNano = uint64(v) } @@ -1709,8 +1710,8 @@ func (ms HistogramBucketExemplar) SetValue(v float64) { // Attachments returns the Attachments associated with this HistogramBucketExemplar. // // Important: This causes a runtime error if IsNil() returns "true". -func (ms HistogramBucketExemplar) Attachments() StringMap { - return newStringMap(&(*ms.orig).Attachments) +func (ms HistogramBucketExemplar) Attachments() pdata.StringMap { + return pdata.DeprecatedNewStringMap(&(*ms.orig).Attachments) } // CopyTo copies all properties from the current struct to the dest. @@ -1891,35 +1892,35 @@ func (ms SummaryDataPoint) IsNil() bool { // LabelsMap returns the Labels associated with this SummaryDataPoint. // // Important: This causes a runtime error if IsNil() returns "true". -func (ms SummaryDataPoint) LabelsMap() StringMap { - return newStringMap(&(*ms.orig).Labels) +func (ms SummaryDataPoint) LabelsMap() pdata.StringMap { + return pdata.DeprecatedNewStringMap(&(*ms.orig).Labels) } // StartTime returns the starttime associated with this SummaryDataPoint. // // Important: This causes a runtime error if IsNil() returns "true". -func (ms SummaryDataPoint) StartTime() TimestampUnixNano { - return TimestampUnixNano((*ms.orig).StartTimeUnixNano) +func (ms SummaryDataPoint) StartTime() pdata.TimestampUnixNano { + return pdata.TimestampUnixNano((*ms.orig).StartTimeUnixNano) } // SetStartTime replaces the starttime associated with this SummaryDataPoint. // // Important: This causes a runtime error if IsNil() returns "true". -func (ms SummaryDataPoint) SetStartTime(v TimestampUnixNano) { +func (ms SummaryDataPoint) SetStartTime(v pdata.TimestampUnixNano) { (*ms.orig).StartTimeUnixNano = uint64(v) } // Timestamp returns the timestamp associated with this SummaryDataPoint. // // Important: This causes a runtime error if IsNil() returns "true". 
-func (ms SummaryDataPoint) Timestamp() TimestampUnixNano { - return TimestampUnixNano((*ms.orig).TimeUnixNano) +func (ms SummaryDataPoint) Timestamp() pdata.TimestampUnixNano { + return pdata.TimestampUnixNano((*ms.orig).TimeUnixNano) } // SetTimestamp replaces the timestamp associated with this SummaryDataPoint. // // Important: This causes a runtime error if IsNil() returns "true". -func (ms SummaryDataPoint) SetTimestamp(v TimestampUnixNano) { +func (ms SummaryDataPoint) SetTimestamp(v pdata.TimestampUnixNano) { (*ms.orig).TimeUnixNano = uint64(v) } diff --git a/consumer/pdata/generated_metrics_test.go b/internal/dataold/generated_metrics_test.go similarity index 95% rename from consumer/pdata/generated_metrics_test.go rename to internal/dataold/generated_metrics_test.go index b9f67b94f7a..88670123abe 100644 --- a/consumer/pdata/generated_metrics_test.go +++ b/internal/dataold/generated_metrics_test.go @@ -12,17 +12,15 @@ // See the License for the specific language governing permissions and // limitations under the License. -// Code generated by "cmd/pdatagen/main.go". DO NOT EDIT. -// To regenerate this file run "go run cmd/pdatagen/main.go". - -package pdata +package dataold import ( "testing" "github.com/stretchr/testify/assert" - otlpmetrics "go.opentelemetry.io/collector/internal/data/opentelemetry-proto-gen/metrics/v1" + "go.opentelemetry.io/collector/consumer/pdata" + otlpmetrics "go.opentelemetry.io/collector/internal/data/opentelemetry-proto-gen/metrics/v1old" ) func TestResourceMetricsSlice(t *testing.T) { @@ -695,7 +693,7 @@ func TestInt64DataPoint_CopyTo(t *testing.T) { func TestInt64DataPoint_LabelsMap(t *testing.T) { ms := NewInt64DataPoint() ms.InitEmpty() - assert.EqualValues(t, NewStringMap(), ms.LabelsMap()) + assert.EqualValues(t, pdata.NewStringMap(), ms.LabelsMap()) fillTestStringMap(ms.LabelsMap()) testValLabelsMap := generateTestStringMap() assert.EqualValues(t, testValLabelsMap, ms.LabelsMap()) @@ -704,8 +702,8 @@ func TestInt64DataPoint_LabelsMap(t *testing.T) { func TestInt64DataPoint_StartTime(t *testing.T) { ms := NewInt64DataPoint() ms.InitEmpty() - assert.EqualValues(t, TimestampUnixNano(0), ms.StartTime()) - testValStartTime := TimestampUnixNano(1234567890) + assert.EqualValues(t, pdata.TimestampUnixNano(0), ms.StartTime()) + testValStartTime := pdata.TimestampUnixNano(1234567890) ms.SetStartTime(testValStartTime) assert.EqualValues(t, testValStartTime, ms.StartTime()) } @@ -713,8 +711,8 @@ func TestInt64DataPoint_StartTime(t *testing.T) { func TestInt64DataPoint_Timestamp(t *testing.T) { ms := NewInt64DataPoint() ms.InitEmpty() - assert.EqualValues(t, TimestampUnixNano(0), ms.Timestamp()) - testValTimestamp := TimestampUnixNano(1234567890) + assert.EqualValues(t, pdata.TimestampUnixNano(0), ms.Timestamp()) + testValTimestamp := pdata.TimestampUnixNano(1234567890) ms.SetTimestamp(testValTimestamp) assert.EqualValues(t, testValTimestamp, ms.Timestamp()) } @@ -864,7 +862,7 @@ func TestDoubleDataPoint_CopyTo(t *testing.T) { func TestDoubleDataPoint_LabelsMap(t *testing.T) { ms := NewDoubleDataPoint() ms.InitEmpty() - assert.EqualValues(t, NewStringMap(), ms.LabelsMap()) + assert.EqualValues(t, pdata.NewStringMap(), ms.LabelsMap()) fillTestStringMap(ms.LabelsMap()) testValLabelsMap := generateTestStringMap() assert.EqualValues(t, testValLabelsMap, ms.LabelsMap()) @@ -873,8 +871,8 @@ func TestDoubleDataPoint_LabelsMap(t *testing.T) { func TestDoubleDataPoint_StartTime(t *testing.T) { ms := NewDoubleDataPoint() ms.InitEmpty() - assert.EqualValues(t, 
TimestampUnixNano(0), ms.StartTime()) - testValStartTime := TimestampUnixNano(1234567890) + assert.EqualValues(t, pdata.TimestampUnixNano(0), ms.StartTime()) + testValStartTime := pdata.TimestampUnixNano(1234567890) ms.SetStartTime(testValStartTime) assert.EqualValues(t, testValStartTime, ms.StartTime()) } @@ -882,8 +880,8 @@ func TestDoubleDataPoint_StartTime(t *testing.T) { func TestDoubleDataPoint_Timestamp(t *testing.T) { ms := NewDoubleDataPoint() ms.InitEmpty() - assert.EqualValues(t, TimestampUnixNano(0), ms.Timestamp()) - testValTimestamp := TimestampUnixNano(1234567890) + assert.EqualValues(t, pdata.TimestampUnixNano(0), ms.Timestamp()) + testValTimestamp := pdata.TimestampUnixNano(1234567890) ms.SetTimestamp(testValTimestamp) assert.EqualValues(t, testValTimestamp, ms.Timestamp()) } @@ -1033,7 +1031,7 @@ func TestHistogramDataPoint_CopyTo(t *testing.T) { func TestHistogramDataPoint_LabelsMap(t *testing.T) { ms := NewHistogramDataPoint() ms.InitEmpty() - assert.EqualValues(t, NewStringMap(), ms.LabelsMap()) + assert.EqualValues(t, pdata.NewStringMap(), ms.LabelsMap()) fillTestStringMap(ms.LabelsMap()) testValLabelsMap := generateTestStringMap() assert.EqualValues(t, testValLabelsMap, ms.LabelsMap()) @@ -1042,8 +1040,8 @@ func TestHistogramDataPoint_LabelsMap(t *testing.T) { func TestHistogramDataPoint_StartTime(t *testing.T) { ms := NewHistogramDataPoint() ms.InitEmpty() - assert.EqualValues(t, TimestampUnixNano(0), ms.StartTime()) - testValStartTime := TimestampUnixNano(1234567890) + assert.EqualValues(t, pdata.TimestampUnixNano(0), ms.StartTime()) + testValStartTime := pdata.TimestampUnixNano(1234567890) ms.SetStartTime(testValStartTime) assert.EqualValues(t, testValStartTime, ms.StartTime()) } @@ -1051,8 +1049,8 @@ func TestHistogramDataPoint_StartTime(t *testing.T) { func TestHistogramDataPoint_Timestamp(t *testing.T) { ms := NewHistogramDataPoint() ms.InitEmpty() - assert.EqualValues(t, TimestampUnixNano(0), ms.Timestamp()) - testValTimestamp := TimestampUnixNano(1234567890) + assert.EqualValues(t, pdata.TimestampUnixNano(0), ms.Timestamp()) + testValTimestamp := pdata.TimestampUnixNano(1234567890) ms.SetTimestamp(testValTimestamp) assert.EqualValues(t, testValTimestamp, ms.Timestamp()) } @@ -1263,8 +1261,8 @@ func TestHistogramBucketExemplar_CopyTo(t *testing.T) { func TestHistogramBucketExemplar_Timestamp(t *testing.T) { ms := NewHistogramBucketExemplar() ms.InitEmpty() - assert.EqualValues(t, TimestampUnixNano(0), ms.Timestamp()) - testValTimestamp := TimestampUnixNano(1234567890) + assert.EqualValues(t, pdata.TimestampUnixNano(0), ms.Timestamp()) + testValTimestamp := pdata.TimestampUnixNano(1234567890) ms.SetTimestamp(testValTimestamp) assert.EqualValues(t, testValTimestamp, ms.Timestamp()) } @@ -1281,7 +1279,7 @@ func TestHistogramBucketExemplar_Value(t *testing.T) { func TestHistogramBucketExemplar_Attachments(t *testing.T) { ms := NewHistogramBucketExemplar() ms.InitEmpty() - assert.EqualValues(t, NewStringMap(), ms.Attachments()) + assert.EqualValues(t, pdata.NewStringMap(), ms.Attachments()) fillTestStringMap(ms.Attachments()) testValAttachments := generateTestStringMap() assert.EqualValues(t, testValAttachments, ms.Attachments()) @@ -1423,7 +1421,7 @@ func TestSummaryDataPoint_CopyTo(t *testing.T) { func TestSummaryDataPoint_LabelsMap(t *testing.T) { ms := NewSummaryDataPoint() ms.InitEmpty() - assert.EqualValues(t, NewStringMap(), ms.LabelsMap()) + assert.EqualValues(t, pdata.NewStringMap(), ms.LabelsMap()) fillTestStringMap(ms.LabelsMap()) testValLabelsMap := 
generateTestStringMap() assert.EqualValues(t, testValLabelsMap, ms.LabelsMap()) @@ -1432,8 +1430,8 @@ func TestSummaryDataPoint_LabelsMap(t *testing.T) { func TestSummaryDataPoint_StartTime(t *testing.T) { ms := NewSummaryDataPoint() ms.InitEmpty() - assert.EqualValues(t, TimestampUnixNano(0), ms.StartTime()) - testValStartTime := TimestampUnixNano(1234567890) + assert.EqualValues(t, pdata.TimestampUnixNano(0), ms.StartTime()) + testValStartTime := pdata.TimestampUnixNano(1234567890) ms.SetStartTime(testValStartTime) assert.EqualValues(t, testValStartTime, ms.StartTime()) } @@ -1441,8 +1439,8 @@ func TestSummaryDataPoint_StartTime(t *testing.T) { func TestSummaryDataPoint_Timestamp(t *testing.T) { ms := NewSummaryDataPoint() ms.InitEmpty() - assert.EqualValues(t, TimestampUnixNano(0), ms.Timestamp()) - testValTimestamp := TimestampUnixNano(1234567890) + assert.EqualValues(t, pdata.TimestampUnixNano(0), ms.Timestamp()) + testValTimestamp := pdata.TimestampUnixNano(1234567890) ms.SetTimestamp(testValTimestamp) assert.EqualValues(t, testValTimestamp, ms.Timestamp()) } @@ -1742,8 +1740,8 @@ func generateTestInt64DataPoint() Int64DataPoint { func fillTestInt64DataPoint(tv Int64DataPoint) { fillTestStringMap(tv.LabelsMap()) - tv.SetStartTime(TimestampUnixNano(1234567890)) - tv.SetTimestamp(TimestampUnixNano(1234567890)) + tv.SetStartTime(pdata.TimestampUnixNano(1234567890)) + tv.SetTimestamp(pdata.TimestampUnixNano(1234567890)) tv.SetValue(int64(-17)) } @@ -1769,8 +1767,8 @@ func generateTestDoubleDataPoint() DoubleDataPoint { func fillTestDoubleDataPoint(tv DoubleDataPoint) { fillTestStringMap(tv.LabelsMap()) - tv.SetStartTime(TimestampUnixNano(1234567890)) - tv.SetTimestamp(TimestampUnixNano(1234567890)) + tv.SetStartTime(pdata.TimestampUnixNano(1234567890)) + tv.SetTimestamp(pdata.TimestampUnixNano(1234567890)) tv.SetValue(float64(17.13)) } @@ -1796,8 +1794,8 @@ func generateTestHistogramDataPoint() HistogramDataPoint { func fillTestHistogramDataPoint(tv HistogramDataPoint) { fillTestStringMap(tv.LabelsMap()) - tv.SetStartTime(TimestampUnixNano(1234567890)) - tv.SetTimestamp(TimestampUnixNano(1234567890)) + tv.SetStartTime(pdata.TimestampUnixNano(1234567890)) + tv.SetTimestamp(pdata.TimestampUnixNano(1234567890)) tv.SetCount(uint64(17)) tv.SetSum(float64(17.13)) fillTestHistogramBucketSlice(tv.Buckets()) @@ -1838,7 +1836,7 @@ func generateTestHistogramBucketExemplar() HistogramBucketExemplar { } func fillTestHistogramBucketExemplar(tv HistogramBucketExemplar) { - tv.SetTimestamp(TimestampUnixNano(1234567890)) + tv.SetTimestamp(pdata.TimestampUnixNano(1234567890)) tv.SetValue(float64(17.13)) fillTestStringMap(tv.Attachments()) } @@ -1865,8 +1863,8 @@ func generateTestSummaryDataPoint() SummaryDataPoint { func fillTestSummaryDataPoint(tv SummaryDataPoint) { fillTestStringMap(tv.LabelsMap()) - tv.SetStartTime(TimestampUnixNano(1234567890)) - tv.SetTimestamp(TimestampUnixNano(1234567890)) + tv.SetStartTime(pdata.TimestampUnixNano(1234567890)) + tv.SetTimestamp(pdata.TimestampUnixNano(1234567890)) tv.SetCount(uint64(17)) tv.SetSum(float64(17.13)) fillTestSummaryValueAtPercentileSlice(tv.ValueAtPercentiles()) @@ -1896,3 +1894,44 @@ func fillTestSummaryValueAtPercentile(tv SummaryValueAtPercentile) { tv.SetPercentile(float64(0.90)) tv.SetValue(float64(17.13)) } + +func generateTestInstrumentationLibrary() pdata.InstrumentationLibrary { + tv := pdata.NewInstrumentationLibrary() + tv.InitEmpty() + fillTestInstrumentationLibrary(tv) + return tv +} + +func fillTestInstrumentationLibrary(tv 
pdata.InstrumentationLibrary) { + tv.SetName("test_name") + tv.SetVersion("test_version") +} + +func generateTestStringMap() pdata.StringMap { + sm := pdata.NewStringMap() + fillTestStringMap(sm) + return sm +} + +func fillTestStringMap(dest pdata.StringMap) { + dest.InitFromMap(map[string]string{ + "k": "v", + }) +} + +func generateTestResource() pdata.Resource { + tv := pdata.NewResource() + tv.InitEmpty() + fillTestResource(tv) + return tv +} + +func fillTestResource(tv pdata.Resource) { + fillTestAttributeMap(tv.Attributes()) +} + +func fillTestAttributeMap(dest pdata.AttributeMap) { + dest.InitFromMap(map[string]pdata.AttributeValue{ + "k": pdata.NewAttributeValueString("v"), + }) +} diff --git a/internal/data/metric.go b/internal/dataold/metric.go similarity index 80% rename from internal/data/metric.go rename to internal/dataold/metric.go index bd565bc3034..8118fca8049 100644 --- a/internal/data/metric.go +++ b/internal/dataold/metric.go @@ -12,15 +12,30 @@ // See the License for the specific language governing permissions and // limitations under the License. -package data +package dataold import ( "github.com/gogo/protobuf/proto" - "go.opentelemetry.io/collector/consumer/pdata" - otlpmetrics "go.opentelemetry.io/collector/internal/data/opentelemetry-proto-gen/metrics/v1" + otlpmetrics "go.opentelemetry.io/collector/internal/data/opentelemetry-proto-gen/metrics/v1old" ) +type MetricType otlpmetrics.MetricDescriptor_Type + +const ( + MetricTypeInvalid = MetricType(otlpmetrics.MetricDescriptor_INVALID_TYPE) + MetricTypeInt64 = MetricType(otlpmetrics.MetricDescriptor_INT64) + MetricTypeDouble = MetricType(otlpmetrics.MetricDescriptor_DOUBLE) + MetricTypeMonotonicInt64 = MetricType(otlpmetrics.MetricDescriptor_MONOTONIC_INT64) + MetricTypeMonotonicDouble = MetricType(otlpmetrics.MetricDescriptor_MONOTONIC_DOUBLE) + MetricTypeHistogram = MetricType(otlpmetrics.MetricDescriptor_HISTOGRAM) + MetricTypeSummary = MetricType(otlpmetrics.MetricDescriptor_SUMMARY) +) + +func (mt MetricType) String() string { + return otlpmetrics.MetricDescriptor_Type(mt).String() +} + // This file defines in-memory data structures to represent metrics. // For the proto representation see https://github.com/open-telemetry/opentelemetry-proto/blob/master/opentelemetry/proto/metrics/v1/metrics.proto @@ -63,8 +78,8 @@ func (md MetricData) Clone() MetricData { return MetricDataFromOtlp(resourceMetricsClones) } -func (md MetricData) ResourceMetrics() pdata.ResourceMetricsSlice { - return pdata.InternalNewMetricsResourceSlice(md.orig) +func (md MetricData) ResourceMetrics() ResourceMetricsSlice { + return newResourceMetricsSlice(md.orig) } // MetricCount calculates the total number of metrics. diff --git a/internal/data/metric_test.go b/internal/dataold/metric_test.go similarity index 94% rename from internal/data/metric_test.go rename to internal/dataold/metric_test.go index 3cd8b135393..45617fc70e0 100644 --- a/internal/data/metric_test.go +++ b/internal/dataold/metric_test.go @@ -12,17 +12,20 @@ // See the License for the specific language governing permissions and // limitations under the License. 
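With MetricType and its constants now defined in internal/dataold (metric.go above) rather than in pdata, code that branches on a metric descriptor's type switches on dataold values, as the loggingexporter changes earlier in this diff do. A small hypothetical helper illustrating the pattern with the accessors from the relocated generated API:

    package example // hypothetical location inside the collector module

    import "go.opentelemetry.io/collector/internal/dataold"

    // dataPointCount is not part of this change; it mirrors the type switch
    // the logging exporter uses over the relocated dataold.MetricType values.
    func dataPointCount(m dataold.Metric) int {
        md := m.MetricDescriptor()
        if md.IsNil() {
            return 0
        }
        switch md.Type() {
        case dataold.MetricTypeInt64, dataold.MetricTypeMonotonicInt64:
            return m.Int64DataPoints().Len()
        case dataold.MetricTypeDouble, dataold.MetricTypeMonotonicDouble:
            return m.DoubleDataPoints().Len()
        case dataold.MetricTypeHistogram:
            return m.HistogramDataPoints().Len()
        case dataold.MetricTypeSummary:
            return m.SummaryDataPoints().Len()
        default: // dataold.MetricTypeInvalid
            return 0
        }
    }
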
-package data +package dataold import ( "testing" + gogoproto "github.com/gogo/protobuf/proto" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + goproto "google.golang.org/protobuf/proto" + "google.golang.org/protobuf/types/known/emptypb" "go.opentelemetry.io/collector/consumer/pdata" otlpcommon "go.opentelemetry.io/collector/internal/data/opentelemetry-proto-gen/common/v1" - otlpmetrics "go.opentelemetry.io/collector/internal/data/opentelemetry-proto-gen/metrics/v1" + otlpmetrics "go.opentelemetry.io/collector/internal/data/opentelemetry-proto-gen/metrics/v1old" otlpresource "go.opentelemetry.io/collector/internal/data/opentelemetry-proto-gen/resource/v1" ) @@ -31,6 +34,39 @@ const ( endTime = uint64(12578940000000054321) ) +func TestResourceMetricsWireCompatibility(t *testing.T) { + // This test verifies that OTLP ProtoBufs generated using the goproto lib in the + // opentelemetry-proto repository and OTLP ProtoBufs generated using the gogoproto lib in + // this repository are wire compatible. + + // Generate ResourceMetrics as pdata struct. + pdataRM := generateTestResourceMetrics() + + // Marshal its underlying ProtoBuf to wire. + wire1, err := gogoproto.Marshal(*pdataRM.orig) + assert.NoError(t, err) + assert.NotNil(t, wire1) + + // Unmarshal from the wire to OTLP Protobuf in goproto's representation. + var goprotoMessage emptypb.Empty + err = goproto.Unmarshal(wire1, &goprotoMessage) + assert.NoError(t, err) + + // Marshal to the wire again. + wire2, err := goproto.Marshal(&goprotoMessage) + assert.NoError(t, err) + assert.NotNil(t, wire2) + + // Unmarshal from the wire into gogoproto's representation. + var gogoprotoRM otlpmetrics.ResourceMetrics + err = gogoproto.Unmarshal(wire2, &gogoprotoRM) + assert.NoError(t, err) + + // Now compare that the original and final ProtoBuf messages are the same. + // This proves that goproto and gogoproto marshaling/unmarshaling are wire compatible.
+ assert.True(t, gogoproto.Equal(*pdataRM.orig, &gogoprotoRM)) +} + func TestMetricCount(t *testing.T) { md := NewMetricData() assert.EqualValues(t, 0, md.MetricCount()) @@ -223,7 +259,7 @@ func TestOtlpToInternalReadOnly(t *testing.T) { assert.EqualValues(t, "my_metric_int", metricInt.MetricDescriptor().Name()) assert.EqualValues(t, "My metric", metricInt.MetricDescriptor().Description()) assert.EqualValues(t, "ms", metricInt.MetricDescriptor().Unit()) - assert.EqualValues(t, pdata.MetricTypeMonotonicInt64, metricInt.MetricDescriptor().Type()) + assert.EqualValues(t, MetricTypeMonotonicInt64, metricInt.MetricDescriptor().Type()) int64DataPoints := metricInt.Int64DataPoints() assert.EqualValues(t, 2, int64DataPoints.Len()) // First point @@ -242,7 +278,7 @@ func TestOtlpToInternalReadOnly(t *testing.T) { assert.EqualValues(t, "my_metric_double", metricDouble.MetricDescriptor().Name()) assert.EqualValues(t, "My metric", metricDouble.MetricDescriptor().Description()) assert.EqualValues(t, "ms", metricDouble.MetricDescriptor().Unit()) - assert.EqualValues(t, pdata.MetricTypeMonotonicDouble, metricDouble.MetricDescriptor().Type()) + assert.EqualValues(t, MetricTypeMonotonicDouble, metricDouble.MetricDescriptor().Type()) doubleDataPoints := metricDouble.DoubleDataPoints() assert.EqualValues(t, 2, doubleDataPoints.Len()) // First point @@ -261,7 +297,7 @@ func TestOtlpToInternalReadOnly(t *testing.T) { assert.EqualValues(t, "my_metric_histogram", metricHistogram.MetricDescriptor().Name()) assert.EqualValues(t, "My metric", metricHistogram.MetricDescriptor().Description()) assert.EqualValues(t, "ms", metricHistogram.MetricDescriptor().Unit()) - assert.EqualValues(t, pdata.MetricTypeHistogram, metricHistogram.MetricDescriptor().Type()) + assert.EqualValues(t, MetricTypeHistogram, metricHistogram.MetricDescriptor().Type()) histogramDataPoints := metricHistogram.HistogramDataPoints() assert.EqualValues(t, 2, histogramDataPoints.Len()) // First point @@ -290,7 +326,7 @@ func TestOtlpToInternalReadOnly(t *testing.T) { assert.EqualValues(t, "my_metric_summary", metricSummary.MetricDescriptor().Name()) assert.EqualValues(t, "My metric", metricSummary.MetricDescriptor().Description()) assert.EqualValues(t, "ms", metricSummary.MetricDescriptor().Unit()) - assert.EqualValues(t, pdata.MetricTypeSummary, metricSummary.MetricDescriptor().Type()) + assert.EqualValues(t, MetricTypeSummary, metricSummary.MetricDescriptor().Type()) summaryDataPoints := metricSummary.SummaryDataPoints() assert.EqualValues(t, 2, summaryDataPoints.Len()) // First point @@ -361,8 +397,8 @@ func TestOtlpToFromInternalIntPointsMutating(t *testing.T) { assert.EqualValues(t, "My new metric", metric.MetricDescriptor().Description()) metric.MetricDescriptor().SetUnit("1") assert.EqualValues(t, "1", metric.MetricDescriptor().Unit()) - metric.MetricDescriptor().SetType(pdata.MetricTypeInt64) - assert.EqualValues(t, pdata.MetricTypeInt64, metric.MetricDescriptor().Type()) + metric.MetricDescriptor().SetType(MetricTypeInt64) + assert.EqualValues(t, MetricTypeInt64, metric.MetricDescriptor().Type()) // Mutate DataPoints assert.EqualValues(t, 2, metric.Int64DataPoints().Len()) metric.Int64DataPoints().Resize(1) @@ -437,8 +473,8 @@ func TestOtlpToFromInternalDoublePointsMutating(t *testing.T) { assert.EqualValues(t, "My new metric", metric.MetricDescriptor().Description()) metric.MetricDescriptor().SetUnit("1") assert.EqualValues(t, "1", metric.MetricDescriptor().Unit()) - metric.MetricDescriptor().SetType(pdata.MetricTypeDouble) - 
assert.EqualValues(t, pdata.MetricTypeDouble, metric.MetricDescriptor().Type()) + metric.MetricDescriptor().SetType(MetricTypeDouble) + assert.EqualValues(t, MetricTypeDouble, metric.MetricDescriptor().Type()) // Mutate DataPoints assert.EqualValues(t, 2, metric.DoubleDataPoints().Len()) metric.DoubleDataPoints().Resize(1) @@ -513,8 +549,8 @@ func TestOtlpToFromInternalHistogramPointsMutating(t *testing.T) { assert.EqualValues(t, "My new metric", metric.MetricDescriptor().Description()) metric.MetricDescriptor().SetUnit("1") assert.EqualValues(t, "1", metric.MetricDescriptor().Unit()) - metric.MetricDescriptor().SetType(pdata.MetricTypeHistogram) - assert.EqualValues(t, pdata.MetricTypeHistogram, metric.MetricDescriptor().Type()) + metric.MetricDescriptor().SetType(MetricTypeHistogram) + assert.EqualValues(t, MetricTypeHistogram, metric.MetricDescriptor().Type()) // Mutate DataPoints assert.EqualValues(t, 2, metric.HistogramDataPoints().Len()) metric.HistogramDataPoints().Resize(1) diff --git a/internal/dataold/testdataold/common.go b/internal/dataold/testdataold/common.go new file mode 100644 index 00000000000..2ce854600b2 --- /dev/null +++ b/internal/dataold/testdataold/common.go @@ -0,0 +1,147 @@ +// Copyright The OpenTelemetry Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package testdataold + +import ( + "go.opentelemetry.io/collector/consumer/pdata" + otlpcommon "go.opentelemetry.io/collector/internal/data/opentelemetry-proto-gen/common/v1" + otlpresource "go.opentelemetry.io/collector/internal/data/opentelemetry-proto-gen/resource/v1" + "go.opentelemetry.io/collector/internal/data/testdata" +) + +var resourceAttributes1 = map[string]pdata.AttributeValue{"resource-attr": pdata.NewAttributeValueString("resource-attr-val-1")} + +func initResource1(r pdata.Resource) { + r.InitEmpty() + initResourceAttributes1(r.Attributes()) +} + +func generateOtlpResource1() *otlpresource.Resource { + return &otlpresource.Resource{ + Attributes: generateOtlpResourceAttributes1(), + } +} + +func initResourceAttributes1(dest pdata.AttributeMap) { + dest.InitFromMap(resourceAttributes1) +} + +func generateOtlpResourceAttributes1() []*otlpcommon.KeyValue { + return []*otlpcommon.KeyValue{ + { + Key: "resource-attr", + Value: &otlpcommon.AnyValue{Value: &otlpcommon.AnyValue_StringValue{StringValue: "resource-attr-val-1"}}, + }, + } +} + +func initMetricLabels1(dest pdata.StringMap) { + dest.InitFromMap(map[string]string{testdata.TestLabelKey1: testdata.TestLabelValue1}) +} + +func generateOtlpMetricLabels1() []*otlpcommon.StringKeyValue { + return []*otlpcommon.StringKeyValue{ + { + Key: testdata.TestLabelKey1, + Value: testdata.TestLabelValue1, + }, + } +} + +func initMetricLabelValue1(dest pdata.StringMap) { + dest.InitFromMap(map[string]string{testdata.TestLabelKey: testdata.TestLabelValue1}) +} + +func generateOtlpMetricLabelValue1() []*otlpcommon.StringKeyValue { + return []*otlpcommon.StringKeyValue{ + { + Key: testdata.TestLabelKey, + Value: testdata.TestLabelValue1, + }, + } +} + +func initMetricLabels12(dest pdata.StringMap) { + dest.InitFromMap(map[string]string{testdata.TestLabelKey1: testdata.TestLabelValue1, testdata.TestLabelKey2: testdata.TestLabelValue2}).Sort() +} + +func generateOtlpMetricLabels12() []*otlpcommon.StringKeyValue { + return []*otlpcommon.StringKeyValue{ + { + Key: testdata.TestLabelKey1, + Value: testdata.TestLabelValue1, + }, + { + Key: testdata.TestLabelKey2, + Value: testdata.TestLabelValue2, + }, + } +} + +func initMetricLabels13(dest pdata.StringMap) { + dest.InitFromMap(map[string]string{testdata.TestLabelKey1: testdata.TestLabelValue1, testdata.TestLabelKey3: testdata.TestLabelValue3}).Sort() +} + +func generateOtlpMetricLabels13() []*otlpcommon.StringKeyValue { + return []*otlpcommon.StringKeyValue{ + { + Key: testdata.TestLabelKey1, + Value: testdata.TestLabelValue1, + }, + { + Key: testdata.TestLabelKey3, + Value: testdata.TestLabelValue3, + }, + } +} + +func initMetricLabels2(dest pdata.StringMap) { + dest.InitFromMap(map[string]string{testdata.TestLabelKey2: testdata.TestLabelValue2}) +} + +func generateOtlpMetricLabels2() []*otlpcommon.StringKeyValue { + return []*otlpcommon.StringKeyValue{ + { + Key: testdata.TestLabelKey2, + Value: testdata.TestLabelValue2, + }, + } +} + +func initMetricLabelValue2(dest pdata.StringMap) { + dest.InitFromMap(map[string]string{testdata.TestLabelKey: testdata.TestLabelValue2}) +} + +func generateOtlpMetricLabelValue2() []*otlpcommon.StringKeyValue { + return []*otlpcommon.StringKeyValue{ + { + Key: testdata.TestLabelKey, + Value: testdata.TestLabelValue2, + }, + } +} + +func initMetricAttachment(dest pdata.StringMap) { + dest.InitFromMap(map[string]string{testdata.TestAttachmentKey: testdata.TestAttachmentValue}) +} + +func generateOtlpMetricAttachment() []*otlpcommon.StringKeyValue { + return 
[]*otlpcommon.StringKeyValue{ + { + Key: testdata.TestAttachmentKey, + Value: testdata.TestAttachmentValue, + }, + } +} diff --git a/internal/data/testdata/metric.go b/internal/dataold/testdataold/metric.go similarity index 78% rename from internal/data/testdata/metric.go rename to internal/dataold/testdataold/metric.go index 03e66a8b8f1..62ebd8d791e 100644 --- a/internal/data/testdata/metric.go +++ b/internal/dataold/testdataold/metric.go @@ -12,15 +12,14 @@ // See the License for the specific language governing permissions and // limitations under the License. -package testdata +package testdataold import ( "time" - otlpmetrics "go.opentelemetry.io/collector/internal/data/opentelemetry-proto-gen/metrics/v1" - "go.opentelemetry.io/collector/consumer/pdata" - "go.opentelemetry.io/collector/internal/data" + otlpmetrics "go.opentelemetry.io/collector/internal/data/opentelemetry-proto-gen/metrics/v1old" + "go.opentelemetry.io/collector/internal/dataold" ) var ( @@ -44,8 +43,8 @@ const ( NumMetricTests = 14 ) -func GenerateMetricDataEmpty() data.MetricData { - md := data.NewMetricData() +func GenerateMetricDataEmpty() dataold.MetricData { + md := dataold.NewMetricData() return md } @@ -53,7 +52,7 @@ func generateMetricOtlpEmpty() []*otlpmetrics.ResourceMetrics { return []*otlpmetrics.ResourceMetrics(nil) } -func GenerateMetricDataOneEmptyResourceMetrics() data.MetricData { +func GenerateMetricDataOneEmptyResourceMetrics() dataold.MetricData { md := GenerateMetricDataEmpty() md.ResourceMetrics().Resize(1) return md @@ -65,8 +64,8 @@ func generateMetricOtlpOneEmptyResourceMetrics() []*otlpmetrics.ResourceMetrics } } -func GenerateMetricDataOneEmptyOneNilResourceMetrics() data.MetricData { - return data.MetricDataFromOtlp(generateMetricOtlpOneEmptyOneNilResourceMetrics()) +func GenerateMetricDataOneEmptyOneNilResourceMetrics() dataold.MetricData { + return dataold.MetricDataFromOtlp(generateMetricOtlpOneEmptyOneNilResourceMetrics()) } func generateMetricOtlpOneEmptyOneNilResourceMetrics() []*otlpmetrics.ResourceMetrics { @@ -76,7 +75,7 @@ func generateMetricOtlpOneEmptyOneNilResourceMetrics() []*otlpmetrics.ResourceMe } } -func GenerateMetricDataNoLibraries() data.MetricData { +func GenerateMetricDataNoLibraries() dataold.MetricData { md := GenerateMetricDataOneEmptyResourceMetrics() ms0 := md.ResourceMetrics().At(0) initResource1(ms0.Resource()) @@ -91,7 +90,7 @@ func generateMetricOtlpNoLibraries() []*otlpmetrics.ResourceMetrics { } } -func GenerateMetricDataOneEmptyInstrumentationLibrary() data.MetricData { +func GenerateMetricDataOneEmptyInstrumentationLibrary() dataold.MetricData { md := GenerateMetricDataNoLibraries() md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().Resize(1) return md @@ -109,8 +108,8 @@ func generateMetricOtlpOneEmptyInstrumentationLibrary() []*otlpmetrics.ResourceM } } -func GenerateMetricDataOneEmptyOneNilInstrumentationLibrary() data.MetricData { - return data.MetricDataFromOtlp(generateMetricOtlpOneEmptyOneNilInstrumentationLibrary()) +func GenerateMetricDataOneEmptyOneNilInstrumentationLibrary() dataold.MetricData { + return dataold.MetricDataFromOtlp(generateMetricOtlpOneEmptyOneNilInstrumentationLibrary()) } func generateMetricOtlpOneEmptyOneNilInstrumentationLibrary() []*otlpmetrics.ResourceMetrics { @@ -125,7 +124,7 @@ func generateMetricOtlpOneEmptyOneNilInstrumentationLibrary() []*otlpmetrics.Res } } -func GenerateMetricDataOneMetricNoResource() data.MetricData { +func GenerateMetricDataOneMetricNoResource() dataold.MetricData { md := 
GenerateMetricDataOneEmptyResourceMetrics() rm0 := md.ResourceMetrics().At(0) rm0.InstrumentationLibraryMetrics().Resize(1) @@ -149,7 +148,7 @@ func generateMetricOtlpOneMetricNoResource() []*otlpmetrics.ResourceMetrics { } } -func GenerateMetricDataOneMetric() data.MetricData { +func GenerateMetricDataOneMetric() dataold.MetricData { md := GenerateMetricDataOneEmptyInstrumentationLibrary() rm0ils0 := md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0) rm0ils0.Metrics().Resize(1) @@ -172,7 +171,7 @@ func generateMetricOtlpOneMetric() []*otlpmetrics.ResourceMetrics { } } -func GenerateMetricDataOneMetricOneDataPoint() data.MetricData { +func GenerateMetricDataOneMetricOneDataPoint() dataold.MetricData { md := GenerateMetricDataOneEmptyInstrumentationLibrary() rm0ils0 := md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0) rm0ils0.Metrics().Resize(1) @@ -180,7 +179,7 @@ func GenerateMetricDataOneMetricOneDataPoint() data.MetricData { return md } -func GenerateMetricDataTwoMetrics() data.MetricData { +func GenerateMetricDataTwoMetrics() dataold.MetricData { md := GenerateMetricDataOneEmptyInstrumentationLibrary() rm0ils0 := md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0) rm0ils0.Metrics().Resize(2) @@ -205,8 +204,8 @@ func GenerateMetricOtlpTwoMetrics() []*otlpmetrics.ResourceMetrics { } } -func GenerateMetricDataOneMetricOneNil() data.MetricData { - return data.MetricDataFromOtlp(generateMetricOtlpOneMetricOneNil()) +func GenerateMetricDataOneMetricOneNil() dataold.MetricData { + return dataold.MetricDataFromOtlp(generateMetricOtlpOneMetricOneNil()) } func generateMetricOtlpOneMetricOneNil() []*otlpmetrics.ResourceMetrics { @@ -225,7 +224,7 @@ func generateMetricOtlpOneMetricOneNil() []*otlpmetrics.ResourceMetrics { } } -func GenerateMetricDataOneMetricNoLabels() data.MetricData { +func GenerateMetricDataOneMetricNoLabels() dataold.MetricData { md := GenerateMetricDataOneMetric() dps := md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(0).Int64DataPoints() dps.At(0).LabelsMap().InitFromMap(map[string]string{}) @@ -241,8 +240,8 @@ func generateMetricOtlpOneMetricNoLabels() []*otlpmetrics.ResourceMetrics { return md } -func GenerateMetricDataOneMetricOneNilPoint() data.MetricData { - return data.MetricDataFromOtlp(generateMetricOtlpOneMetricOneNilPoint()) +func GenerateMetricDataOneMetricOneNilPoint() dataold.MetricData { + return dataold.MetricDataFromOtlp(generateMetricOtlpOneMetricOneNilPoint()) } func generateMetricOtlpOneMetricOneNilPoint() []*otlpmetrics.ResourceMetrics { @@ -252,95 +251,95 @@ func generateMetricOtlpOneMetricOneNilPoint() []*otlpmetrics.ResourceMetrics { return md } -func GenerateMetricDataAllTypesNoDataPoints() data.MetricData { +func GenerateMetricDataAllTypesNoDataPoints() dataold.MetricData { md := GenerateMetricDataOneEmptyInstrumentationLibrary() ilm0 := md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0) ms := ilm0.Metrics() ms.Resize(6) initMetricDescriptor( - ms.At(0).MetricDescriptor(), TestGaugeDoubleMetricName, pdata.MetricTypeDouble) + ms.At(0).MetricDescriptor(), TestGaugeDoubleMetricName, dataold.MetricTypeDouble) initMetricDescriptor( - ms.At(1).MetricDescriptor(), TestGaugeIntMetricName, pdata.MetricTypeInt64) + ms.At(1).MetricDescriptor(), TestGaugeIntMetricName, dataold.MetricTypeInt64) initMetricDescriptor( - ms.At(2).MetricDescriptor(), TestCounterDoubleMetricName, pdata.MetricTypeMonotonicDouble) + ms.At(2).MetricDescriptor(), TestCounterDoubleMetricName, 
dataold.MetricTypeMonotonicDouble) initMetricDescriptor( - ms.At(3).MetricDescriptor(), TestCounterIntMetricName, pdata.MetricTypeMonotonicInt64) + ms.At(3).MetricDescriptor(), TestCounterIntMetricName, dataold.MetricTypeMonotonicInt64) initMetricDescriptor( - ms.At(4).MetricDescriptor(), TestCumulativeHistogramMetricName, pdata.MetricTypeHistogram) + ms.At(4).MetricDescriptor(), TestCumulativeHistogramMetricName, dataold.MetricTypeHistogram) initMetricDescriptor( - ms.At(5).MetricDescriptor(), TestSummaryMetricName, pdata.MetricTypeSummary) + ms.At(5).MetricDescriptor(), TestSummaryMetricName, dataold.MetricTypeSummary) return md } -func GenerateMetricDataAllTypesNilDataPoint() data.MetricData { +func GenerateMetricDataAllTypesNilDataPoint() dataold.MetricData { md := GenerateMetricDataOneEmptyInstrumentationLibrary() ilm0 := md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0) ms := ilm0.Metrics() ms.Resize(6) - nilInt64 := pdata.NewInt64DataPoint() - nilDouble := pdata.NewDoubleDataPoint() - nilHistogram := pdata.NewHistogramDataPoint() - nilSummary := pdata.NewSummaryDataPoint() + nilInt64 := dataold.NewInt64DataPoint() + nilDouble := dataold.NewDoubleDataPoint() + nilHistogram := dataold.NewHistogramDataPoint() + nilSummary := dataold.NewSummaryDataPoint() initMetricDescriptor( - ms.At(0).MetricDescriptor(), TestGaugeDoubleMetricName, pdata.MetricTypeDouble) + ms.At(0).MetricDescriptor(), TestGaugeDoubleMetricName, dataold.MetricTypeDouble) ms.At(0).DoubleDataPoints().Append(&nilDouble) initMetricDescriptor( - ms.At(1).MetricDescriptor(), TestGaugeIntMetricName, pdata.MetricTypeInt64) + ms.At(1).MetricDescriptor(), TestGaugeIntMetricName, dataold.MetricTypeInt64) ms.At(1).Int64DataPoints().Append(&nilInt64) initMetricDescriptor( - ms.At(2).MetricDescriptor(), TestCounterDoubleMetricName, pdata.MetricTypeMonotonicDouble) + ms.At(2).MetricDescriptor(), TestCounterDoubleMetricName, dataold.MetricTypeMonotonicDouble) ms.At(2).DoubleDataPoints().Append(&nilDouble) initMetricDescriptor( - ms.At(3).MetricDescriptor(), TestCounterIntMetricName, pdata.MetricTypeMonotonicInt64) + ms.At(3).MetricDescriptor(), TestCounterIntMetricName, dataold.MetricTypeMonotonicInt64) ms.At(3).Int64DataPoints().Append(&nilInt64) initMetricDescriptor( - ms.At(4).MetricDescriptor(), TestCumulativeHistogramMetricName, pdata.MetricTypeHistogram) + ms.At(4).MetricDescriptor(), TestCumulativeHistogramMetricName, dataold.MetricTypeHistogram) ms.At(4).HistogramDataPoints().Append(&nilHistogram) initMetricDescriptor( - ms.At(5).MetricDescriptor(), TestSummaryMetricName, pdata.MetricTypeSummary) + ms.At(5).MetricDescriptor(), TestSummaryMetricName, dataold.MetricTypeSummary) ms.At(5).SummaryDataPoints().Append(&nilSummary) return md } -func GenerateMetricDataAllTypesEmptyDataPoint() data.MetricData { +func GenerateMetricDataAllTypesEmptyDataPoint() dataold.MetricData { md := GenerateMetricDataOneEmptyInstrumentationLibrary() ilm0 := md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0) ms := ilm0.Metrics() ms.Resize(6) - emptyInt64 := pdata.NewInt64DataPoint() + emptyInt64 := dataold.NewInt64DataPoint() emptyInt64.InitEmpty() - emptyDouble := pdata.NewDoubleDataPoint() + emptyDouble := dataold.NewDoubleDataPoint() emptyDouble.InitEmpty() - emptyHistogram := pdata.NewHistogramDataPoint() + emptyHistogram := dataold.NewHistogramDataPoint() emptyHistogram.InitEmpty() - emptySummary := pdata.NewSummaryDataPoint() + emptySummary := dataold.NewSummaryDataPoint() emptySummary.InitEmpty() 
initMetricDescriptor( - ms.At(0).MetricDescriptor(), TestGaugeDoubleMetricName, pdata.MetricTypeDouble) + ms.At(0).MetricDescriptor(), TestGaugeDoubleMetricName, dataold.MetricTypeDouble) ms.At(0).DoubleDataPoints().Append(&emptyDouble) initMetricDescriptor( - ms.At(1).MetricDescriptor(), TestGaugeIntMetricName, pdata.MetricTypeInt64) + ms.At(1).MetricDescriptor(), TestGaugeIntMetricName, dataold.MetricTypeInt64) ms.At(1).Int64DataPoints().Append(&emptyInt64) initMetricDescriptor( - ms.At(2).MetricDescriptor(), TestCounterDoubleMetricName, pdata.MetricTypeMonotonicDouble) + ms.At(2).MetricDescriptor(), TestCounterDoubleMetricName, dataold.MetricTypeMonotonicDouble) ms.At(2).DoubleDataPoints().Append(&emptyDouble) initMetricDescriptor( - ms.At(3).MetricDescriptor(), TestCounterIntMetricName, pdata.MetricTypeMonotonicInt64) + ms.At(3).MetricDescriptor(), TestCounterIntMetricName, dataold.MetricTypeMonotonicInt64) ms.At(3).Int64DataPoints().Append(&emptyInt64) initMetricDescriptor( - ms.At(4).MetricDescriptor(), TestCumulativeHistogramMetricName, pdata.MetricTypeHistogram) + ms.At(4).MetricDescriptor(), TestCumulativeHistogramMetricName, dataold.MetricTypeHistogram) ms.At(4).HistogramDataPoints().Append(&emptyHistogram) initMetricDescriptor( - ms.At(5).MetricDescriptor(), TestSummaryMetricName, pdata.MetricTypeSummary) + ms.At(5).MetricDescriptor(), TestSummaryMetricName, dataold.MetricTypeSummary) ms.At(5).SummaryDataPoints().Append(&emptySummary) return md } -func GenerateMetricDataNilMetricDescriptor() data.MetricData { +func GenerateMetricDataNilMetricDescriptor() dataold.MetricData { md := GenerateMetricDataOneEmptyInstrumentationLibrary() ilm0 := md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0) ms := ilm0.Metrics() @@ -348,14 +347,14 @@ func GenerateMetricDataNilMetricDescriptor() data.MetricData { return md } -func GenerateMetricDataMetricTypeInvalid() data.MetricData { +func GenerateMetricDataMetricTypeInvalid() dataold.MetricData { md := GenerateMetricDataOneEmptyInstrumentationLibrary() ilm0 := md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0) ms := ilm0.Metrics() ms.Resize(1) initMetricDescriptor( - ms.At(0).MetricDescriptor(), TestGaugeDoubleMetricName, pdata.MetricTypeInvalid) + ms.At(0).MetricDescriptor(), TestGaugeDoubleMetricName, dataold.MetricTypeInvalid) return md } @@ -367,22 +366,22 @@ func generateMetricOtlpAllTypesNoDataPoints() []*otlpmetrics.ResourceMetrics { { Metrics: []*otlpmetrics.Metric{ { - MetricDescriptor: generateOtlpMetricDescriptor(TestGaugeDoubleMetricName, pdata.MetricTypeDouble), + MetricDescriptor: generateOtlpMetricDescriptor(TestGaugeDoubleMetricName, dataold.MetricTypeDouble), }, { - MetricDescriptor: generateOtlpMetricDescriptor(TestGaugeIntMetricName, pdata.MetricTypeInt64), + MetricDescriptor: generateOtlpMetricDescriptor(TestGaugeIntMetricName, dataold.MetricTypeInt64), }, { - MetricDescriptor: generateOtlpMetricDescriptor(TestCounterDoubleMetricName, pdata.MetricTypeMonotonicDouble), + MetricDescriptor: generateOtlpMetricDescriptor(TestCounterDoubleMetricName, dataold.MetricTypeMonotonicDouble), }, { - MetricDescriptor: generateOtlpMetricDescriptor(TestCounterIntMetricName, pdata.MetricTypeMonotonicInt64), + MetricDescriptor: generateOtlpMetricDescriptor(TestCounterIntMetricName, dataold.MetricTypeMonotonicInt64), }, { - MetricDescriptor: generateOtlpMetricDescriptor(TestCumulativeHistogramMetricName, pdata.MetricTypeHistogram), + MetricDescriptor: generateOtlpMetricDescriptor(TestCumulativeHistogramMetricName, 
dataold.MetricTypeHistogram), }, { - MetricDescriptor: generateOtlpMetricDescriptor(TestSummaryMetricName, pdata.MetricTypeSummary), + MetricDescriptor: generateOtlpMetricDescriptor(TestSummaryMetricName, dataold.MetricTypeSummary), }, }, }, @@ -391,8 +390,8 @@ func generateMetricOtlpAllTypesNoDataPoints() []*otlpmetrics.ResourceMetrics { } } -func GenerateMetricDataWithCountersHistogramAndSummary() data.MetricData { - metricData := data.NewMetricData() +func GenerateMetricDataWithCountersHistogramAndSummary() dataold.MetricData { + metricData := dataold.NewMetricData() metricData.ResourceMetrics().Resize(1) rms := metricData.ResourceMetrics() @@ -429,8 +428,8 @@ func generateMetricOtlpWithCountersHistogramAndSummary() []*otlpmetrics.Resource } } -func initCounterIntMetric(im pdata.Metric) { - initMetricDescriptor(im.MetricDescriptor(), TestCounterIntMetricName, pdata.MetricTypeMonotonicInt64) +func initCounterIntMetric(im dataold.Metric) { + initMetricDescriptor(im.MetricDescriptor(), TestCounterIntMetricName, dataold.MetricTypeMonotonicInt64) idps := im.Int64DataPoints() idps.Resize(2) @@ -446,8 +445,8 @@ func initCounterIntMetric(im pdata.Metric) { idp1.SetValue(456) } -func initGaugeIntMetricOneDataPoint(im pdata.Metric) { - initMetricDescriptor(im.MetricDescriptor(), TestCounterIntMetricName, pdata.MetricTypeInt64) +func initGaugeIntMetricOneDataPoint(im dataold.Metric) { + initMetricDescriptor(im.MetricDescriptor(), TestCounterIntMetricName, dataold.MetricTypeInt64) idps := im.Int64DataPoints() idps.Resize(1) idp0 := idps.At(0) @@ -459,7 +458,7 @@ func initGaugeIntMetricOneDataPoint(im pdata.Metric) { func generateOtlpCounterIntMetric() *otlpmetrics.Metric { return &otlpmetrics.Metric{ - MetricDescriptor: generateOtlpMetricDescriptor(TestCounterIntMetricName, pdata.MetricTypeMonotonicInt64), + MetricDescriptor: generateOtlpMetricDescriptor(TestCounterIntMetricName, dataold.MetricTypeMonotonicInt64), Int64DataPoints: []*otlpmetrics.Int64DataPoint{ { Labels: generateOtlpMetricLabels1(), @@ -477,8 +476,8 @@ func generateOtlpCounterIntMetric() *otlpmetrics.Metric { } } -func initCounterDoubleMetric(dm pdata.Metric) { - initMetricDescriptor(dm.MetricDescriptor(), TestCounterDoubleMetricName, pdata.MetricTypeMonotonicDouble) +func initCounterDoubleMetric(dm dataold.Metric) { + initMetricDescriptor(dm.MetricDescriptor(), TestCounterDoubleMetricName, dataold.MetricTypeMonotonicDouble) ddps := dm.DoubleDataPoints() ddps.Resize(2) @@ -498,7 +497,7 @@ func initCounterDoubleMetric(dm pdata.Metric) { func generateOtlpCounterDoubleMetric() *otlpmetrics.Metric { return &otlpmetrics.Metric{ - MetricDescriptor: generateOtlpMetricDescriptor(TestCounterDoubleMetricName, pdata.MetricTypeMonotonicDouble), + MetricDescriptor: generateOtlpMetricDescriptor(TestCounterDoubleMetricName, dataold.MetricTypeMonotonicDouble), DoubleDataPoints: []*otlpmetrics.DoubleDataPoint{ { Labels: generateOtlpMetricLabels12(), @@ -516,8 +515,8 @@ func generateOtlpCounterDoubleMetric() *otlpmetrics.Metric { } } -func initCumulativeHistogramMetric(hm pdata.Metric) { - initMetricDescriptor(hm.MetricDescriptor(), TestCumulativeHistogramMetricName, pdata.MetricTypeHistogram) +func initCumulativeHistogramMetric(hm dataold.Metric) { + initMetricDescriptor(hm.MetricDescriptor(), TestCumulativeHistogramMetricName, dataold.MetricTypeHistogram) hdps := hm.HistogramDataPoints() hdps.Resize(2) @@ -546,7 +545,7 @@ func initCumulativeHistogramMetric(hm pdata.Metric) { func generateOtlpCumulativeHistogramMetric() *otlpmetrics.Metric { return 
&otlpmetrics.Metric{ - MetricDescriptor: generateOtlpMetricDescriptor(TestCumulativeHistogramMetricName, pdata.MetricTypeHistogram), + MetricDescriptor: generateOtlpMetricDescriptor(TestCumulativeHistogramMetricName, dataold.MetricTypeHistogram), HistogramDataPoints: []*otlpmetrics.HistogramDataPoint{ { Labels: generateOtlpMetricLabels13(), @@ -580,8 +579,8 @@ func generateOtlpCumulativeHistogramMetric() *otlpmetrics.Metric { } } -func initSummaryMetric(sm pdata.Metric) { - initMetricDescriptor(sm.MetricDescriptor(), TestSummaryMetricName, pdata.MetricTypeSummary) +func initSummaryMetric(sm dataold.Metric) { + initMetricDescriptor(sm.MetricDescriptor(), TestSummaryMetricName, dataold.MetricTypeSummary) sdps := sm.SummaryDataPoints() sdps.Resize(2) @@ -604,7 +603,7 @@ func initSummaryMetric(sm pdata.Metric) { func generateOtlpSummaryMetric() *otlpmetrics.Metric { return &otlpmetrics.Metric{ - MetricDescriptor: generateOtlpMetricDescriptor(TestSummaryMetricName, pdata.MetricTypeSummary), + MetricDescriptor: generateOtlpMetricDescriptor(TestSummaryMetricName, dataold.MetricTypeSummary), SummaryDataPoints: []*otlpmetrics.SummaryDataPoint{ { Labels: generateOtlpMetricLabelValue1(), @@ -630,7 +629,7 @@ func generateOtlpSummaryMetric() *otlpmetrics.Metric { } } -func initMetricDescriptor(md pdata.MetricDescriptor, name string, ty pdata.MetricType) { +func initMetricDescriptor(md dataold.MetricDescriptor, name string, ty dataold.MetricType) { md.InitEmpty() md.SetName(name) md.SetDescription("") @@ -638,7 +637,7 @@ func initMetricDescriptor(md pdata.MetricDescriptor, name string, ty pdata.Metri md.SetType(ty) } -func generateOtlpMetricDescriptor(name string, ty pdata.MetricType) *otlpmetrics.MetricDescriptor { +func generateOtlpMetricDescriptor(name string, ty dataold.MetricType) *otlpmetrics.MetricDescriptor { return &otlpmetrics.MetricDescriptor{ Name: name, Description: "", @@ -647,7 +646,7 @@ func generateOtlpMetricDescriptor(name string, ty pdata.MetricType) *otlpmetrics } } -func GenerateMetricDataManyMetricsSameResource(metricsCount int) data.MetricData { +func GenerateMetricDataManyMetricsSameResource(metricsCount int) dataold.MetricData { md := GenerateMetricDataOneEmptyInstrumentationLibrary() rs0ilm0 := md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0) rs0ilm0.Metrics().Resize(metricsCount) diff --git a/internal/data/testdata/metric_test.go b/internal/dataold/testdataold/metric_test.go similarity index 95% rename from internal/data/testdata/metric_test.go rename to internal/dataold/testdataold/metric_test.go index b48c5df832c..678040ebaa4 100644 --- a/internal/data/testdata/metric_test.go +++ b/internal/dataold/testdataold/metric_test.go @@ -12,21 +12,20 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-package testdata +package testdataold import ( "testing" "github.com/stretchr/testify/assert" - otlpmetrics "go.opentelemetry.io/collector/internal/data/opentelemetry-proto-gen/metrics/v1" - - "go.opentelemetry.io/collector/internal/data" + otlpmetrics "go.opentelemetry.io/collector/internal/data/opentelemetry-proto-gen/metrics/v1old" + "go.opentelemetry.io/collector/internal/dataold" ) type traceMetricsCase struct { name string - td data.MetricData + td dataold.MetricData otlp []*otlpmetrics.ResourceMetrics } @@ -112,9 +111,9 @@ func TestToFromOtlpMetrics(t *testing.T) { for i := range allTestCases { test := allTestCases[i] t.Run(test.name, func(t *testing.T) { - td := data.MetricDataFromOtlp(test.otlp) + td := dataold.MetricDataFromOtlp(test.otlp) assert.EqualValues(t, test.td, td) - otlp := data.MetricDataToOtlp(td) + otlp := dataold.MetricDataToOtlp(td) assert.EqualValues(t, test.otlp, otlp) }) } diff --git a/internal/goldendataset/metric_gen.go b/internal/goldendataset/metric_gen.go index dafd3b76630..307b8eeee21 100644 --- a/internal/goldendataset/metric_gen.go +++ b/internal/goldendataset/metric_gen.go @@ -18,7 +18,7 @@ import ( "fmt" "go.opentelemetry.io/collector/consumer/pdata" - "go.opentelemetry.io/collector/internal/data" + "go.opentelemetry.io/collector/internal/dataold" ) // Simple utilities for generating metrics for testing @@ -27,7 +27,7 @@ import ( // metrics with the corresponding number/type of attributes and pass into MetricDataFromCfg to generate metrics. type MetricCfg struct { // The type of metric to generate - MetricDescriptorType pdata.MetricType + MetricDescriptorType dataold.MetricType // A prefix for every metric name MetricNamePrefix string // The number of instrumentation library metrics per resource @@ -54,7 +54,7 @@ type MetricCfg struct { // (but boring) metrics, and can be used as a starting point for making alterations. func DefaultCfg() MetricCfg { return MetricCfg{ - MetricDescriptorType: pdata.MetricTypeInt64, + MetricDescriptorType: dataold.MetricTypeInt64, MetricNamePrefix: "", NumILMPerResource: 1, NumMetricsPerILM: 1, @@ -69,12 +69,12 @@ func DefaultCfg() MetricCfg { } // DefaultMetricData produces MetricData with a default config. -func DefaultMetricData() data.MetricData { +func DefaultMetricData() dataold.MetricData { return MetricDataFromCfg(DefaultCfg()) } // MetricDataFromCfg produces MetricData with the passed-in config. 
-func MetricDataFromCfg(cfg MetricCfg) data.MetricData { +func MetricDataFromCfg(cfg MetricCfg) dataold.MetricData { return newMetricGenerator().genMetricDataFromCfg(cfg) } @@ -86,8 +86,8 @@ func newMetricGenerator() *metricGenerator { return &metricGenerator{} } -func (g *metricGenerator) genMetricDataFromCfg(cfg MetricCfg) data.MetricData { - md := data.NewMetricData() +func (g *metricGenerator) genMetricDataFromCfg(cfg MetricCfg) dataold.MetricData { + md := dataold.NewMetricData() rms := md.ResourceMetrics() rms.Resize(cfg.NumResourceMetrics) for i := 0; i < cfg.NumResourceMetrics; i++ { @@ -105,7 +105,7 @@ func (g *metricGenerator) genMetricDataFromCfg(cfg MetricCfg) data.MetricData { return md } -func (g *metricGenerator) populateIlm(cfg MetricCfg, rm pdata.ResourceMetrics) { +func (g *metricGenerator) populateIlm(cfg MetricCfg, rm dataold.ResourceMetrics) { ilms := rm.InstrumentationLibraryMetrics() ilms.Resize(cfg.NumILMPerResource) for i := 0; i < cfg.NumILMPerResource; i++ { @@ -114,7 +114,7 @@ func (g *metricGenerator) populateIlm(cfg MetricCfg, rm pdata.ResourceMetrics) { } } -func (g *metricGenerator) populateMetrics(cfg MetricCfg, ilm pdata.InstrumentationLibraryMetrics) { +func (g *metricGenerator) populateMetrics(cfg MetricCfg, ilm dataold.InstrumentationLibraryMetrics) { metrics := ilm.Metrics() metrics.Resize(cfg.NumMetricsPerILM) for i := 0; i < cfg.NumMetricsPerILM; i++ { @@ -122,19 +122,19 @@ func (g *metricGenerator) populateMetrics(cfg MetricCfg, ilm pdata.Instrumentati metric.InitEmpty() g.populateMetricDesc(cfg, metric) switch cfg.MetricDescriptorType { - case pdata.MetricTypeInt64, pdata.MetricTypeMonotonicInt64: + case dataold.MetricTypeInt64, dataold.MetricTypeMonotonicInt64: populateIntPoints(cfg, metric) - case pdata.MetricTypeDouble, pdata.MetricTypeMonotonicDouble: + case dataold.MetricTypeDouble, dataold.MetricTypeMonotonicDouble: populateDblPoints(cfg, metric) - case pdata.MetricTypeHistogram: + case dataold.MetricTypeHistogram: populateHistogramPoints(cfg, metric) - case pdata.MetricTypeSummary: + case dataold.MetricTypeSummary: populateSummaryPoints(cfg, metric) } } } -func (g *metricGenerator) populateMetricDesc(cfg MetricCfg, metric pdata.Metric) { +func (g *metricGenerator) populateMetricDesc(cfg MetricCfg, metric dataold.Metric) { desc := metric.MetricDescriptor() desc.InitEmpty() desc.SetName(fmt.Sprintf("%smetric_%d", cfg.MetricNamePrefix, g.metricID)) @@ -144,7 +144,7 @@ func (g *metricGenerator) populateMetricDesc(cfg MetricCfg, metric pdata.Metric) desc.SetType(cfg.MetricDescriptorType) } -func populateIntPoints(cfg MetricCfg, metric pdata.Metric) { +func populateIntPoints(cfg MetricCfg, metric dataold.Metric) { pts := metric.Int64DataPoints() pts.Resize(cfg.NumPtsPerMetric) for i := 0; i < cfg.NumPtsPerMetric; i++ { @@ -156,7 +156,7 @@ func populateIntPoints(cfg MetricCfg, metric pdata.Metric) { } } -func populateDblPoints(cfg MetricCfg, metric pdata.Metric) { +func populateDblPoints(cfg MetricCfg, metric dataold.Metric) { pts := metric.DoubleDataPoints() pts.Resize(cfg.NumPtsPerMetric) for i := 0; i < cfg.NumPtsPerMetric; i++ { @@ -168,7 +168,7 @@ func populateDblPoints(cfg MetricCfg, metric pdata.Metric) { } } -func populateHistogramPoints(cfg MetricCfg, metric pdata.Metric) { +func populateHistogramPoints(cfg MetricCfg, metric dataold.Metric) { pts := metric.HistogramDataPoints() pts.Resize(cfg.NumPtsPerMetric) for i := 0; i < cfg.NumPtsPerMetric; i++ { @@ -186,12 +186,12 @@ func populateHistogramPoints(cfg MetricCfg, metric pdata.Metric) { 
} } -func setHistogramBounds(hdp pdata.HistogramDataPoint, bounds ...float64) { +func setHistogramBounds(hdp dataold.HistogramDataPoint, bounds ...float64) { hdp.Buckets().Resize(len(bounds)) hdp.SetExplicitBounds(bounds) } -func addHistogramVal(hdp pdata.HistogramDataPoint, val float64, ts pdata.TimestampUnixNano) { +func addHistogramVal(hdp dataold.HistogramDataPoint, val float64, ts pdata.TimestampUnixNano) { hdp.SetCount(hdp.Count() + 1) hdp.SetSum(hdp.Sum() + val) buckets := hdp.Buckets() @@ -210,7 +210,7 @@ func addHistogramVal(hdp pdata.HistogramDataPoint, val float64, ts pdata.Timesta } } -func populateSummaryPoints(cfg MetricCfg, metric pdata.Metric) { +func populateSummaryPoints(cfg MetricCfg, metric dataold.Metric) { pts := metric.SummaryDataPoints() pts.Resize(cfg.NumPtsPerMetric) for i := 0; i < cfg.NumPtsPerMetric; i++ { @@ -227,7 +227,7 @@ func populateSummaryPoints(cfg MetricCfg, metric pdata.Metric) { } } -func setSummaryPercentiles(pt pdata.SummaryDataPoint, pctiles ...float64) { +func setSummaryPercentiles(pt dataold.SummaryDataPoint, pctiles ...float64) { vap := pt.ValueAtPercentiles() l := len(pctiles) vap.Resize(l) @@ -236,7 +236,7 @@ func setSummaryPercentiles(pt pdata.SummaryDataPoint, pctiles ...float64) { } } -func addSummaryValue(pt pdata.SummaryDataPoint, value float64, pctileIndex int) { +func addSummaryValue(pt dataold.SummaryDataPoint, value float64, pctileIndex int) { pt.SetCount(pt.Count() + 1) pt.SetSum(pt.Sum() + value) vap := pt.ValueAtPercentiles().At(pctileIndex) diff --git a/internal/goldendataset/metric_gen_test.go b/internal/goldendataset/metric_gen_test.go index 5b1e5ae2709..2235bb44a47 100644 --- a/internal/goldendataset/metric_gen_test.go +++ b/internal/goldendataset/metric_gen_test.go @@ -19,8 +19,7 @@ import ( "github.com/stretchr/testify/require" - "go.opentelemetry.io/collector/consumer/pdata" - "go.opentelemetry.io/collector/internal/data" + "go.opentelemetry.io/collector/internal/dataold" ) func TestGenDefault(t *testing.T) { @@ -60,7 +59,7 @@ func TestGenDefault(t *testing.T) { } func TestHistogramFunctions(t *testing.T) { - pt := pdata.NewHistogramDataPoint() + pt := dataold.NewHistogramDataPoint() pt.InitEmpty() setHistogramBounds(pt, 1, 2, 3, 4, 5) require.Equal(t, 5, len(pt.ExplicitBounds())) @@ -84,7 +83,7 @@ func TestHistogramFunctions(t *testing.T) { func TestGenHistogram(t *testing.T) { cfg := DefaultCfg() - cfg.MetricDescriptorType = pdata.MetricTypeHistogram + cfg.MetricDescriptorType = dataold.MetricTypeHistogram cfg.PtVal = 2 md := MetricDataFromCfg(cfg) pts := getMetric(md).HistogramDataPoints() @@ -99,7 +98,7 @@ func TestGenHistogram(t *testing.T) { } func TestSummaryFunctions(t *testing.T) { - pt := pdata.NewSummaryDataPoint() + pt := dataold.NewSummaryDataPoint() pt.InitEmpty() setSummaryPercentiles(pt, 0, 50, 95) addSummaryValue(pt, 55, 0) @@ -114,7 +113,7 @@ func TestSummaryFunctions(t *testing.T) { func TestGenSummary(t *testing.T) { cfg := DefaultCfg() - cfg.MetricDescriptorType = pdata.MetricTypeSummary + cfg.MetricDescriptorType = dataold.MetricTypeSummary md := MetricDataFromCfg(cfg) metric := getMetric(md) pts := metric.SummaryDataPoints() @@ -126,7 +125,7 @@ func TestGenSummary(t *testing.T) { func TestGenDouble(t *testing.T) { cfg := DefaultCfg() - cfg.MetricDescriptorType = pdata.MetricTypeDouble + cfg.MetricDescriptorType = dataold.MetricTypeDouble md := MetricDataFromCfg(cfg) metric := getMetric(md) pts := metric.DoubleDataPoints() @@ -135,6 +134,6 @@ func TestGenDouble(t *testing.T) { require.EqualValues(t, 1, 
pt.Value()) } -func getMetric(md data.MetricData) pdata.Metric { +func getMetric(md dataold.MetricData) dataold.Metric { return md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(0) } diff --git a/internal/goldendataset/pict_metric_gen.go b/internal/goldendataset/pict_metric_gen.go index dc84469e528..756ac196939 100644 --- a/internal/goldendataset/pict_metric_gen.go +++ b/internal/goldendataset/pict_metric_gen.go @@ -17,18 +17,17 @@ package goldendataset import ( "fmt" - "go.opentelemetry.io/collector/consumer/pdata" - "go.opentelemetry.io/collector/internal/data" + "go.opentelemetry.io/collector/internal/dataold" ) // GenerateMetricDatas takes the filename of a PICT-generated file, walks through all of the rows in the PICT // file and for each row, generates a MetricData object, collecting them and returning them to the caller. -func GenerateMetricDatas(metricPairsFile string) ([]data.MetricData, error) { +func GenerateMetricDatas(metricPairsFile string) ([]dataold.MetricData, error) { pictData, err := loadPictOutputFile(metricPairsFile) if err != nil { return nil, err } - var out []data.MetricData + var out []dataold.MetricData for i, values := range pictData { if i == 0 { continue @@ -66,17 +65,17 @@ func pictToCfg(inputs PICTMetricInputs) MetricCfg { switch inputs.MetricType { case MetricTypeInt: - cfg.MetricDescriptorType = pdata.MetricTypeInt64 + cfg.MetricDescriptorType = dataold.MetricTypeInt64 case MetricTypeMonotonicInt: - cfg.MetricDescriptorType = pdata.MetricTypeMonotonicInt64 + cfg.MetricDescriptorType = dataold.MetricTypeMonotonicInt64 case MetricTypeDouble: - cfg.MetricDescriptorType = pdata.MetricTypeDouble + cfg.MetricDescriptorType = dataold.MetricTypeDouble case MetricTypeMonotonicDouble: - cfg.MetricDescriptorType = pdata.MetricTypeMonotonicDouble + cfg.MetricDescriptorType = dataold.MetricTypeMonotonicDouble case MetricTypeHistogram: - cfg.MetricDescriptorType = pdata.MetricTypeHistogram + cfg.MetricDescriptorType = dataold.MetricTypeHistogram case MetricTypeSummary: - cfg.MetricDescriptorType = pdata.MetricTypeSummary + cfg.MetricDescriptorType = dataold.MetricTypeSummary } switch inputs.NumPtLabels { diff --git a/internal/goldendataset/pict_metric_gen_test.go b/internal/goldendataset/pict_metric_gen_test.go index d755d13689e..ee6924dd864 100644 --- a/internal/goldendataset/pict_metric_gen_test.go +++ b/internal/goldendataset/pict_metric_gen_test.go @@ -19,7 +19,7 @@ import ( "github.com/stretchr/testify/require" - "go.opentelemetry.io/collector/consumer/pdata" + "go.opentelemetry.io/collector/internal/dataold" ) func TestGenerateMetricDatas(t *testing.T) { @@ -45,7 +45,7 @@ func TestPICTtoCfg(t *testing.T) { cfg: MetricCfg{ NumResourceAttrs: 0, NumPtsPerMetric: 1, - MetricDescriptorType: pdata.MetricTypeInt64, + MetricDescriptorType: dataold.MetricTypeInt64, NumPtLabels: 0, }, }, @@ -60,7 +60,7 @@ func TestPICTtoCfg(t *testing.T) { cfg: MetricCfg{ NumResourceAttrs: 1, NumPtsPerMetric: 1, - MetricDescriptorType: pdata.MetricTypeDouble, + MetricDescriptorType: dataold.MetricTypeDouble, NumPtLabels: 1, }, }, @@ -75,7 +75,7 @@ func TestPICTtoCfg(t *testing.T) { cfg: MetricCfg{ NumResourceAttrs: 2, NumPtsPerMetric: 16, - MetricDescriptorType: pdata.MetricTypeSummary, + MetricDescriptorType: dataold.MetricTypeSummary, NumPtLabels: 16, }, }, diff --git a/processor/batchprocessor/batch_processor.go b/processor/batchprocessor/batch_processor.go index e7012e57f40..d8487fcda86 100644 --- a/processor/batchprocessor/batch_processor.go +++ 
b/processor/batchprocessor/batch_processor.go @@ -28,7 +28,7 @@ import ( "go.opentelemetry.io/collector/consumer/pdata" "go.opentelemetry.io/collector/consumer/pdatautil" "go.opentelemetry.io/collector/internal/collector/telemetry" - "go.opentelemetry.io/collector/internal/data" + "go.opentelemetry.io/collector/internal/dataold" "go.opentelemetry.io/collector/processor" ) @@ -236,7 +236,7 @@ func (bt *batchTraces) reset() { type batchMetrics struct { nextConsumer consumer.MetricsConsumer - metricData data.MetricData + metricData dataold.MetricData metricCount uint32 } @@ -260,7 +260,7 @@ func (bm *batchMetrics) size() int { // resets the current batchMetrics structure with zero/empty values. func (bm *batchMetrics) reset() { - bm.metricData = data.NewMetricData() + bm.metricData = dataold.NewMetricData() bm.metricCount = 0 } diff --git a/processor/batchprocessor/batch_processor_test.go b/processor/batchprocessor/batch_processor_test.go index 16168fc61ae..891aa9359a7 100644 --- a/processor/batchprocessor/batch_processor_test.go +++ b/processor/batchprocessor/batch_processor_test.go @@ -31,8 +31,9 @@ import ( "go.opentelemetry.io/collector/consumer/pdatautil" "go.opentelemetry.io/collector/exporter/exportertest" "go.opentelemetry.io/collector/internal/collector/telemetry" - "go.opentelemetry.io/collector/internal/data" "go.opentelemetry.io/collector/internal/data/testdata" + "go.opentelemetry.io/collector/internal/dataold" + "go.opentelemetry.io/collector/internal/dataold/testdataold" ) func TestBatchProcessorSpansDelivered(t *testing.T) { @@ -267,10 +268,10 @@ func TestBatchMetricProcessor_ReceivingData(t *testing.T) { batcher := newBatchMetricsProcessor(createParams, sink, &cfg, telemetry.Detailed) require.NoError(t, batcher.Start(context.Background(), componenttest.NewNopHost())) - metricDataSlice := make([]data.MetricData, 0, requestCount) + metricDataSlice := make([]dataold.MetricData, 0, requestCount) for requestNum := 0; requestNum < requestCount; requestNum++ { - md := testdata.GenerateMetricDataManyMetricsSameResource(metricsPerRequest) + md := testdataold.GenerateMetricDataManyMetricsSameResource(metricsPerRequest) metrics := md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics() for metricIndex := 0; metricIndex < metricsPerRequest; metricIndex++ { metrics.At(metricIndex).MetricDescriptor().SetName(getTestMetricName(requestNum, metricIndex)) @@ -281,7 +282,7 @@ func TestBatchMetricProcessor_ReceivingData(t *testing.T) { } // Added to test case with empty resources sent. 
- md := testdata.GenerateMetricDataEmpty() + md := testdataold.GenerateMetricDataEmpty() assert.NoError(t, batcher.ConsumeMetrics(context.Background(), pdatautil.MetricsFromInternalMetrics(md))) require.NoError(t, batcher.Shutdown(context.Background())) @@ -322,7 +323,7 @@ func TestBatchMetricProcessor_BatchSize(t *testing.T) { start := time.Now() size := 0 for requestNum := 0; requestNum < requestCount; requestNum++ { - md := testdata.GenerateMetricDataManyMetricsSameResource(metricsPerRequest) + md := testdataold.GenerateMetricDataManyMetricsSameResource(metricsPerRequest) size += md.Size() pd := pdatautil.MetricsFromInternalMetrics(md) assert.NoError(t, batcher.ConsumeMetrics(context.Background(), pd)) @@ -378,7 +379,7 @@ func TestBatchMetricsProcessor_Timeout(t *testing.T) { start := time.Now() for requestNum := 0; requestNum < requestCount; requestNum++ { - md := testdata.GenerateMetricDataManyMetricsSameResource(metricsPerRequest) + md := testdataold.GenerateMetricDataManyMetricsSameResource(metricsPerRequest) pd := pdatautil.MetricsFromInternalMetrics(md) assert.NoError(t, batcher.ConsumeMetrics(context.Background(), pd)) } @@ -426,7 +427,7 @@ func TestBatchMetricProcessor_Shutdown(t *testing.T) { require.NoError(t, batcher.Start(context.Background(), componenttest.NewNopHost())) for requestNum := 0; requestNum < requestCount; requestNum++ { - md := testdata.GenerateMetricDataManyMetricsSameResource(metricsPerRequest) + md := testdataold.GenerateMetricDataManyMetricsSameResource(metricsPerRequest) pd := pdatautil.MetricsFromInternalMetrics(md) assert.NoError(t, batcher.ConsumeMetrics(context.Background(), pd)) } @@ -469,8 +470,8 @@ func spansReceivedByName(tds []pdata.Traces) map[string]pdata.Span { return spansReceivedByName } -func metricsReceivedByName(mds []pdata.Metrics) map[string]pdata.Metric { - metricsReceivedByName := map[string]pdata.Metric{} +func metricsReceivedByName(mds []pdata.Metrics) map[string]dataold.Metric { + metricsReceivedByName := map[string]dataold.Metric{} for i := range mds { im := pdatautil.MetricsToInternalMetrics(mds[i]) rms := im.ResourceMetrics() diff --git a/processor/cloningfanoutconnector_test.go b/processor/cloningfanoutconnector_test.go index 707e3f87500..ebcdef04d40 100644 --- a/processor/cloningfanoutconnector_test.go +++ b/processor/cloningfanoutconnector_test.go @@ -24,6 +24,7 @@ import ( "go.opentelemetry.io/collector/consumer/pdatautil" "go.opentelemetry.io/collector/exporter/exportertest" "go.opentelemetry.io/collector/internal/data/testdata" + "go.opentelemetry.io/collector/internal/dataold/testdataold" ) func TestTraceProcessorCloningNotMultiplexing(t *testing.T) { @@ -82,7 +83,7 @@ func TestMetricsProcessorCloningMultiplexing(t *testing.T) { } mfc := NewMetricsCloningFanOutConnector(processors) - md := testdata.GenerateMetricDataWithCountersHistogramAndSummary() + md := testdataold.GenerateMetricDataWithCountersHistogramAndSummary() var wantMetricsCount = 0 for i := 0; i < 2; i++ { diff --git a/processor/fanoutconnector_test.go b/processor/fanoutconnector_test.go index b47e524f2b7..174a0ec9de8 100644 --- a/processor/fanoutconnector_test.go +++ b/processor/fanoutconnector_test.go @@ -25,6 +25,7 @@ import ( "go.opentelemetry.io/collector/consumer/pdatautil" "go.opentelemetry.io/collector/exporter/exportertest" "go.opentelemetry.io/collector/internal/data/testdata" + "go.opentelemetry.io/collector/internal/dataold/testdataold" ) func TestTracesProcessorNotMultiplexing(t *testing.T) { @@ -99,7 +100,7 @@ func 
TestMetricsProcessorMultiplexing(t *testing.T) { } mfc := NewMetricsFanOutConnector(processors) - md := testdata.GenerateMetricDataOneMetric() + md := testdataold.GenerateMetricDataOneMetric() var wantMetricsCount = 0 for i := 0; i < 2; i++ { @@ -128,7 +129,7 @@ func TestMetricsProcessorWhenOneErrors(t *testing.T) { processors[1].(*exportertest.SinkMetricsExporter).SetConsumeMetricsError(errors.New("my_error")) mfc := NewMetricsFanOutConnector(processors) - md := testdata.GenerateMetricDataOneMetric() + md := testdataold.GenerateMetricDataOneMetric() var wantMetricsCount = 0 for i := 0; i < 2; i++ { diff --git a/processor/memorylimiter/memorylimiter_test.go b/processor/memorylimiter/memorylimiter_test.go index 485c0471ba0..d93120f5a9a 100644 --- a/processor/memorylimiter/memorylimiter_test.go +++ b/processor/memorylimiter/memorylimiter_test.go @@ -29,7 +29,7 @@ import ( "go.opentelemetry.io/collector/consumer/pdata" "go.opentelemetry.io/collector/consumer/pdatautil" "go.opentelemetry.io/collector/exporter/exportertest" - "go.opentelemetry.io/collector/internal/data" + "go.opentelemetry.io/collector/internal/dataold" "go.opentelemetry.io/collector/processor/processorhelper" ) @@ -122,7 +122,7 @@ func TestMetricsMemoryPressureResponse(t *testing.T) { require.NoError(t, err) ctx := context.Background() - md := data.NewMetricData() + md := dataold.NewMetricData() // Below memAllocLimit. currentMemAlloc = 800 diff --git a/processor/processorhelper/processor_test.go b/processor/processorhelper/processor_test.go index 9215e1ba6d1..bdabc8924b0 100644 --- a/processor/processorhelper/processor_test.go +++ b/processor/processorhelper/processor_test.go @@ -30,6 +30,7 @@ import ( "go.opentelemetry.io/collector/consumer/pdatautil" "go.opentelemetry.io/collector/exporter/exportertest" "go.opentelemetry.io/collector/internal/data/testdata" + "go.opentelemetry.io/collector/internal/dataold/testdataold" ) const testFullName = "testFullName" @@ -110,7 +111,7 @@ func TestNewMetricsExporter(t *testing.T) { require.NoError(t, err) assert.NoError(t, me.Start(context.Background(), componenttest.NewNopHost())) - assert.NoError(t, me.ConsumeMetrics(context.Background(), pdatautil.MetricsFromInternalMetrics(testdata.GenerateMetricDataEmpty()))) + assert.NoError(t, me.ConsumeMetrics(context.Background(), pdatautil.MetricsFromInternalMetrics(testdataold.GenerateMetricDataEmpty()))) assert.NoError(t, me.Shutdown(context.Background())) } @@ -126,13 +127,13 @@ func TestNewMetricsExporter_ProcessMetricsError(t *testing.T) { want := errors.New("my_error") me, err := NewMetricsProcessor(testCfg, exportertest.NewNopMetricsExporter(), newTestMProcessor(want)) require.NoError(t, err) - assert.Equal(t, want, me.ConsumeMetrics(context.Background(), pdatautil.MetricsFromInternalMetrics(testdata.GenerateMetricDataEmpty()))) + assert.Equal(t, want, me.ConsumeMetrics(context.Background(), pdatautil.MetricsFromInternalMetrics(testdataold.GenerateMetricDataEmpty()))) } func TestNewMetricsExporter_ProcessMetricsErrSkipProcessingData(t *testing.T) { me, err := NewMetricsProcessor(testCfg, exportertest.NewNopMetricsExporter(), newTestMProcessor(ErrSkipProcessingData)) require.NoError(t, err) - assert.Equal(t, nil, me.ConsumeMetrics(context.Background(), pdatautil.MetricsFromInternalMetrics(testdata.GenerateMetricDataEmpty()))) + assert.Equal(t, nil, me.ConsumeMetrics(context.Background(), pdatautil.MetricsFromInternalMetrics(testdataold.GenerateMetricDataEmpty()))) } func TestNewLogsExporter(t *testing.T) { diff --git 
a/processor/queuedprocessor/queued_processor_test.go b/processor/queuedprocessor/queued_processor_test.go index 417752962ee..595cbe4c3dc 100644 --- a/processor/queuedprocessor/queued_processor_test.go +++ b/processor/queuedprocessor/queued_processor_test.go @@ -36,6 +36,7 @@ import ( "go.opentelemetry.io/collector/consumer/pdatautil" "go.opentelemetry.io/collector/internal/collector/telemetry" "go.opentelemetry.io/collector/internal/data/testdata" + "go.opentelemetry.io/collector/internal/dataold/testdataold" "go.opentelemetry.io/collector/obsreport/obsreporttest" "go.opentelemetry.io/collector/processor" ) @@ -187,7 +188,7 @@ func TestMetricsQueueProcessor_NoEnqueueOnPermanentError(t *testing.T) { require.NoError(t, err) defer doneFn() - md := pdatautil.MetricsFromInternalMetrics(testdata.GenerateMetricDataTwoMetrics()) + md := pdatautil.MetricsFromInternalMetrics(testdataold.GenerateMetricDataTwoMetrics()) mockP := newMockConcurrentSpanProcessor() mockP.updateError(consumererror.Permanent(errors.New("bad data"))) @@ -219,7 +220,7 @@ func TestMetricsQueueProcessor_NoEnqueueOnNoRetry(t *testing.T) { require.NoError(t, err) defer doneFn() - md := pdatautil.MetricsFromInternalMetrics(testdata.GenerateMetricDataTwoMetrics()) + md := pdatautil.MetricsFromInternalMetrics(testdataold.GenerateMetricDataTwoMetrics()) mockP := newMockConcurrentSpanProcessor() mockP.updateError(errors.New("transient error")) @@ -251,7 +252,7 @@ func TestMetricsQueueProcessor_EnqueueOnError(t *testing.T) { require.NoError(t, err) defer doneFn() - md := pdatautil.MetricsFromInternalMetrics(testdata.GenerateMetricDataTwoMetrics()) + md := pdatautil.MetricsFromInternalMetrics(testdataold.GenerateMetricDataTwoMetrics()) mockP := newMockConcurrentSpanProcessor() mockP.updateError(errors.New("transient error")) @@ -350,7 +351,7 @@ func TestMetricsQueueProcessorHappyPath(t *testing.T) { wantBatches := 10 wantMetricPoints := 2 * 20 for i := 0; i < wantBatches; i++ { - md := pdatautil.MetricsFromInternalMetrics(testdata.GenerateMetricDataTwoMetrics()) + md := pdatautil.MetricsFromInternalMetrics(testdataold.GenerateMetricDataTwoMetrics()) mockP.run(func() { require.NoError(t, qp.ConsumeMetrics(context.Background(), md)) }) diff --git a/processor/resourceprocessor/resource_processor_test.go b/processor/resourceprocessor/resource_processor_test.go index d144756219b..6c662a58781 100644 --- a/processor/resourceprocessor/resource_processor_test.go +++ b/processor/resourceprocessor/resource_processor_test.go @@ -26,6 +26,7 @@ import ( "go.opentelemetry.io/collector/consumer/pdata" "go.opentelemetry.io/collector/consumer/pdatautil" "go.opentelemetry.io/collector/internal/data/testdata" + "go.opentelemetry.io/collector/internal/dataold/testdataold" "go.opentelemetry.io/collector/processor/processorhelper" ) @@ -146,7 +147,7 @@ func generateTraceData(attributes map[string]string) pdata.Traces { } func generateMetricData(attributes map[string]string) pdata.Metrics { - md := testdata.GenerateMetricDataOneMetricNoResource() + md := testdataold.GenerateMetricDataOneMetricNoResource() if attributes == nil { return pdatautil.MetricsFromInternalMetrics(md) } diff --git a/receiver/hostmetricsreceiver/hostmetrics_receiver.go b/receiver/hostmetricsreceiver/hostmetrics_receiver.go index 30642432a24..5b5271058e5 100644 --- a/receiver/hostmetricsreceiver/hostmetrics_receiver.go +++ b/receiver/hostmetricsreceiver/hostmetrics_receiver.go @@ -26,7 +26,7 @@ import ( "go.opentelemetry.io/collector/component/componenterror" 
"go.opentelemetry.io/collector/consumer" "go.opentelemetry.io/collector/consumer/pdatautil" - "go.opentelemetry.io/collector/internal/data" + "go.opentelemetry.io/collector/internal/dataold" "go.opentelemetry.io/collector/receiver/hostmetricsreceiver/internal" ) @@ -162,7 +162,7 @@ func (hmr *receiver) scrapeMetrics(ctx context.Context) { defer span.End() var errors []error - metricData := data.NewMetricData() + metricData := dataold.NewMetricData() if err := hmr.scrapeAndAppendHostMetrics(ctx, metricData); err != nil { errors = append(errors, err) @@ -182,7 +182,7 @@ func (hmr *receiver) scrapeMetrics(ctx context.Context) { } } -func (hmr *receiver) scrapeAndAppendHostMetrics(ctx context.Context, metricData data.MetricData) error { +func (hmr *receiver) scrapeAndAppendHostMetrics(ctx context.Context, metricData dataold.MetricData) error { if len(hmr.hostMetricScrapers) == 0 { return nil } @@ -202,7 +202,7 @@ func (hmr *receiver) scrapeAndAppendHostMetrics(ctx context.Context, metricData return componenterror.CombineErrors(errors) } -func (hmr *receiver) scrapeAndAppendResourceMetrics(ctx context.Context, metricData data.MetricData) error { +func (hmr *receiver) scrapeAndAppendResourceMetrics(ctx context.Context, metricData dataold.MetricData) error { if len(hmr.resourceMetricScrapers) == 0 { return nil } diff --git a/receiver/hostmetricsreceiver/hostmetrics_receiver_test.go b/receiver/hostmetricsreceiver/hostmetrics_receiver_test.go index 8265f92e237..42fd3351fed 100644 --- a/receiver/hostmetricsreceiver/hostmetrics_receiver_test.go +++ b/receiver/hostmetricsreceiver/hostmetrics_receiver_test.go @@ -30,6 +30,7 @@ import ( "go.opentelemetry.io/collector/consumer/pdata" "go.opentelemetry.io/collector/consumer/pdatautil" "go.opentelemetry.io/collector/exporter/exportertest" + "go.opentelemetry.io/collector/internal/dataold" "go.opentelemetry.io/collector/receiver/hostmetricsreceiver/internal" "go.opentelemetry.io/collector/receiver/hostmetricsreceiver/internal/scraper/cpuscraper" "go.opentelemetry.io/collector/receiver/hostmetricsreceiver/internal/scraper/diskscraper" @@ -182,13 +183,13 @@ func assertIncludesResourceMetrics(t *testing.T, got pdata.Metrics) { } } -func getMetricSlice(t *testing.T, rm pdata.ResourceMetrics) pdata.MetricSlice { +func getMetricSlice(t *testing.T, rm dataold.ResourceMetrics) dataold.MetricSlice { ilms := rm.InstrumentationLibraryMetrics() require.Equal(t, 1, ilms.Len()) return ilms.At(0).Metrics() } -func getReturnedMetricNames(metrics pdata.MetricSlice) map[string]struct{} { +func getReturnedMetricNames(metrics dataold.MetricSlice) map[string]struct{} { metricNames := make(map[string]struct{}) for i := 0; i < metrics.Len(); i++ { metricNames[metrics.At(i).MetricDescriptor().Name()] = struct{}{} @@ -218,8 +219,8 @@ func (m *mockFactory) CreateMetricsScraper(ctx context.Context, logger *zap.Logg func (m *mockScraper) Initialize(ctx context.Context) error { return nil } func (m *mockScraper) Close(ctx context.Context) error { return nil } -func (m *mockScraper) ScrapeMetrics(ctx context.Context) (pdata.MetricSlice, error) { - return pdata.NewMetricSlice(), errors.New("err1") +func (m *mockScraper) ScrapeMetrics(ctx context.Context) (dataold.MetricSlice, error) { + return dataold.NewMetricSlice(), errors.New("err1") } type mockResourceFactory struct{ mock.Mock } @@ -233,8 +234,8 @@ func (m *mockResourceFactory) CreateMetricsScraper(ctx context.Context, logger * func (m *mockResourceScraper) Initialize(ctx context.Context) error { return nil } func (m 
*mockResourceScraper) Close(ctx context.Context) error { return nil } -func (m *mockResourceScraper) ScrapeMetrics(ctx context.Context) (pdata.ResourceMetricsSlice, error) { - return pdata.NewResourceMetricsSlice(), errors.New("err2") +func (m *mockResourceScraper) ScrapeMetrics(ctx context.Context) (dataold.ResourceMetricsSlice, error) { + return dataold.NewResourceMetricsSlice(), errors.New("err2") } func TestGatherMetrics_ScraperKeyConfigError(t *testing.T) { diff --git a/receiver/hostmetricsreceiver/internal/scraper.go b/receiver/hostmetricsreceiver/internal/scraper.go index c7cd62b9936..42fc4e101f9 100644 --- a/receiver/hostmetricsreceiver/internal/scraper.go +++ b/receiver/hostmetricsreceiver/internal/scraper.go @@ -19,7 +19,7 @@ import ( "go.uber.org/zap" - "go.opentelemetry.io/collector/consumer/pdata" + "go.opentelemetry.io/collector/internal/dataold" ) // BaseScraper gathers metrics from the host machine. @@ -45,7 +45,7 @@ type Scraper interface { // ScrapeMetrics returns relevant scraped metrics. If errors occur // scraping some metrics, an error should be returned, but any // metrics that were successfully scraped should still be returned. - ScrapeMetrics(ctx context.Context) (pdata.MetricSlice, error) + ScrapeMetrics(ctx context.Context) (dataold.MetricSlice, error) } // ScraperFactory can create a MetricScraper. @@ -66,7 +66,7 @@ type ResourceScraper interface { // If errors occur scraping some metrics, an error should be // returned, but any metrics that were successfully scraped // should still be returned. - ScrapeMetrics(ctx context.Context) (pdata.ResourceMetricsSlice, error) + ScrapeMetrics(ctx context.Context) (dataold.ResourceMetricsSlice, error) } // ResourceScraperFactory can create a ResourceScraper. diff --git a/receiver/hostmetricsreceiver/internal/scraper/cpuscraper/cpu_metadata.go b/receiver/hostmetricsreceiver/internal/scraper/cpuscraper/cpu_metadata.go index 990115d57d0..e9c83786986 100644 --- a/receiver/hostmetricsreceiver/internal/scraper/cpuscraper/cpu_metadata.go +++ b/receiver/hostmetricsreceiver/internal/scraper/cpuscraper/cpu_metadata.go @@ -15,7 +15,7 @@ package cpuscraper import ( - "go.opentelemetry.io/collector/consumer/pdata" + "go.opentelemetry.io/collector/internal/dataold" ) // labels @@ -40,12 +40,12 @@ const ( // descriptors -var cpuTimeDescriptor = func() pdata.MetricDescriptor { - descriptor := pdata.NewMetricDescriptor() +var cpuTimeDescriptor = func() dataold.MetricDescriptor { + descriptor := dataold.NewMetricDescriptor() descriptor.InitEmpty() descriptor.SetName("system.cpu.time") descriptor.SetDescription("Total CPU seconds broken down by different states.") descriptor.SetUnit("s") - descriptor.SetType(pdata.MetricTypeMonotonicDouble) + descriptor.SetType(dataold.MetricTypeMonotonicDouble) return descriptor }() diff --git a/receiver/hostmetricsreceiver/internal/scraper/cpuscraper/cpu_scraper.go b/receiver/hostmetricsreceiver/internal/scraper/cpuscraper/cpu_scraper.go index f0a1ab0c1a6..6c74aa7a29b 100644 --- a/receiver/hostmetricsreceiver/internal/scraper/cpuscraper/cpu_scraper.go +++ b/receiver/hostmetricsreceiver/internal/scraper/cpuscraper/cpu_scraper.go @@ -22,6 +22,7 @@ import ( "github.com/shirou/gopsutil/host" "go.opentelemetry.io/collector/consumer/pdata" + "go.opentelemetry.io/collector/internal/dataold" "go.opentelemetry.io/collector/receiver/hostmetricsreceiver/internal" ) @@ -57,8 +58,8 @@ func (s *scraper) Close(_ context.Context) error { } // ScrapeMetrics -func (s *scraper) ScrapeMetrics(_ context.Context) 
(pdata.MetricSlice, error) { - metrics := pdata.NewMetricSlice() +func (s *scraper) ScrapeMetrics(_ context.Context) (dataold.MetricSlice, error) { + metrics := dataold.NewMetricSlice() now := internal.TimeToUnixNano(time.Now()) cpuTimes, err := s.times( /*percpu=*/ true) @@ -71,7 +72,7 @@ func (s *scraper) ScrapeMetrics(_ context.Context) (pdata.MetricSlice, error) { return metrics, nil } -func initializeCPUTimeMetric(metric pdata.Metric, startTime, now pdata.TimestampUnixNano, cpuTimes []cpu.TimesStat) { +func initializeCPUTimeMetric(metric dataold.Metric, startTime, now pdata.TimestampUnixNano, cpuTimes []cpu.TimesStat) { cpuTimeDescriptor.CopyTo(metric.MetricDescriptor()) ddps := metric.DoubleDataPoints() @@ -83,7 +84,7 @@ func initializeCPUTimeMetric(metric pdata.Metric, startTime, now pdata.Timestamp const gopsCPUTotal string = "cpu-total" -func initializeCPUTimeDataPoint(dataPoint pdata.DoubleDataPoint, startTime, now pdata.TimestampUnixNano, cpuLabel string, stateLabel string, value float64) { +func initializeCPUTimeDataPoint(dataPoint dataold.DoubleDataPoint, startTime, now pdata.TimestampUnixNano, cpuLabel string, stateLabel string, value float64) { labelsMap := dataPoint.LabelsMap() // ignore cpu label if reporting "total" cpu usage if cpuLabel != gopsCPUTotal { diff --git a/receiver/hostmetricsreceiver/internal/scraper/cpuscraper/cpu_scraper_linux.go b/receiver/hostmetricsreceiver/internal/scraper/cpuscraper/cpu_scraper_linux.go index 33352128765..8ae3591435c 100644 --- a/receiver/hostmetricsreceiver/internal/scraper/cpuscraper/cpu_scraper_linux.go +++ b/receiver/hostmetricsreceiver/internal/scraper/cpuscraper/cpu_scraper_linux.go @@ -20,11 +20,12 @@ import ( "github.com/shirou/gopsutil/cpu" "go.opentelemetry.io/collector/consumer/pdata" + "go.opentelemetry.io/collector/internal/dataold" ) const cpuStatesLen = 8 -func appendCPUTimeStateDataPoints(ddps pdata.DoubleDataPointSlice, startIdx int, startTime, now pdata.TimestampUnixNano, cpuTime cpu.TimesStat) { +func appendCPUTimeStateDataPoints(ddps dataold.DoubleDataPointSlice, startIdx int, startTime, now pdata.TimestampUnixNano, cpuTime cpu.TimesStat) { initializeCPUTimeDataPoint(ddps.At(startIdx+0), startTime, now, cpuTime.CPU, userStateLabelValue, cpuTime.User) initializeCPUTimeDataPoint(ddps.At(startIdx+1), startTime, now, cpuTime.CPU, systemStateLabelValue, cpuTime.System) initializeCPUTimeDataPoint(ddps.At(startIdx+2), startTime, now, cpuTime.CPU, idleStateLabelValue, cpuTime.Idle) diff --git a/receiver/hostmetricsreceiver/internal/scraper/cpuscraper/cpu_scraper_others.go b/receiver/hostmetricsreceiver/internal/scraper/cpuscraper/cpu_scraper_others.go index 804d825dd5b..bf03555cf5e 100644 --- a/receiver/hostmetricsreceiver/internal/scraper/cpuscraper/cpu_scraper_others.go +++ b/receiver/hostmetricsreceiver/internal/scraper/cpuscraper/cpu_scraper_others.go @@ -20,11 +20,12 @@ import ( "github.com/shirou/gopsutil/cpu" "go.opentelemetry.io/collector/consumer/pdata" + "go.opentelemetry.io/collector/internal/dataold" ) const cpuStatesLen = 4 -func appendCPUTimeStateDataPoints(ddps pdata.DoubleDataPointSlice, startIdx int, startTime, now pdata.TimestampUnixNano, cpuTime cpu.TimesStat) { +func appendCPUTimeStateDataPoints(ddps dataold.DoubleDataPointSlice, startIdx int, startTime, now pdata.TimestampUnixNano, cpuTime cpu.TimesStat) { initializeCPUTimeDataPoint(ddps.At(startIdx+0), startTime, now, cpuTime.CPU, userStateLabelValue, cpuTime.User) initializeCPUTimeDataPoint(ddps.At(startIdx+1), startTime, now, cpuTime.CPU, 
systemStateLabelValue, cpuTime.System) initializeCPUTimeDataPoint(ddps.At(startIdx+2), startTime, now, cpuTime.CPU, idleStateLabelValue, cpuTime.Idle) diff --git a/receiver/hostmetricsreceiver/internal/scraper/cpuscraper/cpu_scraper_test.go b/receiver/hostmetricsreceiver/internal/scraper/cpuscraper/cpu_scraper_test.go index 240298aaf80..d25e8c22755 100644 --- a/receiver/hostmetricsreceiver/internal/scraper/cpuscraper/cpu_scraper_test.go +++ b/receiver/hostmetricsreceiver/internal/scraper/cpuscraper/cpu_scraper_test.go @@ -25,6 +25,7 @@ import ( "github.com/stretchr/testify/require" "go.opentelemetry.io/collector/consumer/pdata" + "go.opentelemetry.io/collector/internal/dataold" "go.opentelemetry.io/collector/receiver/hostmetricsreceiver/internal" ) @@ -97,7 +98,7 @@ func TestScrapeMetrics(t *testing.T) { } } -func assertCPUMetricValid(t *testing.T, metric pdata.Metric, descriptor pdata.MetricDescriptor, startTime pdata.TimestampUnixNano) { +func assertCPUMetricValid(t *testing.T, metric dataold.Metric, descriptor dataold.MetricDescriptor, startTime pdata.TimestampUnixNano) { internal.AssertDescriptorEqual(t, descriptor, metric.MetricDescriptor()) if startTime != 0 { internal.AssertDoubleMetricStartTimeEquals(t, metric, startTime) @@ -110,7 +111,7 @@ func assertCPUMetricValid(t *testing.T, metric pdata.Metric, descriptor pdata.Me internal.AssertDoubleMetricLabelHasValue(t, metric, 3, stateLabelName, interruptStateLabelValue) } -func assertCPUMetricHasLinuxSpecificStateLabels(t *testing.T, metric pdata.Metric) { +func assertCPUMetricHasLinuxSpecificStateLabels(t *testing.T, metric dataold.Metric) { internal.AssertDoubleMetricLabelHasValue(t, metric, 4, stateLabelName, niceStateLabelValue) internal.AssertDoubleMetricLabelHasValue(t, metric, 5, stateLabelName, softIRQStateLabelValue) internal.AssertDoubleMetricLabelHasValue(t, metric, 6, stateLabelName, stealStateLabelValue) diff --git a/receiver/hostmetricsreceiver/internal/scraper/diskscraper/disk_metadata.go b/receiver/hostmetricsreceiver/internal/scraper/diskscraper/disk_metadata.go index b2cc55eea54..ff98fd1556b 100644 --- a/receiver/hostmetricsreceiver/internal/scraper/diskscraper/disk_metadata.go +++ b/receiver/hostmetricsreceiver/internal/scraper/diskscraper/disk_metadata.go @@ -15,7 +15,7 @@ package diskscraper import ( - "go.opentelemetry.io/collector/consumer/pdata" + "go.opentelemetry.io/collector/internal/dataold" ) // labels @@ -34,52 +34,52 @@ const ( // descriptors -var diskIODescriptor = func() pdata.MetricDescriptor { - descriptor := pdata.NewMetricDescriptor() +var diskIODescriptor = func() dataold.MetricDescriptor { + descriptor := dataold.NewMetricDescriptor() descriptor.InitEmpty() descriptor.SetName("system.disk.io") descriptor.SetDescription("Disk bytes transferred.") descriptor.SetUnit("bytes") - descriptor.SetType(pdata.MetricTypeMonotonicInt64) + descriptor.SetType(dataold.MetricTypeMonotonicInt64) return descriptor }() -var diskOpsDescriptor = func() pdata.MetricDescriptor { - descriptor := pdata.NewMetricDescriptor() +var diskOpsDescriptor = func() dataold.MetricDescriptor { + descriptor := dataold.NewMetricDescriptor() descriptor.InitEmpty() descriptor.SetName("system.disk.ops") descriptor.SetDescription("Disk operations count.") descriptor.SetUnit("1") - descriptor.SetType(pdata.MetricTypeMonotonicInt64) + descriptor.SetType(dataold.MetricTypeMonotonicInt64) return descriptor }() -var diskTimeDescriptor = func() pdata.MetricDescriptor { - descriptor := pdata.NewMetricDescriptor() +var diskTimeDescriptor = func() 
dataold.MetricDescriptor { + descriptor := dataold.NewMetricDescriptor() descriptor.InitEmpty() descriptor.SetName("system.disk.time") descriptor.SetDescription("Time spent in disk operations.") descriptor.SetUnit("s") - descriptor.SetType(pdata.MetricTypeMonotonicDouble) + descriptor.SetType(dataold.MetricTypeMonotonicDouble) return descriptor }() -var diskPendingOperationsDescriptor = func() pdata.MetricDescriptor { - descriptor := pdata.NewMetricDescriptor() +var diskPendingOperationsDescriptor = func() dataold.MetricDescriptor { + descriptor := dataold.NewMetricDescriptor() descriptor.InitEmpty() descriptor.SetName("system.disk.pending_operations") descriptor.SetDescription("The queue size of pending I/O operations.") descriptor.SetUnit("1") - descriptor.SetType(pdata.MetricTypeInt64) + descriptor.SetType(dataold.MetricTypeInt64) return descriptor }() -var diskMergedDescriptor = func() pdata.MetricDescriptor { - descriptor := pdata.NewMetricDescriptor() +var diskMergedDescriptor = func() dataold.MetricDescriptor { + descriptor := dataold.NewMetricDescriptor() descriptor.InitEmpty() descriptor.SetName("system.disk.merged") descriptor.SetDescription("The number of disk reads merged into single physical disk access operations.") descriptor.SetUnit("1") - descriptor.SetType(pdata.MetricTypeMonotonicInt64) + descriptor.SetType(dataold.MetricTypeMonotonicInt64) return descriptor }() diff --git a/receiver/hostmetricsreceiver/internal/scraper/diskscraper/disk_scraper_others.go b/receiver/hostmetricsreceiver/internal/scraper/diskscraper/disk_scraper_others.go index 6590f6a7e9f..536c60133db 100644 --- a/receiver/hostmetricsreceiver/internal/scraper/diskscraper/disk_scraper_others.go +++ b/receiver/hostmetricsreceiver/internal/scraper/diskscraper/disk_scraper_others.go @@ -25,6 +25,7 @@ import ( "github.com/shirou/gopsutil/host" "go.opentelemetry.io/collector/consumer/pdata" + "go.opentelemetry.io/collector/internal/dataold" "go.opentelemetry.io/collector/internal/processor/filterset" "go.opentelemetry.io/collector/receiver/hostmetricsreceiver/internal" ) @@ -81,8 +82,8 @@ func (s *scraper) Close(_ context.Context) error { } // ScrapeMetrics -func (s *scraper) ScrapeMetrics(_ context.Context) (pdata.MetricSlice, error) { - metrics := pdata.NewMetricSlice() +func (s *scraper) ScrapeMetrics(_ context.Context) (dataold.MetricSlice, error) { + metrics := dataold.NewMetricSlice() now := internal.TimeToUnixNano(time.Now()) ioCounters, err := s.ioCounters() @@ -105,7 +106,7 @@ func (s *scraper) ScrapeMetrics(_ context.Context) (pdata.MetricSlice, error) { return metrics, nil } -func initializeDiskIOMetric(metric pdata.Metric, startTime, now pdata.TimestampUnixNano, ioCounters map[string]disk.IOCountersStat) { +func initializeDiskIOMetric(metric dataold.Metric, startTime, now pdata.TimestampUnixNano, ioCounters map[string]disk.IOCountersStat) { diskIODescriptor.CopyTo(metric.MetricDescriptor()) idps := metric.Int64DataPoints() @@ -119,7 +120,7 @@ func initializeDiskIOMetric(metric pdata.Metric, startTime, now pdata.TimestampU } } -func initializeDiskOpsMetric(metric pdata.Metric, startTime, now pdata.TimestampUnixNano, ioCounters map[string]disk.IOCountersStat) { +func initializeDiskOpsMetric(metric dataold.Metric, startTime, now pdata.TimestampUnixNano, ioCounters map[string]disk.IOCountersStat) { diskOpsDescriptor.CopyTo(metric.MetricDescriptor()) idps := metric.Int64DataPoints() @@ -133,7 +134,7 @@ func initializeDiskOpsMetric(metric pdata.Metric, startTime, now pdata.Timestamp } } -func 
initializeDiskTimeMetric(metric pdata.Metric, startTime, now pdata.TimestampUnixNano, ioCounters map[string]disk.IOCountersStat) { +func initializeDiskTimeMetric(metric dataold.Metric, startTime, now pdata.TimestampUnixNano, ioCounters map[string]disk.IOCountersStat) { diskTimeDescriptor.CopyTo(metric.MetricDescriptor()) ddps := metric.DoubleDataPoints() @@ -147,7 +148,7 @@ func initializeDiskTimeMetric(metric pdata.Metric, startTime, now pdata.Timestam } } -func initializeDiskPendingOperationsMetric(metric pdata.Metric, now pdata.TimestampUnixNano, ioCounters map[string]disk.IOCountersStat) { +func initializeDiskPendingOperationsMetric(metric dataold.Metric, now pdata.TimestampUnixNano, ioCounters map[string]disk.IOCountersStat) { diskPendingOperationsDescriptor.CopyTo(metric.MetricDescriptor()) idps := metric.Int64DataPoints() @@ -160,7 +161,7 @@ func initializeDiskPendingOperationsMetric(metric pdata.Metric, now pdata.Timest } } -func initializeInt64DataPoint(dataPoint pdata.Int64DataPoint, startTime, now pdata.TimestampUnixNano, deviceLabel string, directionLabel string, value int64) { +func initializeInt64DataPoint(dataPoint dataold.Int64DataPoint, startTime, now pdata.TimestampUnixNano, deviceLabel string, directionLabel string, value int64) { labelsMap := dataPoint.LabelsMap() labelsMap.Insert(deviceLabelName, deviceLabel) labelsMap.Insert(directionLabelName, directionLabel) @@ -169,7 +170,7 @@ func initializeInt64DataPoint(dataPoint pdata.Int64DataPoint, startTime, now pda dataPoint.SetValue(value) } -func initializeDoubleDataPoint(dataPoint pdata.DoubleDataPoint, startTime, now pdata.TimestampUnixNano, deviceLabel string, directionLabel string, value float64) { +func initializeDoubleDataPoint(dataPoint dataold.DoubleDataPoint, startTime, now pdata.TimestampUnixNano, deviceLabel string, directionLabel string, value float64) { labelsMap := dataPoint.LabelsMap() labelsMap.Insert(deviceLabelName, deviceLabel) labelsMap.Insert(directionLabelName, directionLabel) @@ -178,7 +179,7 @@ func initializeDoubleDataPoint(dataPoint pdata.DoubleDataPoint, startTime, now p dataPoint.SetValue(value) } -func initializeDiskPendingDataPoint(dataPoint pdata.Int64DataPoint, now pdata.TimestampUnixNano, deviceLabel string, value int64) { +func initializeDiskPendingDataPoint(dataPoint dataold.Int64DataPoint, now pdata.TimestampUnixNano, deviceLabel string, value int64) { labelsMap := dataPoint.LabelsMap() labelsMap.Insert(deviceLabelName, deviceLabel) dataPoint.SetTimestamp(now) diff --git a/receiver/hostmetricsreceiver/internal/scraper/diskscraper/disk_scraper_others_fallback.go b/receiver/hostmetricsreceiver/internal/scraper/diskscraper/disk_scraper_others_fallback.go index 5c169a874b4..5d6571874be 100644 --- a/receiver/hostmetricsreceiver/internal/scraper/diskscraper/disk_scraper_others_fallback.go +++ b/receiver/hostmetricsreceiver/internal/scraper/diskscraper/disk_scraper_others_fallback.go @@ -20,9 +20,10 @@ import ( "github.com/shirou/gopsutil/disk" "go.opentelemetry.io/collector/consumer/pdata" + "go.opentelemetry.io/collector/internal/dataold" ) const systemSpecificMetricsLen = 0 -func appendSystemSpecificMetrics(metrics pdata.MetricSlice, startIdx int, startTime, now pdata.TimestampUnixNano, ioCounters map[string]disk.IOCountersStat) { +func appendSystemSpecificMetrics(metrics dataold.MetricSlice, startIdx int, startTime, now pdata.TimestampUnixNano, ioCounters map[string]disk.IOCountersStat) { } diff --git a/receiver/hostmetricsreceiver/internal/scraper/diskscraper/disk_scraper_others_linux.go 
b/receiver/hostmetricsreceiver/internal/scraper/diskscraper/disk_scraper_others_linux.go index 0e17d152e18..92b9f1cce8c 100644 --- a/receiver/hostmetricsreceiver/internal/scraper/diskscraper/disk_scraper_others_linux.go +++ b/receiver/hostmetricsreceiver/internal/scraper/diskscraper/disk_scraper_others_linux.go @@ -20,11 +20,12 @@ import ( "github.com/shirou/gopsutil/disk" "go.opentelemetry.io/collector/consumer/pdata" + "go.opentelemetry.io/collector/internal/dataold" ) const systemSpecificMetricsLen = 1 -func appendSystemSpecificMetrics(metrics pdata.MetricSlice, startIdx int, startTime, now pdata.TimestampUnixNano, ioCounters map[string]disk.IOCountersStat) { +func appendSystemSpecificMetrics(metrics dataold.MetricSlice, startIdx int, startTime, now pdata.TimestampUnixNano, ioCounters map[string]disk.IOCountersStat) { metric := metrics.At(startIdx) diskMergedDescriptor.CopyTo(metric.MetricDescriptor()) diff --git a/receiver/hostmetricsreceiver/internal/scraper/diskscraper/disk_scraper_test.go b/receiver/hostmetricsreceiver/internal/scraper/diskscraper/disk_scraper_test.go index be00e2b11bc..1d064c6899c 100644 --- a/receiver/hostmetricsreceiver/internal/scraper/diskscraper/disk_scraper_test.go +++ b/receiver/hostmetricsreceiver/internal/scraper/diskscraper/disk_scraper_test.go @@ -23,6 +23,7 @@ import ( "github.com/stretchr/testify/require" "go.opentelemetry.io/collector/consumer/pdata" + "go.opentelemetry.io/collector/internal/dataold" "go.opentelemetry.io/collector/internal/processor/filterset" "go.opentelemetry.io/collector/receiver/hostmetricsreceiver/internal" ) @@ -95,7 +96,7 @@ func TestScrapeMetrics(t *testing.T) { } } -func assertInt64DiskMetricValid(t *testing.T, metric pdata.Metric, expectedDescriptor pdata.MetricDescriptor, startTime pdata.TimestampUnixNano) { +func assertInt64DiskMetricValid(t *testing.T, metric dataold.Metric, expectedDescriptor dataold.MetricDescriptor, startTime pdata.TimestampUnixNano) { internal.AssertDescriptorEqual(t, expectedDescriptor, metric.MetricDescriptor()) if startTime != 0 { internal.AssertInt64MetricStartTimeEquals(t, metric, startTime) @@ -106,7 +107,7 @@ func assertInt64DiskMetricValid(t *testing.T, metric pdata.Metric, expectedDescr internal.AssertInt64MetricLabelHasValue(t, metric, 1, directionLabelName, writeDirectionLabelValue) } -func assertDoubleDiskMetricValid(t *testing.T, metric pdata.Metric, expectedDescriptor pdata.MetricDescriptor, startTime pdata.TimestampUnixNano) { +func assertDoubleDiskMetricValid(t *testing.T, metric dataold.Metric, expectedDescriptor dataold.MetricDescriptor, startTime pdata.TimestampUnixNano) { internal.AssertDescriptorEqual(t, expectedDescriptor, metric.MetricDescriptor()) if startTime != 0 { internal.AssertInt64MetricStartTimeEquals(t, metric, startTime) @@ -117,7 +118,7 @@ func assertDoubleDiskMetricValid(t *testing.T, metric pdata.Metric, expectedDesc internal.AssertDoubleMetricLabelHasValue(t, metric, metric.DoubleDataPoints().Len()-1, directionLabelName, writeDirectionLabelValue) } -func assertDiskPendingOperationsMetricValid(t *testing.T, metric pdata.Metric) { +func assertDiskPendingOperationsMetricValid(t *testing.T, metric dataold.Metric) { internal.AssertDescriptorEqual(t, diskPendingOperationsDescriptor, metric.MetricDescriptor()) assert.GreaterOrEqual(t, metric.Int64DataPoints().Len(), 1) internal.AssertInt64MetricLabelExists(t, metric, 0, deviceLabelName) diff --git a/receiver/hostmetricsreceiver/internal/scraper/diskscraper/disk_scraper_windows.go 
b/receiver/hostmetricsreceiver/internal/scraper/diskscraper/disk_scraper_windows.go index 9e0372f813e..ff2bdf75c34 100644 --- a/receiver/hostmetricsreceiver/internal/scraper/diskscraper/disk_scraper_windows.go +++ b/receiver/hostmetricsreceiver/internal/scraper/diskscraper/disk_scraper_windows.go @@ -22,6 +22,7 @@ import ( "go.opentelemetry.io/collector/component/componenterror" "go.opentelemetry.io/collector/consumer/pdata" + "go.opentelemetry.io/collector/internal/dataold" "go.opentelemetry.io/collector/internal/processor/filterset" "go.opentelemetry.io/collector/receiver/hostmetricsreceiver/internal" "go.opentelemetry.io/collector/receiver/hostmetricsreceiver/internal/third_party/telegraf/win_perf_counters" @@ -202,13 +203,13 @@ func (s *scraper) Close(_ context.Context) error { } // ScrapeMetrics -func (s *scraper) ScrapeMetrics(_ context.Context) (pdata.MetricSlice, error) { +func (s *scraper) ScrapeMetrics(_ context.Context) (dataold.MetricSlice, error) { now := time.Now() durationSinceLastScraped := now.Sub(s.prevScrapeTime).Seconds() s.prevScrapeTime = now nowUnixTime := pdata.TimestampUnixNano(uint64(now.UnixNano())) - metrics := pdata.NewMetricSlice() + metrics := dataold.NewMetricSlice() var errors []error @@ -230,7 +231,7 @@ func (s *scraper) ScrapeMetrics(_ context.Context) (pdata.MetricSlice, error) { return metrics, componenterror.CombineErrors(errors) } -func (s *scraper) scrapeAndAppendDiskIOMetric(metrics pdata.MetricSlice, now pdata.TimestampUnixNano, durationSinceLastScraped float64) error { +func (s *scraper) scrapeAndAppendDiskIOMetric(metrics dataold.MetricSlice, now pdata.TimestampUnixNano, durationSinceLastScraped float64) error { diskReadBytesPerSecValues, err := s.diskReadBytesPerSecCounter.ScrapeData() if err != nil { return err @@ -263,7 +264,7 @@ func (s *scraper) scrapeAndAppendDiskIOMetric(metrics pdata.MetricSlice, now pda return nil } -func (s *scraper) scrapeAndAppendDiskOpsMetric(metrics pdata.MetricSlice, now pdata.TimestampUnixNano, durationSinceLastScraped float64) error { +func (s *scraper) scrapeAndAppendDiskOpsMetric(metrics dataold.MetricSlice, now pdata.TimestampUnixNano, durationSinceLastScraped float64) error { diskReadsPerSecValues, err := s.diskReadsPerSecCounter.ScrapeData() if err != nil { return err @@ -329,7 +330,7 @@ func (s *scraper) scrapeAndAppendDiskOpsMetric(metrics pdata.MetricSlice, now pd return nil } -func (s *scraper) scrapeAndAppendDiskPendingOperationsMetric(metrics pdata.MetricSlice, now pdata.TimestampUnixNano) error { +func (s *scraper) scrapeAndAppendDiskPendingOperationsMetric(metrics dataold.MetricSlice, now pdata.TimestampUnixNano) error { diskQueueLengthValues, err := s.diskQueueLengthCounter.ScrapeData() if err != nil { return err @@ -346,7 +347,7 @@ func (s *scraper) scrapeAndAppendDiskPendingOperationsMetric(metrics pdata.Metri return nil } -func initializeDiskInt64Metric(metric pdata.Metric, descriptor pdata.MetricDescriptor, startTime, now pdata.TimestampUnixNano, ops cumulativeDiskValues) { +func initializeDiskInt64Metric(metric dataold.Metric, descriptor dataold.MetricDescriptor, startTime, now pdata.TimestampUnixNano, ops cumulativeDiskValues) { descriptor.CopyTo(metric.MetricDescriptor()) idps := metric.Int64DataPoints() @@ -360,7 +361,7 @@ func initializeDiskInt64Metric(metric pdata.Metric, descriptor pdata.MetricDescr } } -func initializeDiskDoubleMetric(metric pdata.Metric, descriptor pdata.MetricDescriptor, startTime, now pdata.TimestampUnixNano, ops cumulativeDiskValues) { +func 
initializeDiskDoubleMetric(metric dataold.Metric, descriptor dataold.MetricDescriptor, startTime, now pdata.TimestampUnixNano, ops cumulativeDiskValues) { descriptor.CopyTo(metric.MetricDescriptor()) ddps := metric.DoubleDataPoints() @@ -374,7 +375,7 @@ func initializeDiskDoubleMetric(metric pdata.Metric, descriptor pdata.MetricDesc } } -func initializeDiskPendingOperationsMetric(metric pdata.Metric, now pdata.TimestampUnixNano, avgDiskQueueLengthValues []win_perf_counters.CounterValue) { +func initializeDiskPendingOperationsMetric(metric dataold.Metric, now pdata.TimestampUnixNano, avgDiskQueueLengthValues []win_perf_counters.CounterValue) { diskPendingOperationsDescriptor.CopyTo(metric.MetricDescriptor()) idps := metric.Int64DataPoints() @@ -385,7 +386,7 @@ func initializeDiskPendingOperationsMetric(metric pdata.Metric, now pdata.Timest } } -func initializeInt64DataPoint(dataPoint pdata.Int64DataPoint, startTime, now pdata.TimestampUnixNano, deviceLabel string, directionLabel string, value int64) { +func initializeInt64DataPoint(dataPoint dataold.Int64DataPoint, startTime, now pdata.TimestampUnixNano, deviceLabel string, directionLabel string, value int64) { labelsMap := dataPoint.LabelsMap() labelsMap.Insert(deviceLabelName, deviceLabel) labelsMap.Insert(directionLabelName, directionLabel) @@ -394,7 +395,7 @@ func initializeInt64DataPoint(dataPoint pdata.Int64DataPoint, startTime, now pda dataPoint.SetValue(value) } -func initializeDoubleDataPoint(dataPoint pdata.DoubleDataPoint, startTime, now pdata.TimestampUnixNano, deviceLabel string, directionLabel string, value float64) { +func initializeDoubleDataPoint(dataPoint dataold.DoubleDataPoint, startTime, now pdata.TimestampUnixNano, deviceLabel string, directionLabel string, value float64) { labelsMap := dataPoint.LabelsMap() labelsMap.Insert(deviceLabelName, deviceLabel) labelsMap.Insert(directionLabelName, directionLabel) @@ -403,7 +404,7 @@ func initializeDoubleDataPoint(dataPoint pdata.DoubleDataPoint, startTime, now p dataPoint.SetValue(value) } -func initializeDiskPendingDataPoint(dataPoint pdata.Int64DataPoint, now pdata.TimestampUnixNano, deviceLabel string, value int64) { +func initializeDiskPendingDataPoint(dataPoint dataold.Int64DataPoint, now pdata.TimestampUnixNano, deviceLabel string, value int64) { labelsMap := dataPoint.LabelsMap() labelsMap.Insert(deviceLabelName, deviceLabel) dataPoint.SetTimestamp(now) diff --git a/receiver/hostmetricsreceiver/internal/scraper/filesystemscraper/filesystem_metadata.go b/receiver/hostmetricsreceiver/internal/scraper/filesystemscraper/filesystem_metadata.go index fa9b74703c6..4b4ae67716c 100644 --- a/receiver/hostmetricsreceiver/internal/scraper/filesystemscraper/filesystem_metadata.go +++ b/receiver/hostmetricsreceiver/internal/scraper/filesystemscraper/filesystem_metadata.go @@ -15,7 +15,7 @@ package filesystemscraper import ( - "go.opentelemetry.io/collector/consumer/pdata" + "go.opentelemetry.io/collector/internal/dataold" ) // labels @@ -35,22 +35,22 @@ const ( // descriptors -var fileSystemUsageDescriptor = func() pdata.MetricDescriptor { - descriptor := pdata.NewMetricDescriptor() +var fileSystemUsageDescriptor = func() dataold.MetricDescriptor { + descriptor := dataold.NewMetricDescriptor() descriptor.InitEmpty() descriptor.SetName("system.filesystem.usage") descriptor.SetDescription("Filesystem bytes used.") descriptor.SetUnit("bytes") - descriptor.SetType(pdata.MetricTypeInt64) + descriptor.SetType(dataold.MetricTypeInt64) return descriptor }() -var 
fileSystemINodesUsageDescriptor = func() pdata.MetricDescriptor { - descriptor := pdata.NewMetricDescriptor() +var fileSystemINodesUsageDescriptor = func() dataold.MetricDescriptor { + descriptor := dataold.NewMetricDescriptor() descriptor.InitEmpty() descriptor.SetName("system.filesystem.inodes.usage") descriptor.SetDescription("FileSystem operations count.") descriptor.SetUnit("1") - descriptor.SetType(pdata.MetricTypeInt64) + descriptor.SetType(dataold.MetricTypeInt64) return descriptor }() diff --git a/receiver/hostmetricsreceiver/internal/scraper/filesystemscraper/filesystem_scraper.go b/receiver/hostmetricsreceiver/internal/scraper/filesystemscraper/filesystem_scraper.go index f1c73d73c7b..afa9b5ecf9d 100644 --- a/receiver/hostmetricsreceiver/internal/scraper/filesystemscraper/filesystem_scraper.go +++ b/receiver/hostmetricsreceiver/internal/scraper/filesystemscraper/filesystem_scraper.go @@ -23,6 +23,7 @@ import ( "go.opentelemetry.io/collector/component/componenterror" "go.opentelemetry.io/collector/consumer/pdata" + "go.opentelemetry.io/collector/internal/dataold" "go.opentelemetry.io/collector/internal/processor/filterset" "go.opentelemetry.io/collector/receiver/hostmetricsreceiver/internal" ) @@ -77,8 +78,8 @@ func (s *scraper) Close(_ context.Context) error { } // ScrapeMetrics -func (s *scraper) ScrapeMetrics(_ context.Context) (pdata.MetricSlice, error) { - metrics := pdata.NewMetricSlice() +func (s *scraper) ScrapeMetrics(_ context.Context) (dataold.MetricSlice, error) { + metrics := dataold.NewMetricSlice() now := internal.TimeToUnixNano(time.Now()) @@ -127,7 +128,7 @@ func deviceUsageAlreadySet(device string, usages []*deviceUsage) bool { return false } -func initializeFileSystemUsageMetric(metric pdata.Metric, now pdata.TimestampUnixNano, deviceUsages []*deviceUsage) { +func initializeFileSystemUsageMetric(metric dataold.Metric, now pdata.TimestampUnixNano, deviceUsages []*deviceUsage) { fileSystemUsageDescriptor.CopyTo(metric.MetricDescriptor()) idps := metric.Int64DataPoints() @@ -137,7 +138,7 @@ func initializeFileSystemUsageMetric(metric pdata.Metric, now pdata.TimestampUni } } -func initializeFileSystemUsageDataPoint(dataPoint pdata.Int64DataPoint, now pdata.TimestampUnixNano, deviceLabel string, stateLabel string, value int64) { +func initializeFileSystemUsageDataPoint(dataPoint dataold.Int64DataPoint, now pdata.TimestampUnixNano, deviceLabel string, stateLabel string, value int64) { labelsMap := dataPoint.LabelsMap() labelsMap.Insert(deviceLabelName, deviceLabel) labelsMap.Insert(stateLabelName, stateLabel) diff --git a/receiver/hostmetricsreceiver/internal/scraper/filesystemscraper/filesystem_scraper_others.go b/receiver/hostmetricsreceiver/internal/scraper/filesystemscraper/filesystem_scraper_others.go index f78d9ce69c3..b7844106eef 100644 --- a/receiver/hostmetricsreceiver/internal/scraper/filesystemscraper/filesystem_scraper_others.go +++ b/receiver/hostmetricsreceiver/internal/scraper/filesystemscraper/filesystem_scraper_others.go @@ -18,16 +18,17 @@ package filesystemscraper import ( "go.opentelemetry.io/collector/consumer/pdata" + "go.opentelemetry.io/collector/internal/dataold" ) const fileSystemStatesLen = 2 -func appendFileSystemUsageStateDataPoints(idps pdata.Int64DataPointSlice, startIdx int, now pdata.TimestampUnixNano, deviceUsage *deviceUsage) { +func appendFileSystemUsageStateDataPoints(idps dataold.Int64DataPointSlice, startIdx int, now pdata.TimestampUnixNano, deviceUsage *deviceUsage) { initializeFileSystemUsageDataPoint(idps.At(startIdx+0), now, 
deviceUsage.deviceName, usedLabelValue, int64(deviceUsage.usage.Used)) initializeFileSystemUsageDataPoint(idps.At(startIdx+1), now, deviceUsage.deviceName, freeLabelValue, int64(deviceUsage.usage.Free)) } const systemSpecificMetricsLen = 0 -func appendSystemSpecificMetrics(metrics pdata.MetricSlice, startIdx int, now pdata.TimestampUnixNano, deviceUsages []*deviceUsage) { +func appendSystemSpecificMetrics(metrics dataold.MetricSlice, startIdx int, now pdata.TimestampUnixNano, deviceUsages []*deviceUsage) { } diff --git a/receiver/hostmetricsreceiver/internal/scraper/filesystemscraper/filesystem_scraper_test.go b/receiver/hostmetricsreceiver/internal/scraper/filesystemscraper/filesystem_scraper_test.go index 2775a3266a6..279b70c5ba8 100644 --- a/receiver/hostmetricsreceiver/internal/scraper/filesystemscraper/filesystem_scraper_test.go +++ b/receiver/hostmetricsreceiver/internal/scraper/filesystemscraper/filesystem_scraper_test.go @@ -24,7 +24,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "go.opentelemetry.io/collector/consumer/pdata" + "go.opentelemetry.io/collector/internal/dataold" "go.opentelemetry.io/collector/internal/processor/filterset" "go.opentelemetry.io/collector/receiver/hostmetricsreceiver/internal" ) @@ -146,7 +146,7 @@ func TestScrapeMetrics(t *testing.T) { } } -func assertFileSystemUsageMetricValid(t *testing.T, metric pdata.Metric, descriptor pdata.MetricDescriptor, expectedDeviceDataPoints int) { +func assertFileSystemUsageMetricValid(t *testing.T, metric dataold.Metric, descriptor dataold.MetricDescriptor, expectedDeviceDataPoints int) { internal.AssertDescriptorEqual(t, descriptor, metric.MetricDescriptor()) if expectedDeviceDataPoints > 0 { assert.Equal(t, expectedDeviceDataPoints, metric.Int64DataPoints().Len()) @@ -157,7 +157,7 @@ func assertFileSystemUsageMetricValid(t *testing.T, metric pdata.Metric, descrip internal.AssertInt64MetricLabelHasValue(t, metric, 1, stateLabelName, freeLabelValue) } -func assertFileSystemUsageMetricHasUnixSpecificStateLabels(t *testing.T, metric pdata.Metric) { +func assertFileSystemUsageMetricHasUnixSpecificStateLabels(t *testing.T, metric dataold.Metric) { internal.AssertInt64MetricLabelHasValue(t, metric, 2, stateLabelName, reservedLabelValue) } diff --git a/receiver/hostmetricsreceiver/internal/scraper/filesystemscraper/filesystem_scraper_unix.go b/receiver/hostmetricsreceiver/internal/scraper/filesystemscraper/filesystem_scraper_unix.go index 09fc038129a..e854ab88646 100644 --- a/receiver/hostmetricsreceiver/internal/scraper/filesystemscraper/filesystem_scraper_unix.go +++ b/receiver/hostmetricsreceiver/internal/scraper/filesystemscraper/filesystem_scraper_unix.go @@ -18,11 +18,12 @@ package filesystemscraper import ( "go.opentelemetry.io/collector/consumer/pdata" + "go.opentelemetry.io/collector/internal/dataold" ) const fileSystemStatesLen = 3 -func appendFileSystemUsageStateDataPoints(idps pdata.Int64DataPointSlice, startIdx int, now pdata.TimestampUnixNano, deviceUsage *deviceUsage) { +func appendFileSystemUsageStateDataPoints(idps dataold.Int64DataPointSlice, startIdx int, now pdata.TimestampUnixNano, deviceUsage *deviceUsage) { initializeFileSystemUsageDataPoint(idps.At(startIdx+0), now, deviceUsage.deviceName, usedLabelValue, int64(deviceUsage.usage.Used)) initializeFileSystemUsageDataPoint(idps.At(startIdx+1), now, deviceUsage.deviceName, freeLabelValue, int64(deviceUsage.usage.Free)) initializeFileSystemUsageDataPoint(idps.At(startIdx+2), now, deviceUsage.deviceName, 
reservedLabelValue, int64(deviceUsage.usage.Total-deviceUsage.usage.Used-deviceUsage.usage.Free)) @@ -30,7 +31,7 @@ func appendFileSystemUsageStateDataPoints(idps pdata.Int64DataPointSlice, startI const systemSpecificMetricsLen = 1 -func appendSystemSpecificMetrics(metrics pdata.MetricSlice, startIdx int, now pdata.TimestampUnixNano, deviceUsages []*deviceUsage) { +func appendSystemSpecificMetrics(metrics dataold.MetricSlice, startIdx int, now pdata.TimestampUnixNano, deviceUsages []*deviceUsage) { metric := metrics.At(startIdx) fileSystemINodesUsageDescriptor.CopyTo(metric.MetricDescriptor()) diff --git a/receiver/hostmetricsreceiver/internal/scraper/loadscraper/load_metadata.go b/receiver/hostmetricsreceiver/internal/scraper/loadscraper/load_metadata.go index 6194259f72e..2cd5d3b1bc7 100644 --- a/receiver/hostmetricsreceiver/internal/scraper/loadscraper/load_metadata.go +++ b/receiver/hostmetricsreceiver/internal/scraper/loadscraper/load_metadata.go @@ -15,37 +15,37 @@ package loadscraper import ( - "go.opentelemetry.io/collector/consumer/pdata" + "go.opentelemetry.io/collector/internal/dataold" ) // descriptors -var loadAvg1MDescriptor = func() pdata.MetricDescriptor { - descriptor := pdata.NewMetricDescriptor() +var loadAvg1MDescriptor = func() dataold.MetricDescriptor { + descriptor := dataold.NewMetricDescriptor() descriptor.InitEmpty() descriptor.SetName("system.cpu.load_average.1m") descriptor.SetDescription("Average CPU Load over 1 minute.") descriptor.SetUnit("1") - descriptor.SetType(pdata.MetricTypeDouble) + descriptor.SetType(dataold.MetricTypeDouble) return descriptor }() -var loadAvg5mDescriptor = func() pdata.MetricDescriptor { - descriptor := pdata.NewMetricDescriptor() +var loadAvg5mDescriptor = func() dataold.MetricDescriptor { + descriptor := dataold.NewMetricDescriptor() descriptor.InitEmpty() descriptor.SetName("system.cpu.load_average.5m") descriptor.SetDescription("Average CPU Load over 5 minutes.") descriptor.SetUnit("1") - descriptor.SetType(pdata.MetricTypeDouble) + descriptor.SetType(dataold.MetricTypeDouble) return descriptor }() -var loadAvg15mDescriptor = func() pdata.MetricDescriptor { - descriptor := pdata.NewMetricDescriptor() +var loadAvg15mDescriptor = func() dataold.MetricDescriptor { + descriptor := dataold.NewMetricDescriptor() descriptor.InitEmpty() descriptor.SetName("system.cpu.load_average.15m") descriptor.SetDescription("Average CPU Load over 15 minutes.") descriptor.SetUnit("1") - descriptor.SetType(pdata.MetricTypeDouble) + descriptor.SetType(dataold.MetricTypeDouble) return descriptor }() diff --git a/receiver/hostmetricsreceiver/internal/scraper/loadscraper/load_scraper.go b/receiver/hostmetricsreceiver/internal/scraper/loadscraper/load_scraper.go index 310f4a17fd1..34ba9ca515d 100644 --- a/receiver/hostmetricsreceiver/internal/scraper/loadscraper/load_scraper.go +++ b/receiver/hostmetricsreceiver/internal/scraper/loadscraper/load_scraper.go @@ -22,6 +22,7 @@ import ( "go.uber.org/zap" "go.opentelemetry.io/collector/consumer/pdata" + "go.opentelemetry.io/collector/internal/dataold" "go.opentelemetry.io/collector/receiver/hostmetricsreceiver/internal" ) @@ -50,8 +51,8 @@ func (s *scraper) Close(ctx context.Context) error { } // ScrapeMetrics -func (s *scraper) ScrapeMetrics(_ context.Context) (pdata.MetricSlice, error) { - metrics := pdata.NewMetricSlice() +func (s *scraper) ScrapeMetrics(_ context.Context) (dataold.MetricSlice, error) { + metrics := dataold.NewMetricSlice() now := internal.TimeToUnixNano(time.Now()) avgLoadValues, err := 
s.load() @@ -66,7 +67,7 @@ func (s *scraper) ScrapeMetrics(_ context.Context) (pdata.MetricSlice, error) { return metrics, nil } -func initializeLoadMetric(metric pdata.Metric, metricDescriptor pdata.MetricDescriptor, now pdata.TimestampUnixNano, value float64) { +func initializeLoadMetric(metric dataold.Metric, metricDescriptor dataold.MetricDescriptor, now pdata.TimestampUnixNano, value float64) { metricDescriptor.CopyTo(metric.MetricDescriptor()) idps := metric.DoubleDataPoints() diff --git a/receiver/hostmetricsreceiver/internal/scraper/loadscraper/load_scraper_test.go b/receiver/hostmetricsreceiver/internal/scraper/loadscraper/load_scraper_test.go index b21eaa5062d..2c051cd97d2 100644 --- a/receiver/hostmetricsreceiver/internal/scraper/loadscraper/load_scraper_test.go +++ b/receiver/hostmetricsreceiver/internal/scraper/loadscraper/load_scraper_test.go @@ -24,7 +24,7 @@ import ( "github.com/stretchr/testify/require" "go.uber.org/zap" - "go.opentelemetry.io/collector/consumer/pdata" + "go.opentelemetry.io/collector/internal/dataold" "go.opentelemetry.io/collector/receiver/hostmetricsreceiver/internal" ) @@ -77,7 +77,7 @@ func TestScrapeMetrics(t *testing.T) { } } -func assertMetricHasSingleDatapoint(t *testing.T, metric pdata.Metric, descriptor pdata.MetricDescriptor) { +func assertMetricHasSingleDatapoint(t *testing.T, metric dataold.Metric, descriptor dataold.MetricDescriptor) { internal.AssertDescriptorEqual(t, descriptor, metric.MetricDescriptor()) assert.Equal(t, 1, metric.DoubleDataPoints().Len()) } diff --git a/receiver/hostmetricsreceiver/internal/scraper/memoryscraper/memory_metadata.go b/receiver/hostmetricsreceiver/internal/scraper/memoryscraper/memory_metadata.go index 149ba5c9e8e..e1efa5f14fd 100644 --- a/receiver/hostmetricsreceiver/internal/scraper/memoryscraper/memory_metadata.go +++ b/receiver/hostmetricsreceiver/internal/scraper/memoryscraper/memory_metadata.go @@ -15,7 +15,7 @@ package memoryscraper import ( - "go.opentelemetry.io/collector/consumer/pdata" + "go.opentelemetry.io/collector/internal/dataold" ) // labels @@ -36,12 +36,12 @@ const ( // descriptors -var memoryUsageDescriptor = func() pdata.MetricDescriptor { - descriptor := pdata.NewMetricDescriptor() +var memoryUsageDescriptor = func() dataold.MetricDescriptor { + descriptor := dataold.NewMetricDescriptor() descriptor.InitEmpty() descriptor.SetName("system.memory.usage") descriptor.SetDescription("Bytes of memory in use.") descriptor.SetUnit("bytes") - descriptor.SetType(pdata.MetricTypeInt64) + descriptor.SetType(dataold.MetricTypeInt64) return descriptor }() diff --git a/receiver/hostmetricsreceiver/internal/scraper/memoryscraper/memory_scraper.go b/receiver/hostmetricsreceiver/internal/scraper/memoryscraper/memory_scraper.go index 55aaa427ab4..8f42bb3d3b1 100644 --- a/receiver/hostmetricsreceiver/internal/scraper/memoryscraper/memory_scraper.go +++ b/receiver/hostmetricsreceiver/internal/scraper/memoryscraper/memory_scraper.go @@ -21,6 +21,7 @@ import ( "github.com/shirou/gopsutil/mem" "go.opentelemetry.io/collector/consumer/pdata" + "go.opentelemetry.io/collector/internal/dataold" "go.opentelemetry.io/collector/receiver/hostmetricsreceiver/internal" ) @@ -48,8 +49,8 @@ func (s *scraper) Close(_ context.Context) error { } // ScrapeMetrics -func (s *scraper) ScrapeMetrics(_ context.Context) (pdata.MetricSlice, error) { - metrics := pdata.NewMetricSlice() +func (s *scraper) ScrapeMetrics(_ context.Context) (dataold.MetricSlice, error) { + metrics := dataold.NewMetricSlice() now := 
internal.TimeToUnixNano(time.Now()) memInfo, err := s.virtualMemory() @@ -62,7 +63,7 @@ func (s *scraper) ScrapeMetrics(_ context.Context) (pdata.MetricSlice, error) { return metrics, nil } -func initializeMemoryUsageMetric(metric pdata.Metric, now pdata.TimestampUnixNano, memInfo *mem.VirtualMemoryStat) { +func initializeMemoryUsageMetric(metric dataold.Metric, now pdata.TimestampUnixNano, memInfo *mem.VirtualMemoryStat) { memoryUsageDescriptor.CopyTo(metric.MetricDescriptor()) idps := metric.Int64DataPoints() @@ -70,7 +71,7 @@ func initializeMemoryUsageMetric(metric pdata.Metric, now pdata.TimestampUnixNan appendMemoryUsageStateDataPoints(idps, now, memInfo) } -func initializeMemoryUsageDataPoint(dataPoint pdata.Int64DataPoint, now pdata.TimestampUnixNano, stateLabel string, value int64) { +func initializeMemoryUsageDataPoint(dataPoint dataold.Int64DataPoint, now pdata.TimestampUnixNano, stateLabel string, value int64) { labelsMap := dataPoint.LabelsMap() labelsMap.Insert(stateLabelName, stateLabel) dataPoint.SetTimestamp(now) diff --git a/receiver/hostmetricsreceiver/internal/scraper/memoryscraper/memory_scraper_linux.go b/receiver/hostmetricsreceiver/internal/scraper/memoryscraper/memory_scraper_linux.go index 6ef07b0380a..869808fab7d 100644 --- a/receiver/hostmetricsreceiver/internal/scraper/memoryscraper/memory_scraper_linux.go +++ b/receiver/hostmetricsreceiver/internal/scraper/memoryscraper/memory_scraper_linux.go @@ -20,11 +20,12 @@ import ( "github.com/shirou/gopsutil/mem" "go.opentelemetry.io/collector/consumer/pdata" + "go.opentelemetry.io/collector/internal/dataold" ) const memStatesLen = 6 -func appendMemoryUsageStateDataPoints(idps pdata.Int64DataPointSlice, now pdata.TimestampUnixNano, memInfo *mem.VirtualMemoryStat) { +func appendMemoryUsageStateDataPoints(idps dataold.Int64DataPointSlice, now pdata.TimestampUnixNano, memInfo *mem.VirtualMemoryStat) { initializeMemoryUsageDataPoint(idps.At(0), now, usedStateLabelValue, int64(memInfo.Used)) initializeMemoryUsageDataPoint(idps.At(1), now, freeStateLabelValue, int64(memInfo.Free)) initializeMemoryUsageDataPoint(idps.At(2), now, bufferedStateLabelValue, int64(memInfo.Buffers)) diff --git a/receiver/hostmetricsreceiver/internal/scraper/memoryscraper/memory_scraper_others.go b/receiver/hostmetricsreceiver/internal/scraper/memoryscraper/memory_scraper_others.go index dcca0930e36..ead86ffe692 100644 --- a/receiver/hostmetricsreceiver/internal/scraper/memoryscraper/memory_scraper_others.go +++ b/receiver/hostmetricsreceiver/internal/scraper/memoryscraper/memory_scraper_others.go @@ -20,11 +20,12 @@ import ( "github.com/shirou/gopsutil/mem" "go.opentelemetry.io/collector/consumer/pdata" + "go.opentelemetry.io/collector/internal/dataold" ) const memStatesLen = 3 -func appendMemoryUsageStateDataPoints(idps pdata.Int64DataPointSlice, now pdata.TimestampUnixNano, memInfo *mem.VirtualMemoryStat) { +func appendMemoryUsageStateDataPoints(idps dataold.Int64DataPointSlice, now pdata.TimestampUnixNano, memInfo *mem.VirtualMemoryStat) { initializeMemoryUsageDataPoint(idps.At(0), now, usedStateLabelValue, int64(memInfo.Used)) initializeMemoryUsageDataPoint(idps.At(1), now, freeStateLabelValue, int64(memInfo.Free)) initializeMemoryUsageDataPoint(idps.At(2), now, inactiveStateLabelValue, int64(memInfo.Inactive)) diff --git a/receiver/hostmetricsreceiver/internal/scraper/memoryscraper/memory_scraper_test.go b/receiver/hostmetricsreceiver/internal/scraper/memoryscraper/memory_scraper_test.go index aea26a4585b..23267362708 100644 --- 
a/receiver/hostmetricsreceiver/internal/scraper/memoryscraper/memory_scraper_test.go +++ b/receiver/hostmetricsreceiver/internal/scraper/memoryscraper/memory_scraper_test.go @@ -24,7 +24,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "go.opentelemetry.io/collector/consumer/pdata" + "go.opentelemetry.io/collector/internal/dataold" "go.opentelemetry.io/collector/receiver/hostmetricsreceiver/internal" ) @@ -79,14 +79,14 @@ func TestScrapeMetrics(t *testing.T) { } } -func assertMemoryUsageMetricValid(t *testing.T, metric pdata.Metric, descriptor pdata.MetricDescriptor) { +func assertMemoryUsageMetricValid(t *testing.T, metric dataold.Metric, descriptor dataold.MetricDescriptor) { internal.AssertDescriptorEqual(t, descriptor, metric.MetricDescriptor()) assert.GreaterOrEqual(t, metric.Int64DataPoints().Len(), 2) internal.AssertInt64MetricLabelHasValue(t, metric, 0, stateLabelName, usedStateLabelValue) internal.AssertInt64MetricLabelHasValue(t, metric, 1, stateLabelName, freeStateLabelValue) } -func assertMemoryUsageMetricHasLinuxSpecificStateLabels(t *testing.T, metric pdata.Metric) { +func assertMemoryUsageMetricHasLinuxSpecificStateLabels(t *testing.T, metric dataold.Metric) { internal.AssertInt64MetricLabelHasValue(t, metric, 2, stateLabelName, bufferedStateLabelValue) internal.AssertInt64MetricLabelHasValue(t, metric, 3, stateLabelName, cachedStateLabelValue) internal.AssertInt64MetricLabelHasValue(t, metric, 4, stateLabelName, slabReclaimableStateLabelValue) diff --git a/receiver/hostmetricsreceiver/internal/scraper/memoryscraper/memory_scraper_windows.go b/receiver/hostmetricsreceiver/internal/scraper/memoryscraper/memory_scraper_windows.go index 198ffb8e116..f8777cbb30a 100644 --- a/receiver/hostmetricsreceiver/internal/scraper/memoryscraper/memory_scraper_windows.go +++ b/receiver/hostmetricsreceiver/internal/scraper/memoryscraper/memory_scraper_windows.go @@ -20,11 +20,12 @@ import ( "github.com/shirou/gopsutil/mem" "go.opentelemetry.io/collector/consumer/pdata" + "go.opentelemetry.io/collector/internal/dataold" ) const memStatesLen = 2 -func appendMemoryUsageStateDataPoints(idps pdata.Int64DataPointSlice, now pdata.TimestampUnixNano, memInfo *mem.VirtualMemoryStat) { +func appendMemoryUsageStateDataPoints(idps dataold.Int64DataPointSlice, now pdata.TimestampUnixNano, memInfo *mem.VirtualMemoryStat) { initializeMemoryUsageDataPoint(idps.At(0), now, usedStateLabelValue, int64(memInfo.Used)) initializeMemoryUsageDataPoint(idps.At(1), now, freeStateLabelValue, int64(memInfo.Available)) } diff --git a/receiver/hostmetricsreceiver/internal/scraper/networkscraper/network_metadata.go b/receiver/hostmetricsreceiver/internal/scraper/networkscraper/network_metadata.go index 71f28ecf32d..d510deadf15 100644 --- a/receiver/hostmetricsreceiver/internal/scraper/networkscraper/network_metadata.go +++ b/receiver/hostmetricsreceiver/internal/scraper/networkscraper/network_metadata.go @@ -15,7 +15,7 @@ package networkscraper import ( - "go.opentelemetry.io/collector/consumer/pdata" + "go.opentelemetry.io/collector/internal/dataold" ) // network metric constants @@ -35,52 +35,52 @@ const ( // descriptors -var networkPacketsDescriptor = func() pdata.MetricDescriptor { - descriptor := pdata.NewMetricDescriptor() +var networkPacketsDescriptor = func() dataold.MetricDescriptor { + descriptor := dataold.NewMetricDescriptor() descriptor.InitEmpty() descriptor.SetName("system.network.packets") descriptor.SetDescription("The number of packets transferred.") 
descriptor.SetUnit("1") - descriptor.SetType(pdata.MetricTypeMonotonicInt64) + descriptor.SetType(dataold.MetricTypeMonotonicInt64) return descriptor }() -var networkDroppedPacketsDescriptor = func() pdata.MetricDescriptor { - descriptor := pdata.NewMetricDescriptor() +var networkDroppedPacketsDescriptor = func() dataold.MetricDescriptor { + descriptor := dataold.NewMetricDescriptor() descriptor.InitEmpty() descriptor.SetName("system.network.dropped_packets") descriptor.SetDescription("The number of packets dropped.") descriptor.SetUnit("1") - descriptor.SetType(pdata.MetricTypeMonotonicInt64) + descriptor.SetType(dataold.MetricTypeMonotonicInt64) return descriptor }() -var networkErrorsDescriptor = func() pdata.MetricDescriptor { - descriptor := pdata.NewMetricDescriptor() +var networkErrorsDescriptor = func() dataold.MetricDescriptor { + descriptor := dataold.NewMetricDescriptor() descriptor.InitEmpty() descriptor.SetName("system.network.errors") descriptor.SetDescription("The number of errors encountered") descriptor.SetUnit("1") - descriptor.SetType(pdata.MetricTypeMonotonicInt64) + descriptor.SetType(dataold.MetricTypeMonotonicInt64) return descriptor }() -var networkIODescriptor = func() pdata.MetricDescriptor { - descriptor := pdata.NewMetricDescriptor() +var networkIODescriptor = func() dataold.MetricDescriptor { + descriptor := dataold.NewMetricDescriptor() descriptor.InitEmpty() descriptor.SetName("system.network.io") descriptor.SetDescription("The number of bytes transmitted and received") descriptor.SetUnit("bytes") - descriptor.SetType(pdata.MetricTypeMonotonicInt64) + descriptor.SetType(dataold.MetricTypeMonotonicInt64) return descriptor }() -var networkTCPConnectionsDescriptor = func() pdata.MetricDescriptor { - descriptor := pdata.NewMetricDescriptor() +var networkTCPConnectionsDescriptor = func() dataold.MetricDescriptor { + descriptor := dataold.NewMetricDescriptor() descriptor.InitEmpty() descriptor.SetName("system.network.tcp_connections") descriptor.SetDescription("The number of tcp connections") descriptor.SetUnit("bytes") - descriptor.SetType(pdata.MetricTypeInt64) + descriptor.SetType(dataold.MetricTypeInt64) return descriptor }() diff --git a/receiver/hostmetricsreceiver/internal/scraper/networkscraper/network_scraper.go b/receiver/hostmetricsreceiver/internal/scraper/networkscraper/network_scraper.go index a3ecb838c35..ff1b947854a 100644 --- a/receiver/hostmetricsreceiver/internal/scraper/networkscraper/network_scraper.go +++ b/receiver/hostmetricsreceiver/internal/scraper/networkscraper/network_scraper.go @@ -24,6 +24,7 @@ import ( "go.opentelemetry.io/collector/component/componenterror" "go.opentelemetry.io/collector/consumer/pdata" + "go.opentelemetry.io/collector/internal/dataold" "go.opentelemetry.io/collector/internal/processor/filterset" "go.opentelemetry.io/collector/receiver/hostmetricsreceiver/internal" ) @@ -81,8 +82,8 @@ func (s *scraper) Close(_ context.Context) error { } // ScrapeMetrics -func (s *scraper) ScrapeMetrics(_ context.Context) (pdata.MetricSlice, error) { - metrics := pdata.NewMetricSlice() +func (s *scraper) ScrapeMetrics(_ context.Context) (dataold.MetricSlice, error) { + metrics := dataold.NewMetricSlice() var errors []error @@ -99,7 +100,7 @@ func (s *scraper) ScrapeMetrics(_ context.Context) (pdata.MetricSlice, error) { return metrics, componenterror.CombineErrors(errors) } -func (s *scraper) scrapeAndAppendNetworkCounterMetrics(metrics pdata.MetricSlice, startTime pdata.TimestampUnixNano) error { +func (s *scraper) 
scrapeAndAppendNetworkCounterMetrics(metrics dataold.MetricSlice, startTime pdata.TimestampUnixNano) error { now := internal.TimeToUnixNano(time.Now()) // get total stats only @@ -123,7 +124,7 @@ func (s *scraper) scrapeAndAppendNetworkCounterMetrics(metrics pdata.MetricSlice return nil } -func initializeNetworkPacketsMetric(metric pdata.Metric, metricDescriptor pdata.MetricDescriptor, startTime, now pdata.TimestampUnixNano, ioCountersSlice []net.IOCountersStat) { +func initializeNetworkPacketsMetric(metric dataold.Metric, metricDescriptor dataold.MetricDescriptor, startTime, now pdata.TimestampUnixNano, ioCountersSlice []net.IOCountersStat) { metricDescriptor.CopyTo(metric.MetricDescriptor()) idps := metric.Int64DataPoints() @@ -134,7 +135,7 @@ func initializeNetworkPacketsMetric(metric pdata.Metric, metricDescriptor pdata. } } -func initializeNetworkDroppedPacketsMetric(metric pdata.Metric, metricDescriptor pdata.MetricDescriptor, startTime, now pdata.TimestampUnixNano, ioCountersSlice []net.IOCountersStat) { +func initializeNetworkDroppedPacketsMetric(metric dataold.Metric, metricDescriptor dataold.MetricDescriptor, startTime, now pdata.TimestampUnixNano, ioCountersSlice []net.IOCountersStat) { metricDescriptor.CopyTo(metric.MetricDescriptor()) idps := metric.Int64DataPoints() @@ -145,7 +146,7 @@ func initializeNetworkDroppedPacketsMetric(metric pdata.Metric, metricDescriptor } } -func initializeNetworkErrorsMetric(metric pdata.Metric, metricDescriptor pdata.MetricDescriptor, startTime, now pdata.TimestampUnixNano, ioCountersSlice []net.IOCountersStat) { +func initializeNetworkErrorsMetric(metric dataold.Metric, metricDescriptor dataold.MetricDescriptor, startTime, now pdata.TimestampUnixNano, ioCountersSlice []net.IOCountersStat) { metricDescriptor.CopyTo(metric.MetricDescriptor()) idps := metric.Int64DataPoints() @@ -156,7 +157,7 @@ func initializeNetworkErrorsMetric(metric pdata.Metric, metricDescriptor pdata.M } } -func initializeNetworkIOMetric(metric pdata.Metric, metricDescriptor pdata.MetricDescriptor, startTime, now pdata.TimestampUnixNano, ioCountersSlice []net.IOCountersStat) { +func initializeNetworkIOMetric(metric dataold.Metric, metricDescriptor dataold.MetricDescriptor, startTime, now pdata.TimestampUnixNano, ioCountersSlice []net.IOCountersStat) { metricDescriptor.CopyTo(metric.MetricDescriptor()) idps := metric.Int64DataPoints() @@ -167,7 +168,7 @@ func initializeNetworkIOMetric(metric pdata.Metric, metricDescriptor pdata.Metri } } -func initializeNetworkDataPoint(dataPoint pdata.Int64DataPoint, startTime, now pdata.TimestampUnixNano, interfaceLabel, directionLabel string, value int64) { +func initializeNetworkDataPoint(dataPoint dataold.Int64DataPoint, startTime, now pdata.TimestampUnixNano, interfaceLabel, directionLabel string, value int64) { labelsMap := dataPoint.LabelsMap() labelsMap.Insert(interfaceLabelName, interfaceLabel) labelsMap.Insert(directionLabelName, directionLabel) @@ -176,7 +177,7 @@ func initializeNetworkDataPoint(dataPoint pdata.Int64DataPoint, startTime, now p dataPoint.SetValue(value) } -func (s *scraper) scrapeAndAppendNetworkTCPConnectionsMetric(metrics pdata.MetricSlice) error { +func (s *scraper) scrapeAndAppendNetworkTCPConnectionsMetric(metrics dataold.MetricSlice) error { now := internal.TimeToUnixNano(time.Now()) connections, err := s.connections("tcp") @@ -214,7 +215,7 @@ func getTCPConnectionStatusCounts(connections []net.ConnectionStat) map[string]i return tcpStatuses } -func initializeNetworkTCPConnectionsMetric(metric pdata.Metric, 
now pdata.TimestampUnixNano, connectionStateCounts map[string]int64) { +func initializeNetworkTCPConnectionsMetric(metric dataold.Metric, now pdata.TimestampUnixNano, connectionStateCounts map[string]int64) { networkTCPConnectionsDescriptor.CopyTo(metric.MetricDescriptor()) idps := metric.Int64DataPoints() @@ -227,7 +228,7 @@ func initializeNetworkTCPConnectionsMetric(metric pdata.Metric, now pdata.Timest } } -func initializeNetworkTCPConnectionsDataPoint(dataPoint pdata.Int64DataPoint, now pdata.TimestampUnixNano, stateLabel string, value int64) { +func initializeNetworkTCPConnectionsDataPoint(dataPoint dataold.Int64DataPoint, now pdata.TimestampUnixNano, stateLabel string, value int64) { labelsMap := dataPoint.LabelsMap() labelsMap.Insert(stateLabelName, stateLabel) dataPoint.SetTimestamp(now) diff --git a/receiver/hostmetricsreceiver/internal/scraper/networkscraper/network_scraper_test.go b/receiver/hostmetricsreceiver/internal/scraper/networkscraper/network_scraper_test.go index 7164d88d608..08e747c7d5b 100644 --- a/receiver/hostmetricsreceiver/internal/scraper/networkscraper/network_scraper_test.go +++ b/receiver/hostmetricsreceiver/internal/scraper/networkscraper/network_scraper_test.go @@ -24,6 +24,7 @@ import ( "github.com/stretchr/testify/require" "go.opentelemetry.io/collector/consumer/pdata" + "go.opentelemetry.io/collector/internal/dataold" "go.opentelemetry.io/collector/internal/processor/filterset" "go.opentelemetry.io/collector/receiver/hostmetricsreceiver/internal" ) @@ -142,7 +143,7 @@ func TestScrapeMetrics(t *testing.T) { } } -func assertNetworkIOMetricValid(t *testing.T, metric pdata.Metric, descriptor pdata.MetricDescriptor, startTime pdata.TimestampUnixNano) { +func assertNetworkIOMetricValid(t *testing.T, metric dataold.Metric, descriptor dataold.MetricDescriptor, startTime pdata.TimestampUnixNano) { internal.AssertDescriptorEqual(t, descriptor, metric.MetricDescriptor()) if startTime != 0 { internal.AssertInt64MetricStartTimeEquals(t, metric, startTime) @@ -153,7 +154,7 @@ func assertNetworkIOMetricValid(t *testing.T, metric pdata.Metric, descriptor pd internal.AssertInt64MetricLabelHasValue(t, metric, 1, directionLabelName, receiveDirectionLabelValue) } -func assertNetworkTCPConnectionsMetricValid(t *testing.T, metric pdata.Metric) { +func assertNetworkTCPConnectionsMetricValid(t *testing.T, metric dataold.Metric) { internal.AssertDescriptorEqual(t, networkTCPConnectionsDescriptor, metric.MetricDescriptor()) internal.AssertInt64MetricLabelExists(t, metric, 0, stateLabelName) assert.Equal(t, 12, metric.Int64DataPoints().Len()) diff --git a/receiver/hostmetricsreceiver/internal/scraper/obsreportscraper/obsreportresourcescraper.go b/receiver/hostmetricsreceiver/internal/scraper/obsreportscraper/obsreportresourcescraper.go index ad0741e0928..82fdfab64ee 100644 --- a/receiver/hostmetricsreceiver/internal/scraper/obsreportscraper/obsreportresourcescraper.go +++ b/receiver/hostmetricsreceiver/internal/scraper/obsreportscraper/obsreportresourcescraper.go @@ -19,7 +19,7 @@ import ( "go.opencensus.io/trace" - "go.opentelemetry.io/collector/consumer/pdata" + "go.opentelemetry.io/collector/internal/dataold" "go.opentelemetry.io/collector/receiver/hostmetricsreceiver/internal" ) @@ -42,7 +42,7 @@ func (s *resourceScraper) Close(ctx context.Context) error { } // ScrapeMetrics -func (s *resourceScraper) ScrapeMetrics(ctx context.Context) (pdata.ResourceMetricsSlice, error) { +func (s *resourceScraper) ScrapeMetrics(ctx context.Context) (dataold.ResourceMetricsSlice, error) { // 
TODO: Add metrics. ctx, span := trace.StartSpan(ctx, s.scrapeMetricsSpanName) defer span.End() diff --git a/receiver/hostmetricsreceiver/internal/scraper/obsreportscraper/obsreportresourcescraper_test.go b/receiver/hostmetricsreceiver/internal/scraper/obsreportscraper/obsreportresourcescraper_test.go index db8173f892d..fda6a1cb5fa 100644 --- a/receiver/hostmetricsreceiver/internal/scraper/obsreportscraper/obsreportresourcescraper_test.go +++ b/receiver/hostmetricsreceiver/internal/scraper/obsreportscraper/obsreportresourcescraper_test.go @@ -22,8 +22,8 @@ import ( "github.com/stretchr/testify/assert" "go.opencensus.io/trace" - "go.opentelemetry.io/collector/consumer/pdata" - "go.opentelemetry.io/collector/internal/data/testdata" + "go.opentelemetry.io/collector/internal/dataold" + "go.opentelemetry.io/collector/internal/dataold/testdataold" ) func TestWrapResourceScraper(t *testing.T) { @@ -66,11 +66,11 @@ func (s *testResourceScraper) Close(_ context.Context) error { } // ScrapeMetrics -func (s *testResourceScraper) ScrapeMetrics(ctx context.Context) (pdata.ResourceMetricsSlice, error) { +func (s *testResourceScraper) ScrapeMetrics(ctx context.Context) (dataold.ResourceMetricsSlice, error) { assert.NotNil(s.t, trace.FromContext(ctx)) return generateResourceMetricsSlice(), s.err } -func generateResourceMetricsSlice() pdata.ResourceMetricsSlice { - return testdata.GenerateMetricDataOneMetric().ResourceMetrics() +func generateResourceMetricsSlice() dataold.ResourceMetricsSlice { + return testdataold.GenerateMetricDataOneMetric().ResourceMetrics() } diff --git a/receiver/hostmetricsreceiver/internal/scraper/obsreportscraper/obsreportscraper.go b/receiver/hostmetricsreceiver/internal/scraper/obsreportscraper/obsreportscraper.go index 520b692f1c1..60dba7147f4 100644 --- a/receiver/hostmetricsreceiver/internal/scraper/obsreportscraper/obsreportscraper.go +++ b/receiver/hostmetricsreceiver/internal/scraper/obsreportscraper/obsreportscraper.go @@ -19,7 +19,7 @@ import ( "go.opencensus.io/trace" - "go.opentelemetry.io/collector/consumer/pdata" + "go.opentelemetry.io/collector/internal/dataold" "go.opentelemetry.io/collector/receiver/hostmetricsreceiver/internal" ) @@ -42,7 +42,7 @@ func (s *scraper) Close(ctx context.Context) error { } // ScrapeMetrics -func (s *scraper) ScrapeMetrics(ctx context.Context) (pdata.MetricSlice, error) { +func (s *scraper) ScrapeMetrics(ctx context.Context) (dataold.MetricSlice, error) { // TODO: Add metrics. 
ctx, span := trace.StartSpan(ctx, s.scrapeMetricsSpanName) defer span.End() diff --git a/receiver/hostmetricsreceiver/internal/scraper/obsreportscraper/obsreportscraper_test.go b/receiver/hostmetricsreceiver/internal/scraper/obsreportscraper/obsreportscraper_test.go index 6ae0e59f128..2e227d021ba 100644 --- a/receiver/hostmetricsreceiver/internal/scraper/obsreportscraper/obsreportscraper_test.go +++ b/receiver/hostmetricsreceiver/internal/scraper/obsreportscraper/obsreportscraper_test.go @@ -22,8 +22,8 @@ import ( "github.com/stretchr/testify/assert" "go.opencensus.io/trace" - "go.opentelemetry.io/collector/consumer/pdata" - "go.opentelemetry.io/collector/internal/data/testdata" + "go.opentelemetry.io/collector/internal/dataold" + "go.opentelemetry.io/collector/internal/dataold/testdataold" ) func TestWrapScraper(t *testing.T) { @@ -66,11 +66,11 @@ func (s *testScraper) Close(_ context.Context) error { } // ScrapeMetrics -func (s *testScraper) ScrapeMetrics(ctx context.Context) (pdata.MetricSlice, error) { +func (s *testScraper) ScrapeMetrics(ctx context.Context) (dataold.MetricSlice, error) { assert.NotNil(s.t, trace.FromContext(ctx)) return generateMetricsSlice(), s.err } -func generateMetricsSlice() pdata.MetricSlice { - return testdata.GenerateMetricDataOneMetric().ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics() +func generateMetricsSlice() dataold.MetricSlice { + return testdataold.GenerateMetricDataOneMetric().ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics() } diff --git a/receiver/hostmetricsreceiver/internal/scraper/processesscraper/processes_metadata.go b/receiver/hostmetricsreceiver/internal/scraper/processesscraper/processes_metadata.go index 2c12504c73a..e69da387856 100644 --- a/receiver/hostmetricsreceiver/internal/scraper/processesscraper/processes_metadata.go +++ b/receiver/hostmetricsreceiver/internal/scraper/processesscraper/processes_metadata.go @@ -15,27 +15,27 @@ package processesscraper import ( - "go.opentelemetry.io/collector/consumer/pdata" + "go.opentelemetry.io/collector/internal/dataold" ) // descriptors -var processesRunningDescriptor = func() pdata.MetricDescriptor { - descriptor := pdata.NewMetricDescriptor() +var processesRunningDescriptor = func() dataold.MetricDescriptor { + descriptor := dataold.NewMetricDescriptor() descriptor.InitEmpty() descriptor.SetName("system.processes.running") descriptor.SetDescription("Total number of running processes.") descriptor.SetUnit("1") - descriptor.SetType(pdata.MetricTypeInt64) + descriptor.SetType(dataold.MetricTypeInt64) return descriptor }() -var processesBlockedDescriptor = func() pdata.MetricDescriptor { - descriptor := pdata.NewMetricDescriptor() +var processesBlockedDescriptor = func() dataold.MetricDescriptor { + descriptor := dataold.NewMetricDescriptor() descriptor.InitEmpty() descriptor.SetName("system.processes.blocked") descriptor.SetDescription("Total number of blocked processes.") descriptor.SetUnit("1") - descriptor.SetType(pdata.MetricTypeInt64) + descriptor.SetType(dataold.MetricTypeInt64) return descriptor }() diff --git a/receiver/hostmetricsreceiver/internal/scraper/processesscraper/processes_scraper.go b/receiver/hostmetricsreceiver/internal/scraper/processesscraper/processes_scraper.go index 8a8424783fc..66a884aac79 100644 --- a/receiver/hostmetricsreceiver/internal/scraper/processesscraper/processes_scraper.go +++ b/receiver/hostmetricsreceiver/internal/scraper/processesscraper/processes_scraper.go @@ -21,6 +21,7 @@ import ( 
"github.com/shirou/gopsutil/load" "go.opentelemetry.io/collector/consumer/pdata" + "go.opentelemetry.io/collector/internal/dataold" ) // scraper for Processes Metrics @@ -56,8 +57,8 @@ func (s *scraper) Close(_ context.Context) error { } // ScrapeMetrics -func (s *scraper) ScrapeMetrics(_ context.Context) (pdata.MetricSlice, error) { - metrics := pdata.NewMetricSlice() +func (s *scraper) ScrapeMetrics(_ context.Context) (dataold.MetricSlice, error) { + metrics := dataold.NewMetricSlice() err := appendSystemSpecificProcessesMetrics(metrics, 0, s.misc) return metrics, err } diff --git a/receiver/hostmetricsreceiver/internal/scraper/processesscraper/processes_scraper_fallback.go b/receiver/hostmetricsreceiver/internal/scraper/processesscraper/processes_scraper_fallback.go index 47f2a98e7dc..b71d075cae5 100644 --- a/receiver/hostmetricsreceiver/internal/scraper/processesscraper/processes_scraper_fallback.go +++ b/receiver/hostmetricsreceiver/internal/scraper/processesscraper/processes_scraper_fallback.go @@ -17,9 +17,9 @@ package processesscraper import ( - "go.opentelemetry.io/collector/consumer/pdata" + "go.opentelemetry.io/collector/internal/dataold" ) -func appendSystemSpecificProcessesMetrics(metrics pdata.MetricSlice, startIndex int, miscFunc getMiscStats) error { +func appendSystemSpecificProcessesMetrics(metrics dataold.MetricSlice, startIndex int, miscFunc getMiscStats) error { return nil } diff --git a/receiver/hostmetricsreceiver/internal/scraper/processesscraper/processes_scraper_test.go b/receiver/hostmetricsreceiver/internal/scraper/processesscraper/processes_scraper_test.go index 0b8a3a2e7f5..0c7365a33e4 100644 --- a/receiver/hostmetricsreceiver/internal/scraper/processesscraper/processes_scraper_test.go +++ b/receiver/hostmetricsreceiver/internal/scraper/processesscraper/processes_scraper_test.go @@ -24,11 +24,11 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "go.opentelemetry.io/collector/consumer/pdata" + "go.opentelemetry.io/collector/internal/dataold" "go.opentelemetry.io/collector/receiver/hostmetricsreceiver/internal" ) -var systemSpecificMetrics = map[string][]pdata.MetricDescriptor{ +var systemSpecificMetrics = map[string][]dataold.MetricDescriptor{ "linux": {processesRunningDescriptor, processesBlockedDescriptor}, "darwin": {processesRunningDescriptor, processesBlockedDescriptor}, "freebsd": {processesRunningDescriptor, processesBlockedDescriptor}, @@ -83,7 +83,7 @@ func TestScrapeMetrics(t *testing.T) { } } -func assertProcessesMetricValid(t *testing.T, metric pdata.Metric, descriptor pdata.MetricDescriptor) { +func assertProcessesMetricValid(t *testing.T, metric dataold.Metric, descriptor dataold.MetricDescriptor) { internal.AssertDescriptorEqual(t, descriptor, metric.MetricDescriptor()) assert.Equal(t, metric.Int64DataPoints().Len(), 1) assert.Equal(t, metric.Int64DataPoints().At(0).LabelsMap().Len(), 0) diff --git a/receiver/hostmetricsreceiver/internal/scraper/processesscraper/processes_scraper_unix.go b/receiver/hostmetricsreceiver/internal/scraper/processesscraper/processes_scraper_unix.go index cf8a313c21b..8b431fed1c8 100644 --- a/receiver/hostmetricsreceiver/internal/scraper/processesscraper/processes_scraper_unix.go +++ b/receiver/hostmetricsreceiver/internal/scraper/processesscraper/processes_scraper_unix.go @@ -20,10 +20,11 @@ import ( "time" "go.opentelemetry.io/collector/consumer/pdata" + "go.opentelemetry.io/collector/internal/dataold" "go.opentelemetry.io/collector/receiver/hostmetricsreceiver/internal" ) -func 
appendSystemSpecificProcessesMetrics(metrics pdata.MetricSlice, startIndex int, miscFunc getMiscStats) error { +func appendSystemSpecificProcessesMetrics(metrics dataold.MetricSlice, startIndex int, miscFunc getMiscStats) error { now := internal.TimeToUnixNano(time.Now()) misc, err := miscFunc() if err != nil { @@ -36,7 +37,7 @@ func appendSystemSpecificProcessesMetrics(metrics pdata.MetricSlice, startIndex return nil } -func initializeProcessesMetric(metric pdata.Metric, descriptor pdata.MetricDescriptor, now pdata.TimestampUnixNano, value int64) { +func initializeProcessesMetric(metric dataold.Metric, descriptor dataold.MetricDescriptor, now pdata.TimestampUnixNano, value int64) { descriptor.CopyTo(metric.MetricDescriptor()) ddps := metric.Int64DataPoints() diff --git a/receiver/hostmetricsreceiver/internal/scraper/processscraper/process_metadata.go b/receiver/hostmetricsreceiver/internal/scraper/processscraper/process_metadata.go index 887332319d3..f95d8954450 100644 --- a/receiver/hostmetricsreceiver/internal/scraper/processscraper/process_metadata.go +++ b/receiver/hostmetricsreceiver/internal/scraper/processscraper/process_metadata.go @@ -15,7 +15,7 @@ package processscraper import ( - "go.opentelemetry.io/collector/consumer/pdata" + "go.opentelemetry.io/collector/internal/dataold" ) // labels @@ -42,42 +42,42 @@ const ( // descriptors -var cpuTimeDescriptor = func() pdata.MetricDescriptor { - descriptor := pdata.NewMetricDescriptor() +var cpuTimeDescriptor = func() dataold.MetricDescriptor { + descriptor := dataold.NewMetricDescriptor() descriptor.InitEmpty() descriptor.SetName("process.cpu.time") descriptor.SetDescription("Total CPU seconds broken down by different states.") descriptor.SetUnit("s") - descriptor.SetType(pdata.MetricTypeMonotonicDouble) + descriptor.SetType(dataold.MetricTypeMonotonicDouble) return descriptor }() -var physicalMemoryUsageDescriptor = func() pdata.MetricDescriptor { - descriptor := pdata.NewMetricDescriptor() +var physicalMemoryUsageDescriptor = func() dataold.MetricDescriptor { + descriptor := dataold.NewMetricDescriptor() descriptor.InitEmpty() descriptor.SetName("process.memory.physical_usage") descriptor.SetDescription("The amount of physical memory in use.") descriptor.SetUnit("bytes") - descriptor.SetType(pdata.MetricTypeInt64) + descriptor.SetType(dataold.MetricTypeInt64) return descriptor }() -var virtualMemoryUsageDescriptor = func() pdata.MetricDescriptor { - descriptor := pdata.NewMetricDescriptor() +var virtualMemoryUsageDescriptor = func() dataold.MetricDescriptor { + descriptor := dataold.NewMetricDescriptor() descriptor.InitEmpty() descriptor.SetName("process.memory.virtual_usage") descriptor.SetDescription("Virtual memory size.") descriptor.SetUnit("bytes") - descriptor.SetType(pdata.MetricTypeInt64) + descriptor.SetType(dataold.MetricTypeInt64) return descriptor }() -var diskIODescriptor = func() pdata.MetricDescriptor { - descriptor := pdata.NewMetricDescriptor() +var diskIODescriptor = func() dataold.MetricDescriptor { + descriptor := dataold.NewMetricDescriptor() descriptor.InitEmpty() descriptor.SetName("process.disk.io") descriptor.SetDescription("Disk bytes transferred.") descriptor.SetUnit("bytes") - descriptor.SetType(pdata.MetricTypeMonotonicInt64) + descriptor.SetType(dataold.MetricTypeMonotonicInt64) return descriptor }() diff --git a/receiver/hostmetricsreceiver/internal/scraper/processscraper/process_scraper.go b/receiver/hostmetricsreceiver/internal/scraper/processscraper/process_scraper.go index a66e7954ae0..dbb3e765a41 
100644 --- a/receiver/hostmetricsreceiver/internal/scraper/processscraper/process_scraper.go +++ b/receiver/hostmetricsreceiver/internal/scraper/processscraper/process_scraper.go @@ -25,6 +25,7 @@ import ( "go.opentelemetry.io/collector/component/componenterror" "go.opentelemetry.io/collector/consumer/pdata" + "go.opentelemetry.io/collector/internal/dataold" "go.opentelemetry.io/collector/internal/processor/filterset" "go.opentelemetry.io/collector/receiver/hostmetricsreceiver/internal" ) @@ -81,7 +82,7 @@ func (s *scraper) Close(_ context.Context) error { } // ScrapeMetrics -func (s *scraper) ScrapeMetrics(_ context.Context) (pdata.ResourceMetricsSlice, error) { +func (s *scraper) ScrapeMetrics(_ context.Context) (dataold.ResourceMetricsSlice, error) { var errs []error metadata, err := s.getProcessMetadata() @@ -89,7 +90,7 @@ func (s *scraper) ScrapeMetrics(_ context.Context) (pdata.ResourceMetricsSlice, errs = append(errs, err) } - rms := pdata.NewResourceMetricsSlice() + rms := dataold.NewResourceMetricsSlice() rms.Resize(len(metadata)) for i, md := range metadata { rm := rms.At(i) @@ -169,7 +170,7 @@ func (s *scraper) getProcessMetadata() ([]*processMetadata, error) { return metadata, componenterror.CombineErrors(errs) } -func scrapeAndAppendCPUTimeMetric(metrics pdata.MetricSlice, startTime, now pdata.TimestampUnixNano, handle processHandle) error { +func scrapeAndAppendCPUTimeMetric(metrics dataold.MetricSlice, startTime, now pdata.TimestampUnixNano, handle processHandle) error { times, err := handle.Times() if err != nil { return err @@ -181,7 +182,7 @@ func scrapeAndAppendCPUTimeMetric(metrics pdata.MetricSlice, startTime, now pdat return nil } -func initializeCPUTimeMetric(metric pdata.Metric, startTime, now pdata.TimestampUnixNano, times *cpu.TimesStat) { +func initializeCPUTimeMetric(metric dataold.Metric, startTime, now pdata.TimestampUnixNano, times *cpu.TimesStat) { cpuTimeDescriptor.CopyTo(metric.MetricDescriptor()) ddps := metric.DoubleDataPoints() @@ -189,7 +190,7 @@ func initializeCPUTimeMetric(metric pdata.Metric, startTime, now pdata.Timestamp appendCPUTimeStateDataPoints(ddps, startTime, now, times) } -func scrapeAndAppendMemoryUsageMetrics(metrics pdata.MetricSlice, now pdata.TimestampUnixNano, handle processHandle) error { +func scrapeAndAppendMemoryUsageMetrics(metrics dataold.MetricSlice, now pdata.TimestampUnixNano, handle processHandle) error { mem, err := handle.MemoryInfo() if err != nil { return err @@ -202,7 +203,7 @@ func scrapeAndAppendMemoryUsageMetrics(metrics pdata.MetricSlice, now pdata.Time return nil } -func initializeMemoryUsageMetric(metric pdata.Metric, descriptor pdata.MetricDescriptor, now pdata.TimestampUnixNano, usage int64) { +func initializeMemoryUsageMetric(metric dataold.Metric, descriptor dataold.MetricDescriptor, now pdata.TimestampUnixNano, usage int64) { descriptor.CopyTo(metric.MetricDescriptor()) idps := metric.Int64DataPoints() @@ -210,12 +211,12 @@ func initializeMemoryUsageMetric(metric pdata.Metric, descriptor pdata.MetricDes initializeMemoryUsageDataPoint(idps.At(0), now, usage) } -func initializeMemoryUsageDataPoint(dataPoint pdata.Int64DataPoint, now pdata.TimestampUnixNano, usage int64) { +func initializeMemoryUsageDataPoint(dataPoint dataold.Int64DataPoint, now pdata.TimestampUnixNano, usage int64) { dataPoint.SetTimestamp(now) dataPoint.SetValue(usage) } -func scrapeAndAppendDiskIOMetric(metrics pdata.MetricSlice, startTime, now pdata.TimestampUnixNano, handle processHandle) error { +func scrapeAndAppendDiskIOMetric(metrics 
dataold.MetricSlice, startTime, now pdata.TimestampUnixNano, handle processHandle) error { io, err := handle.IOCounters() if err != nil { return err @@ -227,7 +228,7 @@ func scrapeAndAppendDiskIOMetric(metrics pdata.MetricSlice, startTime, now pdata return nil } -func initializeDiskIOMetric(metric pdata.Metric, startTime, now pdata.TimestampUnixNano, io *process.IOCountersStat) { +func initializeDiskIOMetric(metric dataold.Metric, startTime, now pdata.TimestampUnixNano, io *process.IOCountersStat) { diskIODescriptor.CopyTo(metric.MetricDescriptor()) idps := metric.Int64DataPoints() @@ -236,7 +237,7 @@ func initializeDiskIOMetric(metric pdata.Metric, startTime, now pdata.TimestampU initializeDiskIODataPoint(idps.At(1), startTime, now, int64(io.WriteBytes), writeDirectionLabelValue) } -func initializeDiskIODataPoint(dataPoint pdata.Int64DataPoint, startTime, now pdata.TimestampUnixNano, value int64, directionLabel string) { +func initializeDiskIODataPoint(dataPoint dataold.Int64DataPoint, startTime, now pdata.TimestampUnixNano, value int64, directionLabel string) { labelsMap := dataPoint.LabelsMap() labelsMap.Insert(directionLabelName, directionLabel) dataPoint.SetStartTime(startTime) diff --git a/receiver/hostmetricsreceiver/internal/scraper/processscraper/process_scraper_linux.go b/receiver/hostmetricsreceiver/internal/scraper/processscraper/process_scraper_linux.go index 62dc40cde23..928242b9f9a 100644 --- a/receiver/hostmetricsreceiver/internal/scraper/processscraper/process_scraper_linux.go +++ b/receiver/hostmetricsreceiver/internal/scraper/processscraper/process_scraper_linux.go @@ -20,17 +20,18 @@ import ( "github.com/shirou/gopsutil/cpu" "go.opentelemetry.io/collector/consumer/pdata" + "go.opentelemetry.io/collector/internal/dataold" ) const cpuStatesLen = 3 -func appendCPUTimeStateDataPoints(ddps pdata.DoubleDataPointSlice, startTime, now pdata.TimestampUnixNano, cpuTime *cpu.TimesStat) { +func appendCPUTimeStateDataPoints(ddps dataold.DoubleDataPointSlice, startTime, now pdata.TimestampUnixNano, cpuTime *cpu.TimesStat) { initializeCPUTimeDataPoint(ddps.At(0), startTime, now, cpuTime.User, userStateLabelValue) initializeCPUTimeDataPoint(ddps.At(1), startTime, now, cpuTime.System, systemStateLabelValue) initializeCPUTimeDataPoint(ddps.At(2), startTime, now, cpuTime.Iowait, waitStateLabelValue) } -func initializeCPUTimeDataPoint(dataPoint pdata.DoubleDataPoint, startTime, now pdata.TimestampUnixNano, value float64, stateLabel string) { +func initializeCPUTimeDataPoint(dataPoint dataold.DoubleDataPoint, startTime, now pdata.TimestampUnixNano, value float64, stateLabel string) { labelsMap := dataPoint.LabelsMap() labelsMap.Insert(stateLabelName, stateLabel) dataPoint.SetStartTime(startTime) diff --git a/receiver/hostmetricsreceiver/internal/scraper/processscraper/process_scraper_others.go b/receiver/hostmetricsreceiver/internal/scraper/processscraper/process_scraper_others.go index bd6237f187b..8256dfebae8 100644 --- a/receiver/hostmetricsreceiver/internal/scraper/processscraper/process_scraper_others.go +++ b/receiver/hostmetricsreceiver/internal/scraper/processscraper/process_scraper_others.go @@ -20,11 +20,12 @@ import ( "github.com/shirou/gopsutil/cpu" "go.opentelemetry.io/collector/consumer/pdata" + "go.opentelemetry.io/collector/internal/dataold" ) const cpuStatesLen = 0 -func appendCPUTimeStateDataPoints(ddps pdata.DoubleDataPointSlice, startTime, now pdata.TimestampUnixNano, cpuTime *cpu.TimesStat) { +func appendCPUTimeStateDataPoints(ddps dataold.DoubleDataPointSlice, 
startTime, now pdata.TimestampUnixNano, cpuTime *cpu.TimesStat) { } func getProcessExecutable(proc processHandle) (*executableMetadata, error) { diff --git a/receiver/hostmetricsreceiver/internal/scraper/processscraper/process_scraper_test.go b/receiver/hostmetricsreceiver/internal/scraper/processscraper/process_scraper_test.go index 7ac1d88670a..ea29da5614a 100644 --- a/receiver/hostmetricsreceiver/internal/scraper/processscraper/process_scraper_test.go +++ b/receiver/hostmetricsreceiver/internal/scraper/processscraper/process_scraper_test.go @@ -29,6 +29,7 @@ import ( "github.com/stretchr/testify/require" "go.opentelemetry.io/collector/consumer/pdata" + "go.opentelemetry.io/collector/internal/dataold" "go.opentelemetry.io/collector/internal/processor/filterset" "go.opentelemetry.io/collector/receiver/hostmetricsreceiver/internal" "go.opentelemetry.io/collector/translator/conventions" @@ -77,7 +78,7 @@ func TestScrapeMetrics(t *testing.T) { assertSameTimeStampForAllMetricsWithinResource(t, resourceMetrics) } -func assertProcessResourceAttributesExist(t *testing.T, resourceMetrics pdata.ResourceMetricsSlice) { +func assertProcessResourceAttributesExist(t *testing.T, resourceMetrics dataold.ResourceMetricsSlice) { for i := 0; i < resourceMetrics.Len(); i++ { attr := resourceMetrics.At(0).Resource().Attributes() internal.AssertContainsAttribute(t, attr, conventions.AttributeProcessID) @@ -89,7 +90,7 @@ func assertProcessResourceAttributesExist(t *testing.T, resourceMetrics pdata.Re } } -func assertCPUTimeMetricValid(t *testing.T, resourceMetrics pdata.ResourceMetricsSlice, startTime pdata.TimestampUnixNano) { +func assertCPUTimeMetricValid(t *testing.T, resourceMetrics dataold.ResourceMetricsSlice, startTime pdata.TimestampUnixNano) { cpuTimeMetric := getMetric(t, cpuTimeDescriptor, resourceMetrics) internal.AssertDescriptorEqual(t, cpuTimeDescriptor, cpuTimeMetric.MetricDescriptor()) if startTime != 0 { @@ -102,12 +103,12 @@ func assertCPUTimeMetricValid(t *testing.T, resourceMetrics pdata.ResourceMetric } } -func assertMemoryUsageMetricValid(t *testing.T, descriptor pdata.MetricDescriptor, resourceMetrics pdata.ResourceMetricsSlice) { +func assertMemoryUsageMetricValid(t *testing.T, descriptor dataold.MetricDescriptor, resourceMetrics dataold.ResourceMetricsSlice) { memoryUsageMetric := getMetric(t, descriptor, resourceMetrics) internal.AssertDescriptorEqual(t, descriptor, memoryUsageMetric.MetricDescriptor()) } -func assertDiskIOMetricValid(t *testing.T, resourceMetrics pdata.ResourceMetricsSlice, startTime pdata.TimestampUnixNano) { +func assertDiskIOMetricValid(t *testing.T, resourceMetrics dataold.ResourceMetricsSlice, startTime pdata.TimestampUnixNano) { diskIOMetric := getMetric(t, diskIODescriptor, resourceMetrics) internal.AssertDescriptorEqual(t, diskIODescriptor, diskIOMetric.MetricDescriptor()) if startTime != 0 { @@ -117,7 +118,7 @@ func assertDiskIOMetricValid(t *testing.T, resourceMetrics pdata.ResourceMetrics internal.AssertInt64MetricLabelHasValue(t, diskIOMetric, 1, directionLabelName, writeDirectionLabelValue) } -func assertSameTimeStampForAllMetricsWithinResource(t *testing.T, resourceMetrics pdata.ResourceMetricsSlice) { +func assertSameTimeStampForAllMetricsWithinResource(t *testing.T, resourceMetrics dataold.ResourceMetricsSlice) { for i := 0; i < resourceMetrics.Len(); i++ { ilms := resourceMetrics.At(i).InstrumentationLibraryMetrics() for j := 0; j < ilms.Len(); j++ { @@ -126,7 +127,7 @@ func assertSameTimeStampForAllMetricsWithinResource(t *testing.T, 
resourceMetric } } -func getMetric(t *testing.T, descriptor pdata.MetricDescriptor, rms pdata.ResourceMetricsSlice) pdata.Metric { +func getMetric(t *testing.T, descriptor dataold.MetricDescriptor, rms dataold.ResourceMetricsSlice) dataold.Metric { for i := 0; i < rms.Len(); i++ { metrics := getMetricSlice(t, rms.At(i)) for j := 0; j < metrics.Len(); j++ { @@ -138,10 +139,10 @@ func getMetric(t *testing.T, descriptor pdata.MetricDescriptor, rms pdata.Resour } require.Fail(t, fmt.Sprintf("no metric with name %s was returned", descriptor.Name())) - return pdata.NewMetric() + return dataold.NewMetric() } -func getMetricSlice(t *testing.T, rm pdata.ResourceMetrics) pdata.MetricSlice { +func getMetricSlice(t *testing.T, rm dataold.ResourceMetrics) dataold.MetricSlice { ilms := rm.InstrumentationLibraryMetrics() require.Equal(t, 1, ilms.Len()) return ilms.At(0).Metrics() diff --git a/receiver/hostmetricsreceiver/internal/scraper/processscraper/process_scraper_windows.go b/receiver/hostmetricsreceiver/internal/scraper/processscraper/process_scraper_windows.go index 45d0dfd345c..263ade48407 100644 --- a/receiver/hostmetricsreceiver/internal/scraper/processscraper/process_scraper_windows.go +++ b/receiver/hostmetricsreceiver/internal/scraper/processscraper/process_scraper_windows.go @@ -23,16 +23,17 @@ import ( "github.com/shirou/gopsutil/cpu" "go.opentelemetry.io/collector/consumer/pdata" + "go.opentelemetry.io/collector/internal/dataold" ) const cpuStatesLen = 2 -func appendCPUTimeStateDataPoints(ddps pdata.DoubleDataPointSlice, startTime, now pdata.TimestampUnixNano, cpuTime *cpu.TimesStat) { +func appendCPUTimeStateDataPoints(ddps dataold.DoubleDataPointSlice, startTime, now pdata.TimestampUnixNano, cpuTime *cpu.TimesStat) { initializeCPUTimeDataPoint(ddps.At(0), startTime, now, cpuTime.User, userStateLabelValue) initializeCPUTimeDataPoint(ddps.At(1), startTime, now, cpuTime.System, systemStateLabelValue) } -func initializeCPUTimeDataPoint(dataPoint pdata.DoubleDataPoint, startTime, now pdata.TimestampUnixNano, value float64, stateLabel string) { +func initializeCPUTimeDataPoint(dataPoint dataold.DoubleDataPoint, startTime, now pdata.TimestampUnixNano, value float64, stateLabel string) { labelsMap := dataPoint.LabelsMap() labelsMap.Insert(stateLabelName, stateLabel) dataPoint.SetStartTime(startTime) diff --git a/receiver/hostmetricsreceiver/internal/scraper/swapscraper/swap_metadata.go b/receiver/hostmetricsreceiver/internal/scraper/swapscraper/swap_metadata.go index b78b0205033..cbca2791e9f 100644 --- a/receiver/hostmetricsreceiver/internal/scraper/swapscraper/swap_metadata.go +++ b/receiver/hostmetricsreceiver/internal/scraper/swapscraper/swap_metadata.go @@ -15,7 +15,7 @@ package swapscraper import ( - "go.opentelemetry.io/collector/consumer/pdata" + "go.opentelemetry.io/collector/internal/dataold" ) // labels @@ -49,32 +49,32 @@ const ( minorTypeLabelValue = "minor" ) -var swapUsageDescriptor = func() pdata.MetricDescriptor { - descriptor := pdata.NewMetricDescriptor() +var swapUsageDescriptor = func() dataold.MetricDescriptor { + descriptor := dataold.NewMetricDescriptor() descriptor.InitEmpty() descriptor.SetName("system.swap.usage") descriptor.SetDescription("Swap (unix) or pagefile (windows) usage.") descriptor.SetUnit("pages") - descriptor.SetType(pdata.MetricTypeInt64) + descriptor.SetType(dataold.MetricTypeInt64) return descriptor }() -var swapPagingDescriptor = func() pdata.MetricDescriptor { - descriptor := pdata.NewMetricDescriptor() +var swapPagingDescriptor = func() 
dataold.MetricDescriptor { + descriptor := dataold.NewMetricDescriptor() descriptor.InitEmpty() descriptor.SetName("system.swap.paging_ops") descriptor.SetDescription("The number of paging operations.") descriptor.SetUnit("1") - descriptor.SetType(pdata.MetricTypeMonotonicInt64) + descriptor.SetType(dataold.MetricTypeMonotonicInt64) return descriptor }() -var swapPageFaultsDescriptor = func() pdata.MetricDescriptor { - descriptor := pdata.NewMetricDescriptor() +var swapPageFaultsDescriptor = func() dataold.MetricDescriptor { + descriptor := dataold.NewMetricDescriptor() descriptor.InitEmpty() descriptor.SetName("system.swap.page_faults") descriptor.SetDescription("The number of page faults.") descriptor.SetUnit("1") - descriptor.SetType(pdata.MetricTypeMonotonicInt64) + descriptor.SetType(dataold.MetricTypeMonotonicInt64) return descriptor }() diff --git a/receiver/hostmetricsreceiver/internal/scraper/swapscraper/swap_scraper_others.go b/receiver/hostmetricsreceiver/internal/scraper/swapscraper/swap_scraper_others.go index 7e9697b3d9f..40230ff3437 100644 --- a/receiver/hostmetricsreceiver/internal/scraper/swapscraper/swap_scraper_others.go +++ b/receiver/hostmetricsreceiver/internal/scraper/swapscraper/swap_scraper_others.go @@ -25,6 +25,7 @@ import ( "go.opentelemetry.io/collector/component/componenterror" "go.opentelemetry.io/collector/consumer/pdata" + "go.opentelemetry.io/collector/internal/dataold" "go.opentelemetry.io/collector/receiver/hostmetricsreceiver/internal" ) @@ -61,8 +62,8 @@ func (s *scraper) Close(_ context.Context) error { } // ScrapeMetrics -func (s *scraper) ScrapeMetrics(_ context.Context) (pdata.MetricSlice, error) { - metrics := pdata.NewMetricSlice() +func (s *scraper) ScrapeMetrics(_ context.Context) (dataold.MetricSlice, error) { + metrics := dataold.NewMetricSlice() var errors []error @@ -79,7 +80,7 @@ func (s *scraper) ScrapeMetrics(_ context.Context) (pdata.MetricSlice, error) { return metrics, componenterror.CombineErrors(errors) } -func (s *scraper) scrapeAndAppendSwapUsageMetric(metrics pdata.MetricSlice) error { +func (s *scraper) scrapeAndAppendSwapUsageMetric(metrics dataold.MetricSlice) error { now := internal.TimeToUnixNano(time.Now()) vmem, err := s.virtualMemory() if err != nil { @@ -92,7 +93,7 @@ func (s *scraper) scrapeAndAppendSwapUsageMetric(metrics pdata.MetricSlice) erro return nil } -func initializeSwapUsageMetric(metric pdata.Metric, now pdata.TimestampUnixNano, vmem *mem.VirtualMemoryStat) { +func initializeSwapUsageMetric(metric dataold.Metric, now pdata.TimestampUnixNano, vmem *mem.VirtualMemoryStat) { swapUsageDescriptor.CopyTo(metric.MetricDescriptor()) idps := metric.Int64DataPoints() @@ -102,14 +103,14 @@ func initializeSwapUsageMetric(metric pdata.Metric, now pdata.TimestampUnixNano, initializeSwapUsageDataPoint(idps.At(2), now, cachedLabelValue, int64(vmem.SwapCached)) } -func initializeSwapUsageDataPoint(dataPoint pdata.Int64DataPoint, now pdata.TimestampUnixNano, stateLabel string, value int64) { +func initializeSwapUsageDataPoint(dataPoint dataold.Int64DataPoint, now pdata.TimestampUnixNano, stateLabel string, value int64) { labelsMap := dataPoint.LabelsMap() labelsMap.Insert(stateLabelName, stateLabel) dataPoint.SetTimestamp(now) dataPoint.SetValue(value) } -func (s *scraper) scrapeAndAppendPagingMetrics(metrics pdata.MetricSlice) error { +func (s *scraper) scrapeAndAppendPagingMetrics(metrics dataold.MetricSlice) error { now := internal.TimeToUnixNano(time.Now()) swap, err := s.swapMemory() if err != nil { @@ -123,7 +124,7 @@ 
func (s *scraper) scrapeAndAppendPagingMetrics(metrics pdata.MetricSlice) error return nil } -func initializePagingMetric(metric pdata.Metric, startTime, now pdata.TimestampUnixNano, swap *mem.SwapMemoryStat) { +func initializePagingMetric(metric dataold.Metric, startTime, now pdata.TimestampUnixNano, swap *mem.SwapMemoryStat) { swapPagingDescriptor.CopyTo(metric.MetricDescriptor()) idps := metric.Int64DataPoints() @@ -134,7 +135,7 @@ func initializePagingMetric(metric pdata.Metric, startTime, now pdata.TimestampU initializePagingDataPoint(idps.At(3), startTime, now, minorTypeLabelValue, outDirectionLabelValue, int64(swap.PgOut)) } -func initializePagingDataPoint(dataPoint pdata.Int64DataPoint, startTime, now pdata.TimestampUnixNano, typeLabel string, directionLabel string, value int64) { +func initializePagingDataPoint(dataPoint dataold.Int64DataPoint, startTime, now pdata.TimestampUnixNano, typeLabel string, directionLabel string, value int64) { labelsMap := dataPoint.LabelsMap() labelsMap.Insert(typeLabelName, typeLabel) labelsMap.Insert(directionLabelName, directionLabel) @@ -143,7 +144,7 @@ func initializePagingDataPoint(dataPoint pdata.Int64DataPoint, startTime, now pd dataPoint.SetValue(value) } -func initializePageFaultsMetric(metric pdata.Metric, startTime, now pdata.TimestampUnixNano, swap *mem.SwapMemoryStat) { +func initializePageFaultsMetric(metric dataold.Metric, startTime, now pdata.TimestampUnixNano, swap *mem.SwapMemoryStat) { swapPageFaultsDescriptor.CopyTo(metric.MetricDescriptor()) idps := metric.Int64DataPoints() @@ -152,7 +153,7 @@ func initializePageFaultsMetric(metric pdata.Metric, startTime, now pdata.Timest // TODO add swap.PgMajFault once available in gopsutil } -func initializePageFaultDataPoint(dataPoint pdata.Int64DataPoint, startTime, now pdata.TimestampUnixNano, typeLabel string, value int64) { +func initializePageFaultDataPoint(dataPoint dataold.Int64DataPoint, startTime, now pdata.TimestampUnixNano, typeLabel string, value int64) { dataPoint.LabelsMap().Insert(typeLabelName, typeLabel) dataPoint.SetStartTime(startTime) dataPoint.SetTimestamp(now) diff --git a/receiver/hostmetricsreceiver/internal/scraper/swapscraper/swap_scraper_test.go b/receiver/hostmetricsreceiver/internal/scraper/swapscraper/swap_scraper_test.go index b621a0cde6e..77ddcbfd0df 100644 --- a/receiver/hostmetricsreceiver/internal/scraper/swapscraper/swap_scraper_test.go +++ b/receiver/hostmetricsreceiver/internal/scraper/swapscraper/swap_scraper_test.go @@ -23,6 +23,7 @@ import ( "github.com/stretchr/testify/require" "go.opentelemetry.io/collector/consumer/pdata" + "go.opentelemetry.io/collector/internal/dataold" "go.opentelemetry.io/collector/receiver/hostmetricsreceiver/internal" ) @@ -52,7 +53,7 @@ func TestScrapeMetrics(t *testing.T) { internal.AssertSameTimeStampForMetrics(t, metrics, 1, metrics.Len()) } -func assertSwapUsageMetricValid(t *testing.T, hostSwapUsageMetric pdata.Metric) { +func assertSwapUsageMetricValid(t *testing.T, hostSwapUsageMetric dataold.Metric) { internal.AssertDescriptorEqual(t, swapUsageDescriptor, hostSwapUsageMetric.MetricDescriptor()) // it's valid for a system to have no swap space / paging file, so if no data points were returned, do no validation @@ -81,7 +82,7 @@ func assertSwapUsageMetricValid(t *testing.T, hostSwapUsageMetric pdata.Metric) } } -func assertPagingMetricValid(t *testing.T, pagingMetric pdata.Metric, startTime pdata.TimestampUnixNano) { +func assertPagingMetricValid(t *testing.T, pagingMetric dataold.Metric, startTime 
pdata.TimestampUnixNano) { internal.AssertDescriptorEqual(t, swapPagingDescriptor, pagingMetric.MetricDescriptor()) if startTime != 0 { internal.AssertInt64MetricStartTimeEquals(t, pagingMetric, startTime) @@ -106,7 +107,7 @@ func assertPagingMetricValid(t *testing.T, pagingMetric pdata.Metric, startTime } } -func assertPageFaultsMetricValid(t *testing.T, pageFaultsMetric pdata.Metric, startTime pdata.TimestampUnixNano) { +func assertPageFaultsMetricValid(t *testing.T, pageFaultsMetric dataold.Metric, startTime pdata.TimestampUnixNano) { internal.AssertDescriptorEqual(t, swapPageFaultsDescriptor, pageFaultsMetric.MetricDescriptor()) if startTime != 0 { internal.AssertInt64MetricStartTimeEquals(t, pageFaultsMetric, startTime) diff --git a/receiver/hostmetricsreceiver/internal/scraper/swapscraper/swap_scraper_windows.go b/receiver/hostmetricsreceiver/internal/scraper/swapscraper/swap_scraper_windows.go index 30e30c107d1..86b3594e407 100644 --- a/receiver/hostmetricsreceiver/internal/scraper/swapscraper/swap_scraper_windows.go +++ b/receiver/hostmetricsreceiver/internal/scraper/swapscraper/swap_scraper_windows.go @@ -23,6 +23,7 @@ import ( "go.opentelemetry.io/collector/component/componenterror" "go.opentelemetry.io/collector/consumer/pdata" + "go.opentelemetry.io/collector/internal/dataold" "go.opentelemetry.io/collector/receiver/hostmetricsreceiver/internal" "go.opentelemetry.io/collector/receiver/hostmetricsreceiver/internal/windows/pdh" ) @@ -91,8 +92,8 @@ func (s *scraper) Close(_ context.Context) error { } // ScrapeMetrics -func (s *scraper) ScrapeMetrics(_ context.Context) (pdata.MetricSlice, error) { - metrics := pdata.NewMetricSlice() +func (s *scraper) ScrapeMetrics(_ context.Context) (dataold.MetricSlice, error) { + metrics := dataold.NewMetricSlice() var errors []error @@ -109,7 +110,7 @@ func (s *scraper) ScrapeMetrics(_ context.Context) (pdata.MetricSlice, error) { return metrics, componenterror.CombineErrors(errors) } -func (s *scraper) scrapeAndAppendSwapUsageMetric(metrics pdata.MetricSlice) error { +func (s *scraper) scrapeAndAppendSwapUsageMetric(metrics dataold.MetricSlice) error { now := internal.TimeToUnixNano(time.Now()) pageFiles, err := s.pageFileStats() if err != nil { @@ -122,7 +123,7 @@ func (s *scraper) scrapeAndAppendSwapUsageMetric(metrics pdata.MetricSlice) erro return nil } -func initializeSwapUsageMetric(metric pdata.Metric, now pdata.TimestampUnixNano, pageFiles []*pageFileData) { +func initializeSwapUsageMetric(metric dataold.Metric, now pdata.TimestampUnixNano, pageFiles []*pageFileData) { swapUsageDescriptor.CopyTo(metric.MetricDescriptor()) idps := metric.Int64DataPoints() @@ -136,7 +137,7 @@ func initializeSwapUsageMetric(metric pdata.Metric, now pdata.TimestampUnixNano, } } -func initializeSwapUsageDataPoint(dataPoint pdata.Int64DataPoint, now pdata.TimestampUnixNano, deviceLabel string, stateLabel string, value int64) { +func initializeSwapUsageDataPoint(dataPoint dataold.Int64DataPoint, now pdata.TimestampUnixNano, deviceLabel string, stateLabel string, value int64) { labelsMap := dataPoint.LabelsMap() labelsMap.Insert(deviceLabelName, deviceLabel) labelsMap.Insert(stateLabelName, stateLabel) @@ -144,7 +145,7 @@ func initializeSwapUsageDataPoint(dataPoint pdata.Int64DataPoint, now pdata.Time dataPoint.SetValue(value) } -func (s *scraper) scrapeAndAppendPagingMetric(metrics pdata.MetricSlice) error { +func (s *scraper) scrapeAndAppendPagingMetric(metrics dataold.MetricSlice) error { now := time.Now() durationSinceLastScraped := 
now.Sub(s.prevPagingScrapeTime).Seconds() s.prevPagingScrapeTime = now @@ -169,7 +170,7 @@ func (s *scraper) scrapeAndAppendPagingMetric(metrics pdata.MetricSlice) error { return nil } -func initializePagingMetric(metric pdata.Metric, startTime, now pdata.TimestampUnixNano, reads float64, writes float64) { +func initializePagingMetric(metric dataold.Metric, startTime, now pdata.TimestampUnixNano, reads float64, writes float64) { swapPagingDescriptor.CopyTo(metric.MetricDescriptor()) idps := metric.Int64DataPoints() @@ -178,7 +179,7 @@ func initializePagingMetric(metric pdata.Metric, startTime, now pdata.TimestampU initializePagingDataPoint(idps.At(1), startTime, now, outDirectionLabelValue, writes) } -func initializePagingDataPoint(dataPoint pdata.Int64DataPoint, startTime, now pdata.TimestampUnixNano, directionLabel string, value float64) { +func initializePagingDataPoint(dataPoint dataold.Int64DataPoint, startTime, now pdata.TimestampUnixNano, directionLabel string, value float64) { labelsMap := dataPoint.LabelsMap() labelsMap.Insert(typeLabelName, majorTypeLabelValue) labelsMap.Insert(directionLabelName, directionLabel) diff --git a/receiver/hostmetricsreceiver/internal/testutils.go b/receiver/hostmetricsreceiver/internal/testutils.go index 53778c2b9aa..61053ef256c 100644 --- a/receiver/hostmetricsreceiver/internal/testutils.go +++ b/receiver/hostmetricsreceiver/internal/testutils.go @@ -21,6 +21,7 @@ import ( "github.com/stretchr/testify/require" "go.opentelemetry.io/collector/consumer/pdata" + "go.opentelemetry.io/collector/internal/dataold" ) func AssertContainsAttribute(t *testing.T, attr pdata.AttributeMap, key string) { @@ -28,54 +29,54 @@ func AssertContainsAttribute(t *testing.T, attr pdata.AttributeMap, key string) assert.True(t, ok) } -func AssertDescriptorEqual(t *testing.T, expected pdata.MetricDescriptor, actual pdata.MetricDescriptor) { +func AssertDescriptorEqual(t *testing.T, expected dataold.MetricDescriptor, actual dataold.MetricDescriptor) { assert.Equal(t, expected.Name(), actual.Name()) assert.Equal(t, expected.Description(), actual.Description()) assert.Equal(t, expected.Unit(), actual.Unit()) assert.Equal(t, expected.Type(), actual.Type()) } -func AssertInt64MetricLabelHasValue(t *testing.T, metric pdata.Metric, index int, labelName string, expectedVal string) { +func AssertInt64MetricLabelHasValue(t *testing.T, metric dataold.Metric, index int, labelName string, expectedVal string) { val, ok := metric.Int64DataPoints().At(index).LabelsMap().Get(labelName) assert.Truef(t, ok, "Missing label %q in metric %q", labelName, metric.MetricDescriptor().Name()) assert.Equal(t, expectedVal, val.Value()) } -func AssertDoubleMetricLabelHasValue(t *testing.T, metric pdata.Metric, index int, labelName string, expectedVal string) { +func AssertDoubleMetricLabelHasValue(t *testing.T, metric dataold.Metric, index int, labelName string, expectedVal string) { val, ok := metric.DoubleDataPoints().At(index).LabelsMap().Get(labelName) assert.Truef(t, ok, "Missing label %q in metric %q", labelName, metric.MetricDescriptor().Name()) assert.Equal(t, expectedVal, val.Value()) } -func AssertInt64MetricLabelExists(t *testing.T, metric pdata.Metric, index int, labelName string) { +func AssertInt64MetricLabelExists(t *testing.T, metric dataold.Metric, index int, labelName string) { _, ok := metric.Int64DataPoints().At(index).LabelsMap().Get(labelName) assert.Truef(t, ok, "Missing label %q in metric %q", labelName, metric.MetricDescriptor().Name()) } -func AssertDoubleMetricLabelExists(t 
*testing.T, metric pdata.Metric, index int, labelName string) { +func AssertDoubleMetricLabelExists(t *testing.T, metric dataold.Metric, index int, labelName string) { _, ok := metric.DoubleDataPoints().At(index).LabelsMap().Get(labelName) assert.Truef(t, ok, "Missing label %q in metric %q", labelName, metric.MetricDescriptor().Name()) } -func AssertInt64MetricStartTimeEquals(t *testing.T, metric pdata.Metric, startTime pdata.TimestampUnixNano) { +func AssertInt64MetricStartTimeEquals(t *testing.T, metric dataold.Metric, startTime pdata.TimestampUnixNano) { idps := metric.Int64DataPoints() for i := 0; i < idps.Len(); i++ { require.Equal(t, startTime, idps.At(i).StartTime()) } } -func AssertDoubleMetricStartTimeEquals(t *testing.T, metric pdata.Metric, startTime pdata.TimestampUnixNano) { +func AssertDoubleMetricStartTimeEquals(t *testing.T, metric dataold.Metric, startTime pdata.TimestampUnixNano) { ddps := metric.DoubleDataPoints() for i := 0; i < ddps.Len(); i++ { require.Equal(t, startTime, ddps.At(i).StartTime()) } } -func AssertSameTimeStampForAllMetrics(t *testing.T, metrics pdata.MetricSlice) { +func AssertSameTimeStampForAllMetrics(t *testing.T, metrics dataold.MetricSlice) { AssertSameTimeStampForMetrics(t, metrics, 0, metrics.Len()) } -func AssertSameTimeStampForMetrics(t *testing.T, metrics pdata.MetricSlice, startIdx, endIdx int) { +func AssertSameTimeStampForMetrics(t *testing.T, metrics dataold.MetricSlice, startIdx, endIdx int) { var ts pdata.TimestampUnixNano for i := startIdx; i < endIdx; i++ { metric := metrics.At(i) diff --git a/receiver/hostmetricsreceiver/internal/utils.go b/receiver/hostmetricsreceiver/internal/utils.go index a563370fecb..e834643c523 100644 --- a/receiver/hostmetricsreceiver/internal/utils.go +++ b/receiver/hostmetricsreceiver/internal/utils.go @@ -18,11 +18,11 @@ import ( "time" "go.opentelemetry.io/collector/consumer/pdata" - "go.opentelemetry.io/collector/internal/data" + "go.opentelemetry.io/collector/internal/dataold" ) // Initializes a metric with a metric slice and returns it. 
-func InitializeMetricSlice(metricData data.MetricData) pdata.MetricSlice { +func InitializeMetricSlice(metricData dataold.MetricData) dataold.MetricSlice { rms := metricData.ResourceMetrics() rms.Resize(1) rm := rms.At(0) diff --git a/receiver/hostmetricsreceiver/internal/windows/pdh/performance_counter_utils.go b/receiver/hostmetricsreceiver/internal/windows/pdh/performance_counter_utils.go index 10813271772..e3a68c62dfa 100644 --- a/receiver/hostmetricsreceiver/internal/windows/pdh/performance_counter_utils.go +++ b/receiver/hostmetricsreceiver/internal/windows/pdh/performance_counter_utils.go @@ -20,6 +20,7 @@ import ( "time" "go.opentelemetry.io/collector/consumer/pdata" + "go.opentelemetry.io/collector/internal/dataold" "go.opentelemetry.io/collector/receiver/hostmetricsreceiver/internal/third_party/telegraf/win_perf_counters" ) @@ -29,10 +30,10 @@ import ( // The performance counters' "instance" will be recorded // against the supplied label name func InitializeMetric( - metric pdata.Metric, + metric dataold.Metric, vals []win_perf_counters.CounterValue, instanceNameLabel string, -) pdata.Metric { +) dataold.Metric { ddps := metric.DoubleDataPoints() ddps.Resize(len(vals)) diff --git a/receiver/hostmetricsreceiver/internal/windows/pdh/performance_counter_utils_test.go b/receiver/hostmetricsreceiver/internal/windows/pdh/performance_counter_utils_test.go index 36fab750951..cc0eb0847d9 100644 --- a/receiver/hostmetricsreceiver/internal/windows/pdh/performance_counter_utils_test.go +++ b/receiver/hostmetricsreceiver/internal/windows/pdh/performance_counter_utils_test.go @@ -22,13 +22,14 @@ import ( "github.com/stretchr/testify/assert" "go.opentelemetry.io/collector/consumer/pdata" + "go.opentelemetry.io/collector/internal/dataold" "go.opentelemetry.io/collector/receiver/hostmetricsreceiver/internal/third_party/telegraf/win_perf_counters" ) func TestPerfCounter_InitializeMetric_NoLabels(t *testing.T) { data := []win_perf_counters.CounterValue{{InstanceName: "_Total", Value: 100}} - metric := pdata.NewMetric() + metric := dataold.NewMetric() metric.InitEmpty() InitializeMetric(metric, data, "") @@ -41,7 +42,7 @@ func TestPerfCounter_InitializeMetric_NoLabels(t *testing.T) { func TestPerfCounter_InitializeMetric_Labels(t *testing.T) { data := []win_perf_counters.CounterValue{{InstanceName: "label_value_1", Value: 20}, {InstanceName: "label_value_2", Value: 50}} - metric := pdata.NewMetric() + metric := dataold.NewMetric() metric.InitEmpty() InitializeMetric(metric, data, "label") diff --git a/receiver/opencensusreceiver/ocmetrics/opencensus_test.go b/receiver/opencensusreceiver/ocmetrics/opencensus_test.go index f3c1ffc2d1c..6f5bfca2363 100644 --- a/receiver/opencensusreceiver/ocmetrics/opencensus_test.go +++ b/receiver/opencensusreceiver/ocmetrics/opencensus_test.go @@ -42,7 +42,7 @@ import ( "go.opentelemetry.io/collector/consumer/pdatautil" "go.opentelemetry.io/collector/exporter/exportertest" "go.opentelemetry.io/collector/exporter/opencensusexporter" - "go.opentelemetry.io/collector/internal/data/testdata" + "go.opentelemetry.io/collector/internal/dataold/testdataold" "go.opentelemetry.io/collector/obsreport" "go.opentelemetry.io/collector/testutil" ) @@ -68,7 +68,7 @@ func TestReceiver_endToEnd(t *testing.T) { require.NoError(t, oce.Shutdown(context.Background())) }() - md := testdata.GenerateMetricDataOneMetric() + md := testdataold.GenerateMetricDataOneMetric() assert.NoError(t, oce.ConsumeMetrics(context.Background(), pdatautil.MetricsFromInternalMetrics(md))) testutil.WaitFor(t, 
func() bool { diff --git a/receiver/otlpreceiver/metrics/otlp.go b/receiver/otlpreceiver/metrics/otlp.go index 545a89e62dc..a85d03619ab 100644 --- a/receiver/otlpreceiver/metrics/otlp.go +++ b/receiver/otlpreceiver/metrics/otlp.go @@ -20,8 +20,8 @@ import ( "go.opentelemetry.io/collector/client" "go.opentelemetry.io/collector/consumer" "go.opentelemetry.io/collector/consumer/pdatautil" - "go.opentelemetry.io/collector/internal/data" collectormetrics "go.opentelemetry.io/collector/internal/data/opentelemetry-proto-gen/collector/metrics/v1" + "go.opentelemetry.io/collector/internal/dataold" "go.opentelemetry.io/collector/obsreport" ) @@ -52,7 +52,7 @@ const ( func (r *Receiver) Export(ctx context.Context, req *collectormetrics.ExportMetricsServiceRequest) (*collectormetrics.ExportMetricsServiceResponse, error) { receiverCtx := obsreport.ReceiverContext(ctx, r.instanceName, receiverTransport, receiverTagValue) - md := data.MetricDataFromOtlp(req.ResourceMetrics) + md := dataold.MetricDataFromOtlp(req.ResourceMetrics) err := r.sendToNextConsumer(receiverCtx, md) if err != nil { @@ -62,7 +62,7 @@ func (r *Receiver) Export(ctx context.Context, req *collectormetrics.ExportMetri return &collectormetrics.ExportMetricsServiceResponse{}, nil } -func (r *Receiver) sendToNextConsumer(ctx context.Context, md data.MetricData) error { +func (r *Receiver) sendToNextConsumer(ctx context.Context, md dataold.MetricData) error { metricCount, dataPointCount := md.MetricAndDataPointCount() if metricCount == 0 { return nil diff --git a/receiver/otlpreceiver/metrics/otlp_test.go b/receiver/otlpreceiver/metrics/otlp_test.go index 598bd66c9ad..421d679bb03 100644 --- a/receiver/otlpreceiver/metrics/otlp_test.go +++ b/receiver/otlpreceiver/metrics/otlp_test.go @@ -27,10 +27,10 @@ import ( "go.opentelemetry.io/collector/consumer" "go.opentelemetry.io/collector/consumer/pdatautil" "go.opentelemetry.io/collector/exporter/exportertest" - "go.opentelemetry.io/collector/internal/data" collectormetrics "go.opentelemetry.io/collector/internal/data/opentelemetry-proto-gen/collector/metrics/v1" otlpcommon "go.opentelemetry.io/collector/internal/data/opentelemetry-proto-gen/common/v1" - otlpmetrics "go.opentelemetry.io/collector/internal/data/opentelemetry-proto-gen/metrics/v1" + otlpmetrics "go.opentelemetry.io/collector/internal/data/opentelemetry-proto-gen/metrics/v1old" + "go.opentelemetry.io/collector/internal/dataold" "go.opentelemetry.io/collector/obsreport" "go.opentelemetry.io/collector/testutil" ) @@ -99,7 +99,7 @@ func TestExport(t *testing.T) { // Keep metric data to compare the test result against it // Clone needed because OTLP proto XXX_ fields are altered in the GRPC downstream - metricData := data.MetricDataFromOtlp(resourceMetrics).Clone() + metricData := dataold.MetricDataFromOtlp(resourceMetrics).Clone() req := &collectormetrics.ExportMetricsServiceRequest{ ResourceMetrics: resourceMetrics, diff --git a/service/builder/receivers_builder_test.go b/service/builder/receivers_builder_test.go index e3f61db6c94..ba6cc89914f 100644 --- a/service/builder/receivers_builder_test.go +++ b/service/builder/receivers_builder_test.go @@ -29,6 +29,7 @@ import ( "go.opentelemetry.io/collector/consumer/pdata" "go.opentelemetry.io/collector/consumer/pdatautil" "go.opentelemetry.io/collector/internal/data/testdata" + "go.opentelemetry.io/collector/internal/dataold/testdataold" "go.opentelemetry.io/collector/processor/attributesprocessor" "go.opentelemetry.io/collector/receiver/receiverhelper" ) @@ -139,7 +140,7 @@ func 
testReceivers( traceProducer.TraceConsumer.ConsumeTraces(context.Background(), testdata.GenerateTraceDataOneSpan()) } - metrics := pdatautil.MetricsFromInternalMetrics(testdata.GenerateMetricDataOneMetric()) + metrics := pdatautil.MetricsFromInternalMetrics(testdataold.GenerateMetricDataOneMetric()) if test.hasMetrics { metricsProducer := receiver.receiver.(*componenttest.ExampleReceiverProducer) metricsProducer.MetricsConsumer.ConsumeMetrics(context.Background(), metrics) diff --git a/testbed/correctness/metrics/metric_diff.go b/testbed/correctness/metrics/metric_diff.go index 73c58befcaf..56fc1995966 100644 --- a/testbed/correctness/metrics/metric_diff.go +++ b/testbed/correctness/metrics/metric_diff.go @@ -20,6 +20,7 @@ import ( "go.opentelemetry.io/collector/consumer/pdata" "go.opentelemetry.io/collector/consumer/pdatautil" + "go.opentelemetry.io/collector/internal/dataold" ) // MetricDiff is intended to support producing human-readable diffs between two MetricData structs during @@ -35,7 +36,7 @@ func (mf MetricDiff) String() string { return fmt.Sprintf("{msg='%v' expected=[%v] actual=[%v]}\n", mf.Msg, mf.ExpectedValue, mf.ActualValue) } -func pdmToPDRM(pdm []pdata.Metrics) (out []pdata.ResourceMetrics) { +func pdmToPDRM(pdm []pdata.Metrics) (out []dataold.ResourceMetrics) { for _, m := range pdm { md := pdatautil.MetricsToInternalMetrics(m) rms := md.ResourceMetrics() @@ -47,7 +48,7 @@ func pdmToPDRM(pdm []pdata.Metrics) (out []pdata.ResourceMetrics) { return out } -func diffRMSlices(sent []pdata.ResourceMetrics, recd []pdata.ResourceMetrics) []*MetricDiff { +func diffRMSlices(sent []dataold.ResourceMetrics, recd []dataold.ResourceMetrics) []*MetricDiff { var diffs []*MetricDiff if len(sent) != len(recd) { return []*MetricDiff{{ @@ -64,7 +65,7 @@ func diffRMSlices(sent []pdata.ResourceMetrics, recd []pdata.ResourceMetrics) [] return diffs } -func diffRMs(diffs []*MetricDiff, expected pdata.ResourceMetrics, actual pdata.ResourceMetrics) []*MetricDiff { +func diffRMs(diffs []*MetricDiff, expected dataold.ResourceMetrics, actual dataold.ResourceMetrics) []*MetricDiff { diffs = diffResource(diffs, expected.Resource(), actual.Resource()) diffs = diffILMSlice( diffs, @@ -76,8 +77,8 @@ func diffRMs(diffs []*MetricDiff, expected pdata.ResourceMetrics, actual pdata.R func diffILMSlice( diffs []*MetricDiff, - expected pdata.InstrumentationLibraryMetricsSlice, - actual pdata.InstrumentationLibraryMetricsSlice, + expected dataold.InstrumentationLibraryMetricsSlice, + actual dataold.InstrumentationLibraryMetricsSlice, ) []*MetricDiff { var mismatch bool diffs, mismatch = diffValues(diffs, actual.Len(), expected.Len(), "InstrumentationLibraryMetricsSlice len") @@ -92,13 +93,13 @@ func diffILMSlice( func diffILM( diffs []*MetricDiff, - expected pdata.InstrumentationLibraryMetrics, - actual pdata.InstrumentationLibraryMetrics, + expected dataold.InstrumentationLibraryMetrics, + actual dataold.InstrumentationLibraryMetrics, ) []*MetricDiff { return diffMetrics(diffs, expected.Metrics(), actual.Metrics()) } -func diffMetrics(diffs []*MetricDiff, expected pdata.MetricSlice, actual pdata.MetricSlice) []*MetricDiff { +func diffMetrics(diffs []*MetricDiff, expected dataold.MetricSlice, actual dataold.MetricSlice) []*MetricDiff { var mismatch bool diffs, mismatch = diffValues(diffs, actual.Len(), expected.Len(), "MetricSlice len") if mismatch { @@ -110,7 +111,7 @@ func diffMetrics(diffs []*MetricDiff, expected pdata.MetricSlice, actual pdata.M return diffs } -func DiffMetric(diffs []*MetricDiff, expected 
pdata.Metric, actual pdata.Metric) []*MetricDiff { +func DiffMetric(diffs []*MetricDiff, expected dataold.Metric, actual dataold.Metric) []*MetricDiff { diffs = diffMetricDescriptor(diffs, expected.MetricDescriptor(), actual.MetricDescriptor()) diffs = diffInt64Pts(diffs, expected.Int64DataPoints(), actual.Int64DataPoints()) diffs = diffDoublePts(diffs, expected.DoubleDataPoints(), actual.DoubleDataPoints()) @@ -121,8 +122,8 @@ func DiffMetric(diffs []*MetricDiff, expected pdata.Metric, actual pdata.Metric) func diffSummaryPts( diffs []*MetricDiff, - expected pdata.SummaryDataPointSlice, - actual pdata.SummaryDataPointSlice, + expected dataold.SummaryDataPointSlice, + actual dataold.SummaryDataPointSlice, ) []*MetricDiff { var mismatch bool diffs, mismatch = diffValues(diffs, actual.Len(), expected.Len(), "MetricSlice len") @@ -137,8 +138,8 @@ func diffSummaryPts( func diffSummaryPt( diffs []*MetricDiff, - expected pdata.SummaryDataPoint, - actual pdata.SummaryDataPoint, + expected dataold.SummaryDataPoint, + actual dataold.SummaryDataPoint, ) []*MetricDiff { diffs = diff(diffs, expected.Count(), actual.Count(), "SummaryDataPoint Count") diffs = diff(diffs, expected.Sum(), actual.Sum(), "SummaryDataPoint Sum") @@ -148,8 +149,8 @@ func diffSummaryPt( func diffPercentiles( diffs []*MetricDiff, - expected pdata.SummaryValueAtPercentileSlice, - actual pdata.SummaryValueAtPercentileSlice, + expected dataold.SummaryValueAtPercentileSlice, + actual dataold.SummaryValueAtPercentileSlice, ) []*MetricDiff { var mismatch bool diffs, mismatch = diffValues(diffs, expected.Len(), actual.Len(), "MetricSlice len") @@ -164,8 +165,8 @@ func diffPercentiles( func diffSummaryAtPct( diffs []*MetricDiff, - expected pdata.SummaryValueAtPercentile, - actual pdata.SummaryValueAtPercentile, + expected dataold.SummaryValueAtPercentile, + actual dataold.SummaryValueAtPercentile, ) []*MetricDiff { diffs = diff(diffs, expected.Value(), actual.Value(), "SummaryValueAtPercentile Value") diffs = diff(diffs, expected.Percentile(), actual.Percentile(), "SummaryValueAtPercentile Percentile") @@ -174,8 +175,8 @@ func diffSummaryAtPct( func diffMetricDescriptor( diffs []*MetricDiff, - expected pdata.MetricDescriptor, - actual pdata.MetricDescriptor, + expected dataold.MetricDescriptor, + actual dataold.MetricDescriptor, ) []*MetricDiff { diffs = diff(diffs, expected.Type(), actual.Type(), "MetricDescriptor Type") diffs = diff(diffs, expected.Name(), actual.Name(), "MetricDescriptor Name") @@ -186,8 +187,8 @@ func diffMetricDescriptor( func diffDoublePts( diffs []*MetricDiff, - expected pdata.DoubleDataPointSlice, - actual pdata.DoubleDataPointSlice, + expected dataold.DoubleDataPointSlice, + actual dataold.DoubleDataPointSlice, ) []*MetricDiff { var mismatch bool diffs, mismatch = diffValues(diffs, expected.Len(), actual.Len(), "DoubleDataPointSlice len") @@ -202,16 +203,16 @@ func diffDoublePts( func diffDoublePt( diffs []*MetricDiff, - expected pdata.DoubleDataPoint, - actual pdata.DoubleDataPoint, + expected dataold.DoubleDataPoint, + actual dataold.DoubleDataPoint, ) []*MetricDiff { return diff(diffs, expected.Value(), actual.Value(), "DoubleDataPoint value") } func diffHistogramPts( diffs []*MetricDiff, - expected pdata.HistogramDataPointSlice, - actual pdata.HistogramDataPointSlice, + expected dataold.HistogramDataPointSlice, + actual dataold.HistogramDataPointSlice, ) []*MetricDiff { var mismatch bool diffs, mismatch = diffValues(diffs, expected.Len(), actual.Len(), "HistogramDataPointSlice len") @@ -226,8 +227,8 @@ 
func diffHistogramPts( func diffHistogramPt( diffs []*MetricDiff, - expected pdata.HistogramDataPoint, - actual pdata.HistogramDataPoint, + expected dataold.HistogramDataPoint, + actual dataold.HistogramDataPoint, ) []*MetricDiff { diffs = diff(diffs, expected.Count(), actual.Count(), "HistogramDataPoint Count") diffs = diff(diffs, expected.Sum(), actual.Sum(), "HistogramDataPoint Sum") @@ -238,8 +239,8 @@ func diffHistogramPt( func diffBuckets( diffs []*MetricDiff, - expected pdata.HistogramBucketSlice, - actual pdata.HistogramBucketSlice, + expected dataold.HistogramBucketSlice, + actual dataold.HistogramBucketSlice, ) []*MetricDiff { var mismatch bool diffs, mismatch = diffValues(diffs, expected.Len(), actual.Len(), "HistogramBucketSlice len") @@ -254,8 +255,8 @@ func diffBuckets( func diffBucket( diffs []*MetricDiff, - expected pdata.HistogramBucket, - actual pdata.HistogramBucket, + expected dataold.HistogramBucket, + actual dataold.HistogramBucket, ) []*MetricDiff { diffs = diff(diffs, expected.Count(), actual.Count(), "HistogramBucket Count") diffs = diffExemplar(diffs, expected.Exemplar(), actual.Exemplar()) @@ -264,8 +265,8 @@ func diffBucket( func diffExemplar( diffs []*MetricDiff, - expected pdata.HistogramBucketExemplar, - actual pdata.HistogramBucketExemplar, + expected dataold.HistogramBucketExemplar, + actual dataold.HistogramBucketExemplar, ) []*MetricDiff { diffs = diff(diffs, expected.IsNil(), actual.IsNil(), "HistogramBucketExemplar IsNil") if expected.IsNil() || actual.IsNil() { @@ -276,8 +277,8 @@ func diffExemplar( func diffInt64Pts( diffs []*MetricDiff, - expected pdata.Int64DataPointSlice, - actual pdata.Int64DataPointSlice, + expected dataold.Int64DataPointSlice, + actual dataold.Int64DataPointSlice, ) []*MetricDiff { var mismatch bool diffs, mismatch = diffValues(diffs, expected.Len(), actual.Len(), "Int64DataPointSlice len") @@ -292,8 +293,8 @@ func diffInt64Pts( func diffInt64Pt( diffs []*MetricDiff, - expected pdata.Int64DataPoint, - actual pdata.Int64DataPoint, + expected dataold.Int64DataPoint, + actual dataold.Int64DataPoint, ) []*MetricDiff { return diff(diffs, expected.Value(), actual.Value(), "Int64DataPoint value") } diff --git a/testbed/correctness/metrics/metric_diff_test.go b/testbed/correctness/metrics/metric_diff_test.go index 6ec2ccc93a0..e7de655b1f2 100644 --- a/testbed/correctness/metrics/metric_diff_test.go +++ b/testbed/correctness/metrics/metric_diff_test.go @@ -22,7 +22,7 @@ import ( "go.opentelemetry.io/collector/consumer/pdata" "go.opentelemetry.io/collector/consumer/pdatautil" - "go.opentelemetry.io/collector/internal/data" + "go.opentelemetry.io/collector/internal/dataold" "go.opentelemetry.io/collector/internal/goldendataset" ) @@ -33,13 +33,13 @@ func TestSameMetrics(t *testing.T) { assert.Nil(t, diffs) } -func diffMetricData(expected data.MetricData, actual data.MetricData) []*MetricDiff { +func diffMetricData(expected dataold.MetricData, actual dataold.MetricData) []*MetricDiff { expectedRMSlice := expected.ResourceMetrics() actualRMSlice := actual.ResourceMetrics() return diffRMSlices(toSlice(expectedRMSlice), toSlice(actualRMSlice)) } -func toSlice(s pdata.ResourceMetricsSlice) (out []pdata.ResourceMetrics) { +func toSlice(s dataold.ResourceMetricsSlice) (out []dataold.ResourceMetrics) { for i := 0; i < s.Len(); i++ { out = append(out, s.At(i)) } @@ -67,7 +67,7 @@ func TestDifferentNumPts(t *testing.T) { func TestDifferentPtTypes(t *testing.T) { expected := goldendataset.DefaultMetricData() cfg := goldendataset.DefaultCfg() - 
cfg.MetricDescriptorType = pdata.MetricTypeDouble + cfg.MetricDescriptorType = dataold.MetricTypeDouble actual := goldendataset.MetricDataFromCfg(cfg) diffs := diffMetricData(expected, actual) assert.Equal(t, 3, len(diffs)) @@ -75,10 +75,10 @@ func TestDifferentPtTypes(t *testing.T) { func TestHistogram(t *testing.T) { cfg1 := goldendataset.DefaultCfg() - cfg1.MetricDescriptorType = pdata.MetricTypeHistogram + cfg1.MetricDescriptorType = dataold.MetricTypeHistogram expected := goldendataset.MetricDataFromCfg(cfg1) cfg2 := goldendataset.DefaultCfg() - cfg2.MetricDescriptorType = pdata.MetricTypeHistogram + cfg2.MetricDescriptorType = dataold.MetricTypeHistogram cfg2.PtVal = 2 actual := goldendataset.MetricDataFromCfg(cfg2) diffs := diffMetricData(expected, actual) @@ -87,10 +87,10 @@ func TestHistogram(t *testing.T) { func TestSummary(t *testing.T) { cfg1 := goldendataset.DefaultCfg() - cfg1.MetricDescriptorType = pdata.MetricTypeSummary + cfg1.MetricDescriptorType = dataold.MetricTypeSummary expected := goldendataset.MetricDataFromCfg(cfg1) cfg2 := goldendataset.DefaultCfg() - cfg2.MetricDescriptorType = pdata.MetricTypeSummary + cfg2.MetricDescriptorType = dataold.MetricTypeSummary cfg2.PtVal = 2 actual := goldendataset.MetricDataFromCfg(cfg2) diffs := diffMetricData(expected, actual) @@ -98,7 +98,7 @@ func TestSummary(t *testing.T) { } func TestPDMToPDRM(t *testing.T) { - md := data.NewMetricData() + md := dataold.NewMetricData() md.ResourceMetrics().Resize(1) rm := pdmToPDRM([]pdata.Metrics{pdatautil.MetricsFromInternalMetrics(md)}) require.Equal(t, 1, len(rm)) diff --git a/testbed/testbed/data_providers.go b/testbed/testbed/data_providers.go index a1549bfffcf..d84dba6940f 100644 --- a/testbed/testbed/data_providers.go +++ b/testbed/testbed/data_providers.go @@ -28,8 +28,8 @@ import ( "go.opentelemetry.io/collector/consumer/pdata" "go.opentelemetry.io/collector/consumer/pdatautil" - "go.opentelemetry.io/collector/internal/data" otlptrace "go.opentelemetry.io/collector/internal/data/opentelemetry-proto-gen/trace/v1" + "go.opentelemetry.io/collector/internal/dataold" "go.opentelemetry.io/collector/internal/goldendataset" ) @@ -123,7 +123,7 @@ func (dp *PerfTestDataProvider) GenerateMetrics() (pdata.Metrics, bool) { // Generate 7 data points per metric. 
const dataPointsPerMetric = 7 - metricData := data.NewMetricData() + metricData := dataold.NewMetricData() metricData.ResourceMetrics().Resize(1) metricData.ResourceMetrics().At(0).InstrumentationLibraryMetrics().Resize(1) if dp.options.Attributes != nil { @@ -142,7 +142,7 @@ func (dp *PerfTestDataProvider) GenerateMetrics() (pdata.Metrics, bool) { metricDescriptor.InitEmpty() metricDescriptor.SetName("load_generator_" + strconv.Itoa(i)) metricDescriptor.SetDescription("Load Generator Counter #" + strconv.Itoa(i)) - metricDescriptor.SetType(pdata.MetricTypeInt64) + metricDescriptor.SetType(dataold.MetricTypeInt64) batchIndex := dp.batchesGenerated.Inc() @@ -261,7 +261,7 @@ func (dp *GoldenDataProvider) GenerateTraces() (pdata.Traces, bool) { } func (dp *GoldenDataProvider) GenerateMetrics() (pdata.Metrics, bool) { - return pdatautil.MetricsFromInternalMetrics(data.MetricData{}), true + return pdatautil.MetricsFromInternalMetrics(dataold.MetricData{}), true } func (dp *GoldenDataProvider) GenerateLogs() (pdata.Logs, bool) { diff --git a/testbed/tests/resource_processor_test.go b/testbed/tests/resource_processor_test.go index fcb72c840ac..18b85456a6a 100644 --- a/testbed/tests/resource_processor_test.go +++ b/testbed/tests/resource_processor_test.go @@ -24,10 +24,10 @@ import ( "go.opentelemetry.io/collector/consumer/pdata" "go.opentelemetry.io/collector/consumer/pdatautil" - "go.opentelemetry.io/collector/internal/data" otlpcommon "go.opentelemetry.io/collector/internal/data/opentelemetry-proto-gen/common/v1" - otlpmetrics "go.opentelemetry.io/collector/internal/data/opentelemetry-proto-gen/metrics/v1" + otlpmetrics "go.opentelemetry.io/collector/internal/data/opentelemetry-proto-gen/metrics/v1old" otlpresource "go.opentelemetry.io/collector/internal/data/opentelemetry-proto-gen/resource/v1" + "go.opentelemetry.io/collector/internal/dataold" "go.opentelemetry.io/collector/testbed/testbed" ) @@ -208,7 +208,7 @@ func getResourceProcessorTestCases(t *testing.T) []resourceProcessorTestCase { } func getMetricDataFromResourceMetrics(rm *otlpmetrics.ResourceMetrics) pdata.Metrics { - return pdatautil.MetricsFromInternalMetrics(data.MetricDataFromOtlp([]*otlpmetrics.ResourceMetrics{rm})) + return pdatautil.MetricsFromInternalMetrics(dataold.MetricDataFromOtlp([]*otlpmetrics.ResourceMetrics{rm})) } func getMetricDataFromJSON(t *testing.T, rmString string) pdata.Metrics { @@ -217,7 +217,7 @@ func getMetricDataFromJSON(t *testing.T, rmString string) pdata.Metrics { err := jsonpb.UnmarshalString(rmString, &mockedResourceMetrics) require.NoError(t, err, "failed to get mocked resource metrics object", err) - return pdatautil.MetricsFromInternalMetrics(data.MetricDataFromOtlp([]*otlpmetrics.ResourceMetrics{&mockedResourceMetrics})) + return pdatautil.MetricsFromInternalMetrics(dataold.MetricDataFromOtlp([]*otlpmetrics.ResourceMetrics{&mockedResourceMetrics})) } func TestMetricResourceProcessor(t *testing.T) { diff --git a/translator/internaldata/metrics_to_oc.go b/translator/internaldata/metrics_to_oc.go index c9b42d7c462..ac1eb42774d 100644 --- a/translator/internaldata/metrics_to_oc.go +++ b/translator/internaldata/metrics_to_oc.go @@ -23,7 +23,7 @@ import ( "go.opentelemetry.io/collector/consumer/consumerdata" "go.opentelemetry.io/collector/consumer/pdata" "go.opentelemetry.io/collector/internal" - "go.opentelemetry.io/collector/internal/data" + "go.opentelemetry.io/collector/internal/dataold" ) const ( @@ -38,7 +38,7 @@ type labelKeys struct { keyIndices map[string]int } -func MetricDataToOC(md 
data.MetricData) []consumerdata.MetricsData { +func MetricDataToOC(md dataold.MetricData) []consumerdata.MetricsData { resourceMetrics := md.ResourceMetrics() if resourceMetrics.Len() == 0 { @@ -57,7 +57,7 @@ func MetricDataToOC(md data.MetricData) []consumerdata.MetricsData { return ocResourceMetricsList } -func ResourceMetricsToOC(rm pdata.ResourceMetrics) consumerdata.MetricsData { +func ResourceMetricsToOC(rm dataold.ResourceMetrics) consumerdata.MetricsData { ocMetricsData := consumerdata.MetricsData{} ocMetricsData.Node, ocMetricsData.Resource = internalResourceToOC(rm.Resource()) ilms := rm.InstrumentationLibraryMetrics() @@ -88,7 +88,7 @@ func ResourceMetricsToOC(rm pdata.ResourceMetrics) consumerdata.MetricsData { return ocMetricsData } -func metricToOC(metric pdata.Metric) *ocmetrics.Metric { +func metricToOC(metric dataold.Metric) *ocmetrics.Metric { labelKeys := collectLabelKeys(metric) return &ocmetrics.Metric{ MetricDescriptor: descriptorToOC(metric.MetricDescriptor(), labelKeys), @@ -97,7 +97,7 @@ func metricToOC(metric pdata.Metric) *ocmetrics.Metric { } } -func collectLabelKeys(metric pdata.Metric) *labelKeys { +func collectLabelKeys(metric dataold.Metric) *labelKeys { // NOTE: Internal data structure and OpenCensus have different representations of labels: // - OC has a single "global" ordered list of label keys per metric in the MetricDescriptor; // then, every data point has an ordered list of label values matching the key index. @@ -187,7 +187,7 @@ func addLabelKeys(keySet map[string]struct{}, labels pdata.StringMap) { }) } -func descriptorToOC(descriptor pdata.MetricDescriptor, labelKeys *labelKeys) *ocmetrics.MetricDescriptor { +func descriptorToOC(descriptor dataold.MetricDescriptor, labelKeys *labelKeys) *ocmetrics.MetricDescriptor { if descriptor.IsNil() { return nil } @@ -200,28 +200,28 @@ func descriptorToOC(descriptor pdata.MetricDescriptor, labelKeys *labelKeys) *oc } } -func descriptorTypeToOC(t pdata.MetricType) ocmetrics.MetricDescriptor_Type { +func descriptorTypeToOC(t dataold.MetricType) ocmetrics.MetricDescriptor_Type { switch t { - case pdata.MetricTypeInvalid: + case dataold.MetricTypeInvalid: return ocmetrics.MetricDescriptor_UNSPECIFIED - case pdata.MetricTypeInt64: + case dataold.MetricTypeInt64: return ocmetrics.MetricDescriptor_GAUGE_INT64 - case pdata.MetricTypeDouble: + case dataold.MetricTypeDouble: return ocmetrics.MetricDescriptor_GAUGE_DOUBLE - case pdata.MetricTypeMonotonicInt64: + case dataold.MetricTypeMonotonicInt64: return ocmetrics.MetricDescriptor_CUMULATIVE_INT64 - case pdata.MetricTypeMonotonicDouble: + case dataold.MetricTypeMonotonicDouble: return ocmetrics.MetricDescriptor_CUMULATIVE_DOUBLE - case pdata.MetricTypeHistogram: + case dataold.MetricTypeHistogram: return ocmetrics.MetricDescriptor_CUMULATIVE_DISTRIBUTION - case pdata.MetricTypeSummary: + case dataold.MetricTypeSummary: return ocmetrics.MetricDescriptor_SUMMARY default: return invalidMetricDescriptorType } } -func dataPointsToTimeseries(metric pdata.Metric, labelKeys *labelKeys) []*ocmetrics.TimeSeries { +func dataPointsToTimeseries(metric dataold.Metric, labelKeys *labelKeys) []*ocmetrics.TimeSeries { length := metric.Int64DataPoints().Len() + metric.DoubleDataPoints().Len() + metric.HistogramDataPoints().Len() + metric.SummaryDataPoints().Len() if length == 0 { @@ -269,7 +269,7 @@ func dataPointsToTimeseries(metric pdata.Metric, labelKeys *labelKeys) []*ocmetr return timeseries } -func int64PointToOC(point pdata.Int64DataPoint, labelKeys *labelKeys) 
*ocmetrics.TimeSeries { +func int64PointToOC(point dataold.Int64DataPoint, labelKeys *labelKeys) *ocmetrics.TimeSeries { return &ocmetrics.TimeSeries{ StartTimestamp: internal.UnixNanoToTimestamp(point.StartTime()), LabelValues: labelValuesToOC(point.LabelsMap(), labelKeys), @@ -284,7 +284,7 @@ func int64PointToOC(point pdata.Int64DataPoint, labelKeys *labelKeys) *ocmetrics } } -func doublePointToOC(point pdata.DoubleDataPoint, labelKeys *labelKeys) *ocmetrics.TimeSeries { +func doublePointToOC(point dataold.DoubleDataPoint, labelKeys *labelKeys) *ocmetrics.TimeSeries { return &ocmetrics.TimeSeries{ StartTimestamp: internal.UnixNanoToTimestamp(point.StartTime()), LabelValues: labelValuesToOC(point.LabelsMap(), labelKeys), @@ -299,7 +299,7 @@ func doublePointToOC(point pdata.DoubleDataPoint, labelKeys *labelKeys) *ocmetri } } -func histogramPointToOC(point pdata.HistogramDataPoint, labelKeys *labelKeys) *ocmetrics.TimeSeries { +func histogramPointToOC(point dataold.HistogramDataPoint, labelKeys *labelKeys) *ocmetrics.TimeSeries { return &ocmetrics.TimeSeries{ StartTimestamp: internal.UnixNanoToTimestamp(point.StartTime()), LabelValues: labelValuesToOC(point.LabelsMap(), labelKeys), @@ -334,7 +334,7 @@ func histogramExplicitBoundsToOC(bounds []float64) *ocmetrics.DistributionValue_ } } -func histogramBucketsToOC(buckets pdata.HistogramBucketSlice) []*ocmetrics.DistributionValue_Bucket { +func histogramBucketsToOC(buckets dataold.HistogramBucketSlice) []*ocmetrics.DistributionValue_Bucket { if buckets.Len() == 0 { return nil } @@ -350,7 +350,7 @@ func histogramBucketsToOC(buckets pdata.HistogramBucketSlice) []*ocmetrics.Distr return ocBuckets } -func exemplarToOC(exemplar pdata.HistogramBucketExemplar) *ocmetrics.DistributionValue_Exemplar { +func exemplarToOC(exemplar dataold.HistogramBucketExemplar) *ocmetrics.DistributionValue_Exemplar { if exemplar.IsNil() { return nil } @@ -374,7 +374,7 @@ func exemplarToOC(exemplar pdata.HistogramBucketExemplar) *ocmetrics.Distributio } } -func summaryPointToOC(point pdata.SummaryDataPoint, labelKeys *labelKeys) *ocmetrics.TimeSeries { +func summaryPointToOC(point dataold.SummaryDataPoint, labelKeys *labelKeys) *ocmetrics.TimeSeries { return &ocmetrics.TimeSeries{ StartTimestamp: internal.UnixNanoToTimestamp(point.StartTime()), LabelValues: labelValuesToOC(point.LabelsMap(), labelKeys), @@ -393,7 +393,7 @@ func summaryPointToOC(point pdata.SummaryDataPoint, labelKeys *labelKeys) *ocmet } } -func percentileToOC(percentiles pdata.SummaryValueAtPercentileSlice) *ocmetrics.SummaryValue_Snapshot { +func percentileToOC(percentiles dataold.SummaryValueAtPercentileSlice) *ocmetrics.SummaryValue_Snapshot { if percentiles.Len() == 0 { return nil } diff --git a/translator/internaldata/metrics_to_oc_test.go b/translator/internaldata/metrics_to_oc_test.go index e86b1342075..fc93368da8b 100644 --- a/translator/internaldata/metrics_to_oc_test.go +++ b/translator/internaldata/metrics_to_oc_test.go @@ -26,14 +26,14 @@ import ( "go.opentelemetry.io/collector/consumer/consumerdata" "go.opentelemetry.io/collector/consumer/pdata" - "go.opentelemetry.io/collector/internal/data" - "go.opentelemetry.io/collector/internal/data/testdata" + "go.opentelemetry.io/collector/internal/dataold" + "go.opentelemetry.io/collector/internal/dataold/testdataold" "go.opentelemetry.io/collector/translator/conventions" ) func TestMetricsDataToOC(t *testing.T) { - sampleMetricData := testdata.GenerateMetricDataWithCountersHistogramAndSummary() + sampleMetricData := 
testdataold.GenerateMetricDataWithCountersHistogramAndSummary() attrs := sampleMetricData.ResourceMetrics().At(0).Resource().Attributes() attrs.Upsert(conventions.AttributeHostHostname, pdata.NewAttributeValueString("host1")) attrs.Upsert(conventions.OCAttributeProcessID, pdata.NewAttributeValueInt(123)) @@ -44,18 +44,18 @@ func TestMetricsDataToOC(t *testing.T) { tests := []struct { name string - internal data.MetricData + internal dataold.MetricData oc []consumerdata.MetricsData }{ { name: "empty", - internal: testdata.GenerateMetricDataEmpty(), + internal: testdataold.GenerateMetricDataEmpty(), oc: []consumerdata.MetricsData(nil), }, { name: "one-empty-resource-metrics", - internal: testdata.GenerateMetricDataOneEmptyResourceMetrics(), + internal: testdataold.GenerateMetricDataOneEmptyResourceMetrics(), oc: []consumerdata.MetricsData{ {}, }, @@ -63,7 +63,7 @@ func TestMetricsDataToOC(t *testing.T) { { name: "one-empty-one-nil-resource-metrics", - internal: testdata.GenerateMetricDataOneEmptyOneNilResourceMetrics(), + internal: testdataold.GenerateMetricDataOneEmptyOneNilResourceMetrics(), oc: []consumerdata.MetricsData{ {}, }, @@ -71,7 +71,7 @@ func TestMetricsDataToOC(t *testing.T) { { name: "no-libraries", - internal: testdata.GenerateMetricDataNoLibraries(), + internal: testdataold.GenerateMetricDataNoLibraries(), oc: []consumerdata.MetricsData{ generateOCTestDataNoMetrics(), }, @@ -79,7 +79,7 @@ func TestMetricsDataToOC(t *testing.T) { { name: "one-empty-instrumentation-library", - internal: testdata.GenerateMetricDataOneEmptyInstrumentationLibrary(), + internal: testdataold.GenerateMetricDataOneEmptyInstrumentationLibrary(), oc: []consumerdata.MetricsData{ generateOCTestDataNoMetrics(), }, @@ -87,7 +87,7 @@ func TestMetricsDataToOC(t *testing.T) { { name: "one-empty-one-nil-instrumentation-library", - internal: testdata.GenerateMetricDataOneEmptyOneNilInstrumentationLibrary(), + internal: testdataold.GenerateMetricDataOneEmptyOneNilInstrumentationLibrary(), oc: []consumerdata.MetricsData{ generateOCTestDataNoMetrics(), }, @@ -95,7 +95,7 @@ func TestMetricsDataToOC(t *testing.T) { { name: "one-metric-no-resource", - internal: testdata.GenerateMetricDataOneMetricNoResource(), + internal: testdataold.GenerateMetricDataOneMetricNoResource(), oc: []consumerdata.MetricsData{ { Metrics: []*ocmetrics.Metric{ @@ -107,7 +107,7 @@ func TestMetricsDataToOC(t *testing.T) { { name: "one-metric", - internal: testdata.GenerateMetricDataOneMetric(), + internal: testdataold.GenerateMetricDataOneMetric(), oc: []consumerdata.MetricsData{ generateOCTestDataMetricsOneMetric(), }, @@ -115,7 +115,7 @@ func TestMetricsDataToOC(t *testing.T) { { name: "one-metric-one-nil", - internal: testdata.GenerateMetricDataOneMetricOneNil(), + internal: testdataold.GenerateMetricDataOneMetricOneNil(), oc: []consumerdata.MetricsData{ generateOCTestDataMetricsOneMetric(), }, @@ -123,7 +123,7 @@ func TestMetricsDataToOC(t *testing.T) { { name: "one-metric-one-nil-point", - internal: testdata.GenerateMetricDataOneMetricOneNilPoint(), + internal: testdataold.GenerateMetricDataOneMetricOneNilPoint(), oc: []consumerdata.MetricsData{ generateOCTestDataMetricsOneMetric(), }, @@ -131,7 +131,7 @@ func TestMetricsDataToOC(t *testing.T) { { name: "one-metric-no-labels", - internal: testdata.GenerateMetricDataOneMetricNoLabels(), + internal: testdataold.GenerateMetricDataOneMetricNoLabels(), oc: []consumerdata.MetricsData{ generateOCTestDataNoLabels(), }, @@ -139,7 +139,7 @@ func TestMetricsDataToOC(t *testing.T) { { name: 
"all-types-no-data-points", - internal: testdata.GenerateMetricDataAllTypesNoDataPoints(), + internal: testdataold.GenerateMetricDataAllTypesNoDataPoints(), oc: []consumerdata.MetricsData{ generateOCTestDataNoPoints(), }, diff --git a/translator/internaldata/oc_testdata_test.go b/translator/internaldata/oc_testdata_test.go index d3a6c0faead..fe85c044880 100644 --- a/translator/internaldata/oc_testdata_test.go +++ b/translator/internaldata/oc_testdata_test.go @@ -26,6 +26,7 @@ import ( "go.opentelemetry.io/collector/consumer/consumerdata" "go.opentelemetry.io/collector/consumer/pdata" "go.opentelemetry.io/collector/internal/data/testdata" + "go.opentelemetry.io/collector/internal/dataold/testdataold" "go.opentelemetry.io/collector/translator/conventions" ) @@ -158,7 +159,7 @@ func generateOCTestDataMetricsOneMetricOneNilPoint() consumerdata.MetricsData { func generateOCTestMetricInt() *ocmetrics.Metric { return &ocmetrics.Metric{ MetricDescriptor: &ocmetrics.MetricDescriptor{ - Name: testdata.TestCounterIntMetricName, + Name: testdataold.TestCounterIntMetricName, Description: "", Unit: "1", Type: ocmetrics.MetricDescriptor_CUMULATIVE_INT64, @@ -169,7 +170,7 @@ func generateOCTestMetricInt() *ocmetrics.Metric { }, Timeseries: []*ocmetrics.TimeSeries{ { - StartTimestamp: timestamppb.New(testdata.TestMetricStartTime), + StartTimestamp: timestamppb.New(testdataold.TestMetricStartTime), LabelValues: []*ocmetrics.LabelValue{ { // key1 @@ -183,7 +184,7 @@ func generateOCTestMetricInt() *ocmetrics.Metric { }, Points: []*ocmetrics.Point{ { - Timestamp: timestamppb.New(testdata.TestMetricTime), + Timestamp: timestamppb.New(testdataold.TestMetricTime), Value: &ocmetrics.Point_Int64Value{ Int64Value: 123, }, @@ -191,7 +192,7 @@ func generateOCTestMetricInt() *ocmetrics.Metric { }, }, { - StartTimestamp: timestamppb.New(testdata.TestMetricStartTime), + StartTimestamp: timestamppb.New(testdataold.TestMetricStartTime), LabelValues: []*ocmetrics.LabelValue{ { // key1 @@ -205,7 +206,7 @@ func generateOCTestMetricInt() *ocmetrics.Metric { }, Points: []*ocmetrics.Point{ { - Timestamp: timestamppb.New(testdata.TestMetricTime), + Timestamp: timestamppb.New(testdataold.TestMetricTime), Value: &ocmetrics.Point_Int64Value{ Int64Value: 456, }, @@ -230,7 +231,7 @@ func generateOCTestMetricDouble() *ocmetrics.Metric { }, Timeseries: []*ocmetrics.TimeSeries{ { - StartTimestamp: timestamppb.New(testdata.TestMetricStartTime), + StartTimestamp: timestamppb.New(testdataold.TestMetricStartTime), LabelValues: []*ocmetrics.LabelValue{ { // key1 @@ -249,7 +250,7 @@ func generateOCTestMetricDouble() *ocmetrics.Metric { }, Points: []*ocmetrics.Point{ { - Timestamp: timestamppb.New(testdata.TestMetricTime), + Timestamp: timestamppb.New(testdataold.TestMetricTime), Value: &ocmetrics.Point_DoubleValue{ DoubleValue: 1.23, }, @@ -257,7 +258,7 @@ func generateOCTestMetricDouble() *ocmetrics.Metric { }, }, { - StartTimestamp: timestamppb.New(testdata.TestMetricStartTime), + StartTimestamp: timestamppb.New(testdataold.TestMetricStartTime), LabelValues: []*ocmetrics.LabelValue{ { // key1 @@ -276,7 +277,7 @@ func generateOCTestMetricDouble() *ocmetrics.Metric { }, Points: []*ocmetrics.Point{ { - Timestamp: timestamppb.New(testdata.TestMetricTime), + Timestamp: timestamppb.New(testdataold.TestMetricTime), Value: &ocmetrics.Point_DoubleValue{ DoubleValue: 4.56, }, @@ -290,7 +291,7 @@ func generateOCTestMetricDouble() *ocmetrics.Metric { func generateOCTestMetricHistogram() *ocmetrics.Metric { return &ocmetrics.Metric{ MetricDescriptor: 
&ocmetrics.MetricDescriptor{ - Name: testdata.TestCumulativeHistogramMetricName, + Name: testdataold.TestCumulativeHistogramMetricName, Description: "", Unit: "1", Type: ocmetrics.MetricDescriptor_CUMULATIVE_DISTRIBUTION, @@ -302,7 +303,7 @@ func generateOCTestMetricHistogram() *ocmetrics.Metric { }, Timeseries: []*ocmetrics.TimeSeries{ { - StartTimestamp: timestamppb.New(testdata.TestMetricStartTime), + StartTimestamp: timestamppb.New(testdataold.TestMetricStartTime), LabelValues: []*ocmetrics.LabelValue{ { // key1 @@ -321,7 +322,7 @@ func generateOCTestMetricHistogram() *ocmetrics.Metric { }, Points: []*ocmetrics.Point{ { - Timestamp: timestamppb.New(testdata.TestMetricTime), + Timestamp: timestamppb.New(testdataold.TestMetricTime), Value: &ocmetrics.Point_DistributionValue{ DistributionValue: &ocmetrics.DistributionValue{ Count: 1, @@ -332,7 +333,7 @@ func generateOCTestMetricHistogram() *ocmetrics.Metric { }, }, { - StartTimestamp: timestamppb.New(testdata.TestMetricStartTime), + StartTimestamp: timestamppb.New(testdataold.TestMetricStartTime), LabelValues: []*ocmetrics.LabelValue{ { // key1 @@ -350,7 +351,7 @@ func generateOCTestMetricHistogram() *ocmetrics.Metric { }, Points: []*ocmetrics.Point{ { - Timestamp: timestamppb.New(testdata.TestMetricTime), + Timestamp: timestamppb.New(testdataold.TestMetricTime), Value: &ocmetrics.Point_DistributionValue{ DistributionValue: &ocmetrics.DistributionValue{ Count: 1, @@ -369,7 +370,7 @@ func generateOCTestMetricHistogram() *ocmetrics.Metric { { Count: 1, Exemplar: &ocmetrics.DistributionValue_Exemplar{ - Timestamp: timestamppb.New(testdata.TestMetricExemplarTime), + Timestamp: timestamppb.New(testdataold.TestMetricExemplarTime), Value: 15, Attachments: map[string]string{testdata.TestAttachmentKey: testdata.TestAttachmentValue}, }, @@ -387,7 +388,7 @@ func generateOCTestMetricHistogram() *ocmetrics.Metric { func generateOCTestMetricSummary() *ocmetrics.Metric { return &ocmetrics.Metric{ MetricDescriptor: &ocmetrics.MetricDescriptor{ - Name: testdata.TestSummaryMetricName, + Name: testdataold.TestSummaryMetricName, Description: "", Unit: "1", Type: ocmetrics.MetricDescriptor_SUMMARY, @@ -397,7 +398,7 @@ func generateOCTestMetricSummary() *ocmetrics.Metric { }, Timeseries: []*ocmetrics.TimeSeries{ { - StartTimestamp: timestamppb.New(testdata.TestMetricStartTime), + StartTimestamp: timestamppb.New(testdataold.TestMetricStartTime), LabelValues: []*ocmetrics.LabelValue{ { // key1 @@ -407,7 +408,7 @@ func generateOCTestMetricSummary() *ocmetrics.Metric { }, Points: []*ocmetrics.Point{ { - Timestamp: timestamppb.New(testdata.TestMetricTime), + Timestamp: timestamppb.New(testdataold.TestMetricTime), Value: &ocmetrics.Point_SummaryValue{ SummaryValue: &ocmetrics.SummaryValue{ Count: &wrapperspb.Int64Value{ @@ -422,7 +423,7 @@ func generateOCTestMetricSummary() *ocmetrics.Metric { }, }, { - StartTimestamp: timestamppb.New(testdata.TestMetricStartTime), + StartTimestamp: timestamppb.New(testdataold.TestMetricStartTime), LabelValues: []*ocmetrics.LabelValue{ { // key1 @@ -432,7 +433,7 @@ func generateOCTestMetricSummary() *ocmetrics.Metric { }, Points: []*ocmetrics.Point{ { - Timestamp: timestamppb.New(testdata.TestMetricTime), + Timestamp: timestamppb.New(testdataold.TestMetricTime), Value: &ocmetrics.Point_SummaryValue{ SummaryValue: &ocmetrics.SummaryValue{ Count: &wrapperspb.Int64Value{ diff --git a/translator/internaldata/oc_to_metrics.go b/translator/internaldata/oc_to_metrics.go index 3a5cd195e14..acff74d9cb7 100644 --- 
a/translator/internaldata/oc_to_metrics.go +++ b/translator/internaldata/oc_to_metrics.go @@ -21,16 +21,16 @@ import ( "go.opentelemetry.io/collector/consumer/consumerdata" "go.opentelemetry.io/collector/consumer/pdata" "go.opentelemetry.io/collector/internal" - "go.opentelemetry.io/collector/internal/data" + "go.opentelemetry.io/collector/internal/dataold" ) const ( - invalidMetricType = pdata.MetricType(-1) + invalidMetricType = dataold.MetricType(-1) ) // OCSliceToMetricData converts a slice of OC data format to MetricData. -func OCSliceToMetricData(ocmds []consumerdata.MetricsData) data.MetricData { - metricData := data.NewMetricData() +func OCSliceToMetricData(ocmds []consumerdata.MetricsData) dataold.MetricData { + metricData := dataold.NewMetricData() if len(ocmds) == 0 { return metricData } @@ -41,13 +41,13 @@ func OCSliceToMetricData(ocmds []consumerdata.MetricsData) data.MetricData { } // OCToMetricData converts OC data format to MetricData. -func OCToMetricData(md consumerdata.MetricsData) data.MetricData { - metricData := data.NewMetricData() +func OCToMetricData(md consumerdata.MetricsData) dataold.MetricData { + metricData := dataold.NewMetricData() appendOcToMetriData(md, metricData) return metricData } -func appendOcToMetriData(md consumerdata.MetricsData, dest data.MetricData) { +func appendOcToMetriData(md consumerdata.MetricsData, dest dataold.MetricData) { if md.Node == nil && md.Resource == nil && len(md.Metrics) == 0 { return } @@ -136,7 +136,7 @@ func appendOcToMetriData(md consumerdata.MetricsData, dest data.MetricData) { } } -func ocMetricToResourceMetrics(ocMetric *ocmetrics.Metric, node *occommon.Node, out pdata.ResourceMetrics) { +func ocMetricToResourceMetrics(ocMetric *ocmetrics.Metric, node *occommon.Node, out dataold.ResourceMetrics) { ocNodeResourceToInternal(node, ocMetric.Resource, out.Resource()) ilms := out.InstrumentationLibraryMetrics() ilms.Resize(1) @@ -146,12 +146,12 @@ func ocMetricToResourceMetrics(ocMetric *ocmetrics.Metric, node *occommon.Node, } // ocMetricToInternal conversts ocMetric to internal representation and fill metric -func ocMetricToInternal(ocMetric *ocmetrics.Metric, metric pdata.Metric) { +func ocMetricToInternal(ocMetric *ocmetrics.Metric, metric dataold.Metric) { descriptorToInternal(ocMetric.GetMetricDescriptor(), metric.MetricDescriptor()) setDataPoints(ocMetric, metric) } -func descriptorToInternal(ocDescriptor *ocmetrics.MetricDescriptor, descriptor pdata.MetricDescriptor) { +func descriptorToInternal(ocDescriptor *ocmetrics.MetricDescriptor, descriptor dataold.MetricDescriptor) { if ocDescriptor == nil { return } @@ -168,29 +168,29 @@ func descriptorToInternal(ocDescriptor *ocmetrics.MetricDescriptor, descriptor p descriptor.SetUnit(ocDescriptor.GetUnit()) } -func descriptorTypeToInternal(t ocmetrics.MetricDescriptor_Type) pdata.MetricType { +func descriptorTypeToInternal(t ocmetrics.MetricDescriptor_Type) dataold.MetricType { switch t { case ocmetrics.MetricDescriptor_UNSPECIFIED: - return pdata.MetricTypeInvalid + return dataold.MetricTypeInvalid case ocmetrics.MetricDescriptor_GAUGE_INT64: - return pdata.MetricTypeInt64 + return dataold.MetricTypeInt64 case ocmetrics.MetricDescriptor_GAUGE_DOUBLE: - return pdata.MetricTypeDouble + return dataold.MetricTypeDouble case ocmetrics.MetricDescriptor_CUMULATIVE_INT64: - return pdata.MetricTypeMonotonicInt64 + return dataold.MetricTypeMonotonicInt64 case ocmetrics.MetricDescriptor_CUMULATIVE_DOUBLE: - return pdata.MetricTypeMonotonicDouble + return 
dataold.MetricTypeMonotonicDouble case ocmetrics.MetricDescriptor_CUMULATIVE_DISTRIBUTION: - return pdata.MetricTypeHistogram + return dataold.MetricTypeHistogram case ocmetrics.MetricDescriptor_SUMMARY: - return pdata.MetricTypeSummary + return dataold.MetricTypeSummary default: return invalidMetricType } } // setDataPoints converts OC timeseries to internal datapoints based on metric type -func setDataPoints(ocMetric *ocmetrics.Metric, metric pdata.Metric) { +func setDataPoints(ocMetric *ocmetrics.Metric, metric dataold.Metric) { var int64DataPointsNum, doubleDataPointsNum, histogramDataPointsNum, summaryDataPointsNum int ocLabelsKeys := ocMetric.GetMetricDescriptor().GetLabelKeys() ocPointsCount := getPointsCount(ocMetric) @@ -296,15 +296,15 @@ func setLabelsMap(ocLabelsKeys []*ocmetrics.LabelKey, ocLabelValues []*ocmetrics } } -func setInt64DataPointValue(dataPoint pdata.Int64DataPoint, point *ocmetrics.Point) { +func setInt64DataPointValue(dataPoint dataold.Int64DataPoint, point *ocmetrics.Point) { dataPoint.SetValue(point.GetInt64Value()) } -func setDoubleDataPointValue(dataPoint pdata.DoubleDataPoint, point *ocmetrics.Point) { +func setDoubleDataPointValue(dataPoint dataold.DoubleDataPoint, point *ocmetrics.Point) { dataPoint.SetValue(point.GetDoubleValue()) } -func setHistogramDataPointValue(dataPoint pdata.HistogramDataPoint, point *ocmetrics.Point) { +func setHistogramDataPointValue(dataPoint dataold.HistogramDataPoint, point *ocmetrics.Point) { distributionValue := point.GetDistributionValue() dataPoint.SetSum(distributionValue.GetSum()) dataPoint.SetCount(uint64(distributionValue.GetCount())) @@ -312,7 +312,7 @@ func setHistogramDataPointValue(dataPoint pdata.HistogramDataPoint, point *ocmet dataPoint.SetExplicitBounds(distributionValue.GetBucketOptions().GetExplicit().GetBounds()) } -func histogramBucketsToInternal(ocBuckets []*ocmetrics.DistributionValue_Bucket, buckets pdata.HistogramBucketSlice) { +func histogramBucketsToInternal(ocBuckets []*ocmetrics.DistributionValue_Bucket, buckets dataold.HistogramBucketSlice) { buckets.Resize(len(ocBuckets)) for i := 0; i < buckets.Len(); i++ { bucket := buckets.At(i) @@ -324,7 +324,7 @@ func histogramBucketsToInternal(ocBuckets []*ocmetrics.DistributionValue_Bucket, } } -func exemplarToInternal(ocExemplar *ocmetrics.DistributionValue_Exemplar, exemplar pdata.HistogramBucketExemplar) { +func exemplarToInternal(ocExemplar *ocmetrics.DistributionValue_Exemplar, exemplar dataold.HistogramBucketExemplar) { if ocExemplar.GetTimestamp() != nil { exemplar.SetTimestamp(internal.TimestampToUnixNano(ocExemplar.GetTimestamp())) } @@ -337,7 +337,7 @@ func exemplarToInternal(ocExemplar *ocmetrics.DistributionValue_Exemplar, exempl } } -func setSummaryDataPointValue(dataPoint pdata.SummaryDataPoint, point *ocmetrics.Point) { +func setSummaryDataPointValue(dataPoint dataold.SummaryDataPoint, point *ocmetrics.Point) { summaryValue := point.GetSummaryValue() dataPoint.SetSum(summaryValue.GetSum().GetValue()) dataPoint.SetCount(uint64(summaryValue.GetCount().GetValue())) @@ -346,7 +346,7 @@ func setSummaryDataPointValue(dataPoint pdata.SummaryDataPoint, point *ocmetrics func percentileToInternal( ocPercentiles []*ocmetrics.SummaryValue_Snapshot_ValueAtPercentile, - percentiles pdata.SummaryValueAtPercentileSlice, + percentiles dataold.SummaryValueAtPercentileSlice, ) { percentiles.Resize(len(ocPercentiles)) for i := 0; i < percentiles.Len(); i++ { diff --git a/translator/internaldata/oc_to_metrics_test.go b/translator/internaldata/oc_to_metrics_test.go 
index ea12c257f5a..1852962ce27 100644 --- a/translator/internaldata/oc_to_metrics_test.go +++ b/translator/internaldata/oc_to_metrics_test.go @@ -23,20 +23,20 @@ import ( "github.com/stretchr/testify/assert" "go.opentelemetry.io/collector/consumer/consumerdata" - "go.opentelemetry.io/collector/internal/data" - "go.opentelemetry.io/collector/internal/data/testdata" + "go.opentelemetry.io/collector/internal/dataold" + "go.opentelemetry.io/collector/internal/dataold/testdataold" ) func TestOCToMetricData(t *testing.T) { tests := []struct { name string oc consumerdata.MetricsData - internal data.MetricData + internal dataold.MetricData }{ { name: "empty", oc: consumerdata.MetricsData{}, - internal: testdata.GenerateMetricDataEmpty(), + internal: testdataold.GenerateMetricDataEmpty(), }, { @@ -45,49 +45,49 @@ func TestOCToMetricData(t *testing.T) { Node: &occommon.Node{}, Resource: &ocresource.Resource{}, }, - internal: wrapMetricsWithEmptyResource(testdata.GenerateMetricDataOneEmptyResourceMetrics()), + internal: wrapMetricsWithEmptyResource(testdataold.GenerateMetricDataOneEmptyResourceMetrics()), }, { name: "no-libraries", oc: generateOCTestDataNoMetrics(), - internal: testdata.GenerateMetricDataNoLibraries(), + internal: testdataold.GenerateMetricDataNoLibraries(), }, { name: "all-types-no-points", oc: generateOCTestDataNoPoints(), - internal: testdata.GenerateMetricDataAllTypesNoDataPoints(), + internal: testdataold.GenerateMetricDataAllTypesNoDataPoints(), }, { name: "one-metric-no-labels", oc: generateOCTestDataNoLabels(), - internal: testdata.GenerateMetricDataOneMetricNoLabels(), + internal: testdataold.GenerateMetricDataOneMetricNoLabels(), }, { name: "one-metric", oc: generateOCTestDataMetricsOneMetric(), - internal: testdata.GenerateMetricDataOneMetric(), + internal: testdataold.GenerateMetricDataOneMetric(), }, { name: "one-metric-one-nil", oc: generateOCTestDataMetricsOneMetricOneNil(), - internal: testdata.GenerateMetricDataOneMetric(), + internal: testdataold.GenerateMetricDataOneMetric(), }, { name: "one-metric-one-nil-timeseries", oc: generateOCTestDataMetricsOneMetricOneNilTimeseries(), - internal: testdata.GenerateMetricDataOneMetric(), + internal: testdataold.GenerateMetricDataOneMetric(), }, { name: "one-metric-one-nil-point", oc: generateOCTestDataMetricsOneMetricOneNilPoint(), - internal: testdata.GenerateMetricDataOneMetric(), + internal: testdataold.GenerateMetricDataOneMetric(), }, { @@ -101,7 +101,7 @@ func TestOCToMetricData(t *testing.T) { generateOCTestMetricSummary(), }, }, - internal: testdata.GenerateMetricDataWithCountersHistogramAndSummary(), + internal: testdataold.GenerateMetricDataWithCountersHistogramAndSummary(), }, } @@ -114,7 +114,7 @@ func TestOCToMetricData(t *testing.T) { test.oc, test.oc, } - wantSlice := data.NewMetricData() + wantSlice := dataold.NewMetricData() // Double the ResourceMetrics only if not empty. if test.internal.ResourceMetrics().Len() != 0 { test.internal.Clone().ResourceMetrics().MoveAndAppendTo(wantSlice.ResourceMetrics()) @@ -127,7 +127,7 @@ func TestOCToMetricData(t *testing.T) { } // TODO: Try to avoid unnecessary Resource object allocation. -func wrapMetricsWithEmptyResource(md data.MetricData) data.MetricData { +func wrapMetricsWithEmptyResource(md dataold.MetricData) dataold.MetricData { md.ResourceMetrics().At(0).Resource().InitEmpty() return md }
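The oc_to_metrics.go and metrics_to_oc.go hunks above keep two direction-specific switch statements in sync on `dataold.MetricType`. A small round-trip test could make that invariant explicit; this is only a sketch, assumed to live alongside the existing tests in the `translator/internaldata` package (both helpers are unexported there), and it relies solely on the constants and functions visible in the diff.

```go
package internaldata

import (
	"testing"

	"github.com/stretchr/testify/assert"

	"go.opentelemetry.io/collector/internal/dataold"
)

// TestDescriptorTypeRoundTrip checks that converting every old metric type to
// its OpenCensus descriptor type and back yields the original value.
func TestDescriptorTypeRoundTrip(t *testing.T) {
	types := []dataold.MetricType{
		dataold.MetricTypeInvalid,
		dataold.MetricTypeInt64,
		dataold.MetricTypeDouble,
		dataold.MetricTypeMonotonicInt64,
		dataold.MetricTypeMonotonicDouble,
		dataold.MetricTypeHistogram,
		dataold.MetricTypeSummary,
	}
	for _, mt := range types {
		assert.Equal(t, mt, descriptorTypeToInternal(descriptorTypeToOC(mt)))
	}
}
```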
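Tying the receiver and translator hunks together: raw OTLP `ResourceMetrics` (now taken from the renamed `metrics/v1old` generated package) are first lifted into `dataold.MetricData` and only then translated to the OpenCensus representation. A minimal sketch of that two-hop path, assuming it sits inside the collector module where the `internal/...` packages are importable; it uses only the calls shown in the diff.

```go
package example // hypothetical location inside the collector module

import (
	"go.opentelemetry.io/collector/consumer/consumerdata"
	otlpmetrics "go.opentelemetry.io/collector/internal/data/opentelemetry-proto-gen/metrics/v1old"
	"go.opentelemetry.io/collector/internal/dataold"
	"go.opentelemetry.io/collector/translator/internaldata"
)

// otlpToOC wraps raw OTLP resource metrics in the old internal representation
// and converts them to OpenCensus consumerdata form, the same path the OTLP
// receiver and the OC translator take in the hunks above.
func otlpToOC(rms []*otlpmetrics.ResourceMetrics) []consumerdata.MetricsData {
	md := dataold.MetricDataFromOtlp(rms)
	return internaldata.MetricDataToOC(md)
}
```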
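The data_providers.go hunk shows the load generator building its metrics through `dataold` and handing them out as the opaque `pdata.Metrics`. As a compact illustration of that construction pattern, here is a sketch of one int64 metric; the slice and descriptor accessors mirror the calls visible in the diff, while anything not shown there (such as `Resize` on the inner `MetricSlice`) is an assumption about the generated API of the time.

```go
package example // hypothetical package inside the collector module; dataold is internal

import (
	"go.opentelemetry.io/collector/consumer/pdata"
	"go.opentelemetry.io/collector/consumer/pdatautil"
	"go.opentelemetry.io/collector/internal/dataold"
)

// buildOldMetrics builds a single int64 metric using the old internal
// representation and wraps it for consumers that accept pdata.Metrics.
func buildOldMetrics() pdata.Metrics {
	md := dataold.NewMetricData()
	md.ResourceMetrics().Resize(1)
	rm := md.ResourceMetrics().At(0)
	rm.InstrumentationLibraryMetrics().Resize(1)
	ilm := rm.InstrumentationLibraryMetrics().At(0)

	ilm.Metrics().Resize(1) // assumed: MetricSlice exposes Resize like ResourceMetricsSlice does above
	metric := ilm.Metrics().At(0)

	descriptor := metric.MetricDescriptor()
	descriptor.InitEmpty()
	descriptor.SetName("load_generator_0")
	descriptor.SetDescription("Load Generator Counter #0")
	descriptor.SetUnit("1")
	descriptor.SetType(dataold.MetricTypeInt64)

	// The opaque wrapper is what components exchange while the metrics
	// pdata migration is in flight.
	return pdatautil.MetricsFromInternalMetrics(md)
}
```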
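The metric_diff.go changes keep the correctness-test diffing helpers on the old types. A short usage sketch of that machinery follows, assumed to live in a `_test.go` file of the `testbed/correctness/metrics` package since `diffRMSlices` and `toSlice` are unexported there; it compares the default golden dataset against a histogram-typed variant and prints the human-readable differences.

```go
package metrics

import (
	"fmt"

	"go.opentelemetry.io/collector/internal/dataold"
	"go.opentelemetry.io/collector/internal/goldendataset"
)

// printGoldenDiffs generates two old-style metric datasets that differ in
// descriptor type and renders the resulting MetricDiff entries.
func printGoldenDiffs() {
	expected := goldendataset.DefaultMetricData()

	cfg := goldendataset.DefaultCfg()
	cfg.MetricDescriptorType = dataold.MetricTypeHistogram
	actual := goldendataset.MetricDataFromCfg(cfg)

	diffs := diffRMSlices(
		toSlice(expected.ResourceMetrics()),
		toSlice(actual.ResourceMetrics()),
	)
	for _, d := range diffs {
		fmt.Print(d) // MetricDiff.String already ends with a newline
	}
}
```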
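For test wiring like the receivers_builder_test.go change, the generated old-style fixtures plug straight into the consumer API once wrapped. A sketch of that hand-off, assuming `consumer.MetricsConsumer` exposes `ConsumeMetrics(ctx, pdata.Metrics) error` as the builder test implies; the helper name is hypothetical.

```go
package example // hypothetical test helper inside the collector module

import (
	"context"

	"go.opentelemetry.io/collector/consumer"
	"go.opentelemetry.io/collector/consumer/pdatautil"
	"go.opentelemetry.io/collector/internal/dataold/testdataold"
)

// consumeOneOldMetric pushes a single generated old-style metric through any
// metrics consumer, mirroring the receivers builder test above.
func consumeOneOldMetric(ctx context.Context, mc consumer.MetricsConsumer) error {
	md := testdataold.GenerateMetricDataOneMetric()
	return mc.ConsumeMetrics(ctx, pdatautil.MetricsFromInternalMetrics(md))
}
```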