From d8dd69a5af99f6a00f99f521b885d10c92962dfa Mon Sep 17 00:00:00 2001 From: Scott Gerring Date: Fri, 13 Dec 2024 14:43:57 +0100 Subject: [PATCH] chore: Add metric integration tests --- .../tests/integration_test/Cargo.toml | 2 +- .../expected/different_metrics.json | 133 +++++++++++++++++ .../integration_test/expected/metrics.json | 138 ++++++++++-------- .../expected/serialized_metrics.json | 103 +++++-------- .../otel-collector-config.yaml | 3 + .../integration_test/src/metrics_asserter.rs | 87 ++++++++++- .../tests/integration_tests.rs | 44 ++++++ .../tests/integration_test/tests/metrics.rs | 119 ++++++++++++++- 8 files changed, 490 insertions(+), 139 deletions(-) create mode 100644 opentelemetry-otlp/tests/integration_test/expected/different_metrics.json diff --git a/opentelemetry-otlp/tests/integration_test/Cargo.toml b/opentelemetry-otlp/tests/integration_test/Cargo.toml index 413673286e..023d5c908f 100644 --- a/opentelemetry-otlp/tests/integration_test/Cargo.toml +++ b/opentelemetry-otlp/tests/integration_test/Cargo.toml @@ -7,7 +7,7 @@ publish = false [dependencies] opentelemetry = { path = "../../../opentelemetry", features = ["metrics", "logs"] } -opentelemetry_sdk = { path = "../../../opentelemetry-sdk", features = ["rt-tokio", "logs", "testing"] } +opentelemetry_sdk = { path = "../../../opentelemetry-sdk", features = ["rt-tokio", "logs", "testing", "metrics"] } opentelemetry-proto = { path = "../../../opentelemetry-proto", features = ["gen-tonic-messages", "trace", "logs", "with-serde"] } log = { workspace = true } tokio = { version = "1.0", features = ["full"] } diff --git a/opentelemetry-otlp/tests/integration_test/expected/different_metrics.json b/opentelemetry-otlp/tests/integration_test/expected/different_metrics.json new file mode 100644 index 0000000000..5b9bcdba0a --- /dev/null +++ b/opentelemetry-otlp/tests/integration_test/expected/different_metrics.json @@ -0,0 +1,133 @@ +{ + "resourceMetrics": [ + { + "resource": { + "attributes": [ + { + 
"key": "service.name", + "value": { + "stringValue": "metrics-integration-test" + } + } + ] + }, + "scopeMetrics": [ + { + "scope": { + "name": "meter" + }, + "metrics": [ + { + "name": "counter_u64", + "sum": { + "dataPoints": [ + { + "attributes": [ + { + "key": "mykey1", + "value": { + "stringValue": "mydifferentval" + } + }, + { + "key": "mykey2", + "value": { + "stringValue": "myvalue2" + } + } + ], + "startTimeUnixNano": "1734094309366798000", + "timeUnixNano": "1734094317871514000", + "asInt": "15" + } + ], + "aggregationTemporality": 2, + "isMonotonic": true + } + }, + { + "name": "example_histogram", + "histogram": { + "dataPoints": [ + { + "attributes": [ + { + "key": "mykey3", + "value": { + "stringValue": "myvalue4" + } + } + ], + "startTimeUnixNano": "1734094309366875000", + "timeUnixNano": "1734094317871537000", + "count": "1", + "sum": 42, + "bucketCounts": [ + "0", + "0", + "0", + "0", + "1", + "0", + "0", + "0", + "0", + "0", + "0", + "0", + "0", + "0", + "0", + "0" + ], + "explicitBounds": [ + 0, + 5, + 10, + 25, + 50, + 75, + 100, + 250, + 500, + 750, + 1000, + 2500, + 5000, + 7500, + 10000 + ], + "min": 42, + "max": 42 + } + ], + "aggregationTemporality": 2 + } + }, + { + "name": "example_up_down_counter", + "sum": { + "dataPoints": [ + { + "attributes": [ + { + "key": "mykey5", + "value": { + "stringValue": "myvalue5" + } + } + ], + "startTimeUnixNano": "1734094309366941000", + "timeUnixNano": "1734094317871548000", + "asInt": "-1" + } + ], + "aggregationTemporality": 2 + } + } + ] + } + ] + } + ] +} diff --git a/opentelemetry-otlp/tests/integration_test/expected/metrics.json b/opentelemetry-otlp/tests/integration_test/expected/metrics.json index fa713b8ea3..f1711d889e 100644 --- a/opentelemetry-otlp/tests/integration_test/expected/metrics.json +++ b/opentelemetry-otlp/tests/integration_test/expected/metrics.json @@ -6,7 +6,7 @@ { "key": "service.name", "value": { - "stringValue": "my.service" + "stringValue": "metrics-integration-test" } } ] 
@@ -14,106 +14,120 @@ "scopeMetrics": [ { "scope": { - "name": "my.library", - "version": "1.0.0", - "attributes": [ - { - "key": "my.scope.attribute", - "value": { - "stringValue": "some scope attribute" - } - } - ] + "name": "meter" }, "metrics": [ { - "name": "my.counter", - "unit": "1", - "description": "I am a Counter", - "metadata": [], + "name": "counter_u64", "sum": { - "aggregationTemporality": 1, - "isMonotonic": true, "dataPoints": [ { - "asDouble": 5, - "startTimeUnixNano": "1544712660300000000", - "timeUnixNano": "1544712660300000000", "attributes": [ { - "key": "my.counter.attr", + "key": "mykey1", "value": { - "stringValue": "some value" + "stringValue": "myvalue1" + } + }, + { + "key": "mykey2", + "value": { + "stringValue": "myvalue2" } } ], - "exemplars": [], - "flags": 0 + "startTimeUnixNano": "1734094309366798000", + "timeUnixNano": "1734094317871514000", + "asInt": "10" } - ] + ], + "aggregationTemporality": 2, + "isMonotonic": true } }, { - "name": "my.gauge", - "unit": "1", - "description": "I am a Gauge", - "metadata": [], - "gauge": { + "name": "example_histogram", + "histogram": { "dataPoints": [ { - "asDouble": 10, - "startTimeUnixNano": "1544712660300000000", - "timeUnixNano": "1544712660300000000", "attributes": [ { - "key": "my.gauge.attr", + "key": "mykey3", "value": { - "stringValue": "some value" + "stringValue": "myvalue4" } } ], - "exemplars": [], - "flags": 0 + "startTimeUnixNano": "1734094309366875000", + "timeUnixNano": "1734094317871537000", + "count": "1", + "sum": 42, + "bucketCounts": [ + "0", + "0", + "0", + "0", + "1", + "0", + "0", + "0", + "0", + "0", + "0", + "0", + "0", + "0", + "0", + "0" + ], + "explicitBounds": [ + 0, + 5, + 10, + 25, + 50, + 75, + 100, + 250, + 500, + 750, + 1000, + 2500, + 5000, + 7500, + 10000 + ], + "min": 42, + "max": 42 } - ] + ], + "aggregationTemporality": 2 } }, { - "name": "my.histogram", - "unit": "1", - "description": "I am a Histogram", - "metadata": [], - "histogram": { - 
"aggregationTemporality": 1, + "name": "example_up_down_counter", + "sum": { "dataPoints": [ { - "startTimeUnixNano": "1544712660300000000", - "timeUnixNano": "1544712660300000000", - "count": 2, - "sum": 2, - "bucketCounts": [1,1], - "explicitBounds": [1], - "min": 0, - "max": 2, "attributes": [ { - "key": "my.histogram.attr", + "key": "mykey5", "value": { - "stringValue": "some value" + "stringValue": "myvalue5" } } ], - "exemplars": [], - "flags": 0 + "startTimeUnixNano": "1734094309366941000", + "timeUnixNano": "1734094317871548000", + "asInt": "-1" } - ] + ], + "aggregationTemporality": 2 } } - ], - "schemaUrl": "whatever" + ] } - ], - "schemaUrl": "whatever" + ] } ] -} \ No newline at end of file +} diff --git a/opentelemetry-otlp/tests/integration_test/expected/serialized_metrics.json b/opentelemetry-otlp/tests/integration_test/expected/serialized_metrics.json index 4910e128a2..de13fb3cbf 100644 --- a/opentelemetry-otlp/tests/integration_test/expected/serialized_metrics.json +++ b/opentelemetry-otlp/tests/integration_test/expected/serialized_metrics.json @@ -6,7 +6,7 @@ { "key": "service.name", "value": { - "stringValue": "my.service" + "stringValue": "metrics-integration-test" } } ], @@ -15,112 +15,81 @@ "scopeMetrics": [ { "scope": { - "name": "my.library", - "version": "1.0.0", - "attributes": [ - { - "key": "my.scope.attribute", - "value": { - "stringValue": "some scope attribute" - } - } - ], + "name": "meter", + "version": "", + "attributes": [], "droppedAttributesCount": 0 }, "metrics": [ { - "name": "my.counter", - "description": "I am a Counter", - "unit": "1", + "name": "counter_u64", + "description": "", + "unit": "", "metadata": [], "sum": { "dataPoints": [ { "attributes": [ { - "key": "my.counter.attr", + "key": "mykey1", "value": { - "stringValue": "some value" + "stringValue": "myvalue1" + } + }, + { + "key": "mykey2", + "value": { + "stringValue": "myvalue2" } } ], - "startTimeUnixNano": "1544712660300000000", - "timeUnixNano": 
"1544712660300000000", + "startTimeUnixNano": "1734094309366798000", + "timeUnixNano": "1734094317871514000", "exemplars": [], - "flags": 0, - "asDouble": 5.0 + "flags": 0 } ], - "aggregationTemporality": 1, + "aggregationTemporality": 2, "isMonotonic": true } }, { - "name": "my.gauge", - "description": "I am a Gauge", - "unit": "1", - "metadata": [], - "gauge": { - "dataPoints": [ - { - "attributes": [ - { - "key": "my.gauge.attr", - "value": { - "stringValue": "some value" - } - } - ], - "startTimeUnixNano": "1544712660300000000", - "timeUnixNano": "1544712660300000000", - "exemplars": [], - "flags": 0, - "asDouble": 10.0 - } - ] - } + "name": "example_histogram", + "description": "", + "unit": "", + "metadata": [] }, { - "name": "my.histogram", - "description": "I am a Histogram", - "unit": "1", + "name": "example_up_down_counter", + "description": "", + "unit": "", "metadata": [], - "histogram": { + "sum": { "dataPoints": [ { "attributes": [ { - "key": "my.histogram.attr", + "key": "mykey5", "value": { - "stringValue": "some value" + "stringValue": "myvalue5" } } ], - "startTimeUnixNano": "1544712660300000000", - "timeUnixNano": "1544712660300000000", - "count": 2, - "sum": 2.0, - "bucketCounts": [ - 1, - 1 - ], - "explicitBounds": [ - 1.0 - ], + "startTimeUnixNano": "1734094309366941000", + "timeUnixNano": "1734094317871548000", "exemplars": [], - "flags": 0, - "min": 0.0, - "max": 2.0 + "flags": 0 } ], - "aggregationTemporality": 1 + "aggregationTemporality": 2, + "isMonotonic": false } } ], - "schemaUrl": "whatever" + "schemaUrl": "" } ], - "schemaUrl": "whatever" + "schemaUrl": "" } ] } \ No newline at end of file diff --git a/opentelemetry-otlp/tests/integration_test/otel-collector-config.yaml b/opentelemetry-otlp/tests/integration_test/otel-collector-config.yaml index 7cd19bbfee..846339dee8 100644 --- a/opentelemetry-otlp/tests/integration_test/otel-collector-config.yaml +++ b/opentelemetry-otlp/tests/integration_test/otel-collector-config.yaml @@ -18,3 
+18,6 @@ service: logs: receivers: [otlp] exporters: [file] + metrics: + receivers: [otlp] + exporters: [file] \ No newline at end of file diff --git a/opentelemetry-otlp/tests/integration_test/src/metrics_asserter.rs b/opentelemetry-otlp/tests/integration_test/src/metrics_asserter.rs index 4845270999..cfef6d5742 100644 --- a/opentelemetry-otlp/tests/integration_test/src/metrics_asserter.rs +++ b/opentelemetry-otlp/tests/integration_test/src/metrics_asserter.rs @@ -1,6 +1,6 @@ use std::fs::File; -use opentelemetry_proto::tonic::metrics::v1::{MetricsData, ResourceMetrics}; +use opentelemetry_proto::tonic::metrics::v1::{metric, MetricsData, ResourceMetrics, ScopeMetrics}; pub struct MetricsAsserter { results: Vec, @@ -13,23 +13,91 @@ impl MetricsAsserter { } pub fn assert(self) { - self.assert_resource_metrics_eq(&self.results, &self.expected); + let mut results = self.results.clone(); + let mut expected = self.expected.clone(); + self.assert_resource_metrics_eq(&mut results, &mut expected); } fn assert_resource_metrics_eq( &self, - results: &[ResourceMetrics], - expected: &[ResourceMetrics], + results: &mut [ResourceMetrics], + expected: &mut [ResourceMetrics], ) { assert_eq!(results.len(), expected.len()); for i in 0..results.len() { - let result_resource_metrics = &results[i]; - let expected_resource_metrics = &expected[i]; - assert_eq!(result_resource_metrics, expected_resource_metrics); + let mut result_resource_metrics = &mut results.get_mut(i).unwrap(); + let mut expected_resource_metrics = &mut expected.get_mut(i).unwrap(); + assert_eq(*result_resource_metrics, *expected_resource_metrics); } } } +pub fn zero_out_scope_metrics_timestamps(scope_metrics: &mut Vec) { + for scope_metric in scope_metrics { + for metric in &mut scope_metric.metrics { + match &mut metric.data { + Some(metric::Data::Gauge(gauge)) => { + for data_point in &mut gauge.data_points { + data_point.start_time_unix_nano = 0; + data_point.time_unix_nano = 0; + } + } + 
Some(metric::Data::Sum(sum)) => { + for data_point in &mut sum.data_points { + data_point.start_time_unix_nano = 0; + data_point.time_unix_nano = 0; + } + } + Some(metric::Data::Histogram(hist)) => { + for data_point in &mut hist.data_points { + data_point.start_time_unix_nano = 0; + data_point.time_unix_nano = 0; + } + } + Some(metric::Data::ExponentialHistogram(ehist)) => { + for data_point in &mut ehist.data_points { + data_point.start_time_unix_nano = 0; + data_point.time_unix_nano = 0; + } + } + Some(metric::Data::Summary(summary)) => { + for data_point in &mut summary.data_points { + data_point.start_time_unix_nano = 0; + data_point.time_unix_nano = 0; + } + } + None => {} // Do nothing for metrics with no data + } + } + } +} + +/// +/// Compare ResourceMetrics to each other. Because we have timestamps that will be +/// different we need to actively handle this, rather than relying on default +/// comparisons. +/// +fn assert_eq( + result_resource_metrics: &mut ResourceMetrics, + expected_resource_metrics: &mut ResourceMetrics, +) { + // No timestamps on the resource itself - compare this verbatim + assert_eq!( + result_resource_metrics.resource, + expected_resource_metrics.resource + ); + + // Compare the metrics themselves + let mut result_scope_metrics = &mut result_resource_metrics.scope_metrics; + let mut expected_scope_metrics = &mut expected_resource_metrics.scope_metrics; + + // Zero out all the timestamps so we can use the default comparisons + zero_out_scope_metrics_timestamps(&mut result_scope_metrics); + zero_out_scope_metrics_timestamps(&mut expected_scope_metrics); + + assert_eq!(result_scope_metrics, expected_scope_metrics); +} + // read a file contains ResourceMetrics in json format pub fn read_metrics_from_json(file: File) -> Vec { let reader = std::io::BufReader::new(file); @@ -38,3 +106,8 @@ pub fn read_metrics_from_json(file: File) -> Vec { serde_json::from_reader(reader).expect("Failed to read json file"); metrics_data.resource_metrics 
} + +pub fn read_metrics_from_json_string(json: &String) -> Vec { + let metrics_data: MetricsData = serde_json::from_str(json).expect("Failed to read json file"); + metrics_data.resource_metrics +} diff --git a/opentelemetry-otlp/tests/integration_test/tests/integration_tests.rs b/opentelemetry-otlp/tests/integration_test/tests/integration_tests.rs index 5f5468d0dc..1928ceb296 100644 --- a/opentelemetry-otlp/tests/integration_test/tests/integration_tests.rs +++ b/opentelemetry-otlp/tests/integration_test/tests/integration_tests.rs @@ -54,6 +54,7 @@ impl TestSuite { async fn integration_tests() { trace_integration_tests().await; logs_integration_tests().await; + metrics_integration_tests().await; } async fn trace_integration_tests() { @@ -140,3 +141,46 @@ async fn logs_integration_tests() { collector_container.stop(); } + +async fn metrics_integration_tests() { + let test_suites = [TestSuite::new("metrics.json")]; + + let mut collector_image = Collector::default(); + for test in test_suites.as_ref() { + let _ = test.create_temporary_result_file(); + collector_image = collector_image.with_volume( + test.result_file_path().as_str(), + test.result_file_path_in_container().as_str(), + ); + } + + let docker = Cli::default(); + let mut image = + RunnableImage::from(collector_image).with_container_name(COLLECTOR_CONTAINER_NAME); + + for port in [ + 4317, // gRPC port + 4318, // HTTP port + ] { + image = image.with_mapped_port(Port { + local: port, + internal: port, + }) + } + + let collector_container = docker.run(image); + + tokio::time::sleep(Duration::from_secs(5)).await; + metrics::metrics().await.unwrap(); + + // wait for file to flush to disks + // ideally we should use volume mount but otel collector file exporter doesn't handle permission too well + // bind mount mitigate the issue by set up the permission correctly on host system + tokio::time::sleep(Duration::from_secs(5)).await; + metrics::assert_metrics_results( + test_suites[0].result_file_path().as_str(), + 
test_suites[0].expected_file_path().as_str(), + ); + + collector_container.stop(); +} diff --git a/opentelemetry-otlp/tests/integration_test/tests/metrics.rs b/opentelemetry-otlp/tests/integration_test/tests/metrics.rs index 5395c67d58..0b10ab2d0d 100644 --- a/opentelemetry-otlp/tests/integration_test/tests/metrics.rs +++ b/opentelemetry-otlp/tests/integration_test/tests/metrics.rs @@ -1,7 +1,104 @@ +use integration_test_runner::logs_asserter::{read_logs_from_json, LogsAsserter}; +use integration_test_runner::metrics_asserter::{ + read_metrics_from_json, read_metrics_from_json_string, MetricsAsserter, +}; +use opentelemetry::metrics::MeterProvider; +use opentelemetry::trace::FutureExt; +use opentelemetry::KeyValue; +use opentelemetry_otlp::MetricExporter; +use opentelemetry_proto::tonic::metrics::v1::MetricsData; +use opentelemetry_sdk::metrics::{ + MeterProviderBuilder, MetricError, PeriodicReader, SdkMeterProvider, +}; +use opentelemetry_sdk::{runtime, Resource}; +use std::error::Error; +use std::io::{BufRead, BufReader, Read}; +use std::os::unix::fs::MetadataExt; +use std::time::Duration; use std::{fs::File, io::Write}; -use integration_test_runner::metrics_asserter::{read_metrics_from_json, MetricsAsserter}; -use opentelemetry_proto::tonic::metrics::v1::MetricsData; +fn init_metrics() -> SdkMeterProvider { + // Create the OTLP exporter + let exporter_builder = MetricExporter::builder(); + + #[cfg(feature = "tonic-client")] + let exporter_builder = exporter_builder.with_tonic(); + #[cfg(not(feature = "tonic-client"))] + #[cfg(any( + feature = "hyper-client", + feature = "reqwest-client", + feature = "reqwest-blocking-client" + ))] + let exporter_builder = exporter_builder.with_http(); + + let exporter = exporter_builder + .build() + .expect("Failed to build MetricExporter"); + + // Wrap the exporter in a MetricReader + // Create a periodic reader with desired interval and timeout + let reader = PeriodicReader::builder(exporter) + 
.with_interval(Duration::from_millis(100)) // Adjust the interval as needed + .with_timeout(Duration::from_secs(1)) // Adjust the timeout as needed + .build(); + + // Add resource information for this meter provider + let resource = Resource::new(vec![KeyValue::new( + opentelemetry_semantic_conventions::resource::SERVICE_NAME, + "metrics-integration-test", + )]); + + // Build the SdkMeterProvider + let meter_provider = MeterProviderBuilder::default() + .with_resource(resource) + .with_reader(reader) + .build(); + + // Set the meter provider globally + opentelemetry::global::set_meter_provider(meter_provider.clone()); + + meter_provider +} + +pub async fn metrics() -> Result<(), Box> { + let meter_provider = init_metrics(); + + let meter = meter_provider.meter("meter"); + let counter = meter.u64_counter("counter_u64").build(); + counter.add( + 10, + &[ + KeyValue::new("mykey1", "myvalue1"), + KeyValue::new("mykey2", "myvalue2"), + ], + ); + + let histogram = meter.u64_histogram("example_histogram").build(); + histogram.record(42, &[KeyValue::new("mykey3", "myvalue4")]); + + let up_down_counter = meter.i64_up_down_counter("example_up_down_counter").build(); + up_down_counter.add(-1, &[KeyValue::new("mykey5", "myvalue5")]); + + Ok(()) +} + +pub fn assert_metrics_results(result: &str, expected: &str) { + let left = read_metrics_from_json(File::open(expected).unwrap()); + + // For the results file for metrics - what the OTLP collector outputs - + // we get a line per unit of time. So let's grab the last line, and compare that + // to our expected data. 
+ let last_line = BufReader::new(File::open(result).expect("can open metrics results file")) + .lines() + .last() + .expect("metrics results has a final line") + .unwrap(); + let right = read_metrics_from_json_string(&last_line); + + MetricsAsserter::new(left, right).assert(); + + assert!(File::open(result).unwrap().metadata().unwrap().size() > 0) +} #[test] fn test_serde() { @@ -21,3 +118,21 @@ fn test_serde() { MetricsAsserter::new(left, right).assert(); } + +#[test] +#[should_panic(expected = "assertion `left == right` failed")] +pub fn test_assert_metrics_eq_failure() { + let left = read_metrics_from_json(File::open("./expected/metrics.json").unwrap()); + let right = read_metrics_from_json(File::open("./expected/different_metrics.json").unwrap()); + + MetricsAsserter::new(left, right).assert(); +} + +/// +/// Make sure that metrics that are the same are equal(...) +/// +#[test] +pub fn test_assert_metrics_eq() { + let metrics = read_metrics_from_json(File::open("./expected/metrics.json").unwrap()); + MetricsAsserter::new(metrics.clone(), metrics).assert(); +}