From d25598716d09210d2bf47a18033ec8e667695187 Mon Sep 17 00:00:00 2001 From: Jack Berg Date: Thu, 15 Dec 2022 15:16:12 -0600 Subject: [PATCH] Delete MapCounter alternative histogram implementation --- .../aggregator/HistogramAggregationParam.java | 17 +-- .../aggregator/HistogramValueGenerator.java | 2 +- .../AdaptingCircularBufferCounter.java | 87 ++++++----- .../AdaptingIntegerArray.java | 19 +-- .../DoubleExponentialHistogramAggregator.java | 39 ++--- .../DoubleExponentialHistogramBuckets.java | 20 +-- .../aggregator/ExponentialBucketStrategy.java | 48 ------ .../ExponentialHistogramIndexer.java | 6 +- .../internal/state/ExponentialCounter.java | 61 -------- .../state/ExponentialCounterFactory.java | 64 -------- .../metrics/internal/state/MapCounter.java | 142 ------------------ .../view/ExponentialHistogramAggregation.java | 4 +- .../AdaptingCircularBufferCounterTest.java | 80 ++++++++++ .../AdaptingIntegerArrayTest.java | 2 +- ...bleExponentialHistogramAggregatorTest.java | 22 +-- ...DoubleExponentialHistogramBucketsTest.java | 64 ++++---- .../state/ExponentialCounterTest.java | 127 ---------------- 17 files changed, 202 insertions(+), 602 deletions(-) rename sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/{state => aggregator}/AdaptingCircularBufferCounter.java (60%) rename sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/{state => aggregator}/AdaptingIntegerArray.java (93%) delete mode 100644 sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/aggregator/ExponentialBucketStrategy.java delete mode 100644 sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/ExponentialCounter.java delete mode 100644 sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/ExponentialCounterFactory.java delete mode 100644 sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/MapCounter.java create mode 100644 sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/aggregator/AdaptingCircularBufferCounterTest.java rename sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/{state => aggregator}/AdaptingIntegerArrayTest.java (97%) delete mode 100644 sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/state/ExponentialCounterTest.java diff --git a/sdk/metrics/src/jmh/java/io/opentelemetry/sdk/metrics/internal/aggregator/HistogramAggregationParam.java b/sdk/metrics/src/jmh/java/io/opentelemetry/sdk/metrics/internal/aggregator/HistogramAggregationParam.java index 0deae71bd80..8674da73b9b 100644 --- a/sdk/metrics/src/jmh/java/io/opentelemetry/sdk/metrics/internal/aggregator/HistogramAggregationParam.java +++ b/sdk/metrics/src/jmh/java/io/opentelemetry/sdk/metrics/internal/aggregator/HistogramAggregationParam.java @@ -6,7 +6,6 @@ package io.opentelemetry.sdk.metrics.internal.aggregator; import io.opentelemetry.sdk.metrics.internal.exemplar.ExemplarReservoir; -import io.opentelemetry.sdk.metrics.internal.state.ExponentialCounterFactory; import java.util.Collections; /** The types of histogram aggregation to benchmark. 
*/ @@ -22,23 +21,13 @@ public enum HistogramAggregationParam { ExplicitBucketHistogramUtils.createBoundaryArray(Collections.emptyList()), ExemplarReservoir::doubleNoSamples)), EXPONENTIAL_SMALL_CIRCULAR_BUFFER( - new DoubleExponentialHistogramAggregator( - ExemplarReservoir::doubleNoSamples, - ExponentialBucketStrategy.newStrategy( - 20, ExponentialCounterFactory.circularBufferCounter(), 0))), + new DoubleExponentialHistogramAggregator(ExemplarReservoir::doubleNoSamples, 20, 0)), EXPONENTIAL_CIRCULAR_BUFFER( - new DoubleExponentialHistogramAggregator( - ExemplarReservoir::doubleNoSamples, - ExponentialBucketStrategy.newStrategy( - 160, ExponentialCounterFactory.circularBufferCounter(), 0))), - EXPONENTIAL_MAP_COUNTER( - new DoubleExponentialHistogramAggregator( - ExemplarReservoir::doubleNoSamples, - ExponentialBucketStrategy.newStrategy(160, ExponentialCounterFactory.mapCounter(), 0))); + new DoubleExponentialHistogramAggregator(ExemplarReservoir::doubleNoSamples, 160, 0)); private final Aggregator aggregator; - private HistogramAggregationParam(Aggregator aggregator) { + HistogramAggregationParam(Aggregator aggregator) { this.aggregator = aggregator; } diff --git a/sdk/metrics/src/jmh/java/io/opentelemetry/sdk/metrics/internal/aggregator/HistogramValueGenerator.java b/sdk/metrics/src/jmh/java/io/opentelemetry/sdk/metrics/internal/aggregator/HistogramValueGenerator.java index bdcb1d04ba5..31a3213133c 100644 --- a/sdk/metrics/src/jmh/java/io/opentelemetry/sdk/metrics/internal/aggregator/HistogramValueGenerator.java +++ b/sdk/metrics/src/jmh/java/io/opentelemetry/sdk/metrics/internal/aggregator/HistogramValueGenerator.java @@ -35,7 +35,7 @@ public enum HistogramValueGenerator { private static final int INITIAL_SEED = 513423236; private final double[] pool; - private HistogramValueGenerator(double[] pool) { + HistogramValueGenerator(double[] pool) { this.pool = pool; } diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/AdaptingCircularBufferCounter.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/aggregator/AdaptingCircularBufferCounter.java similarity index 60% rename from sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/AdaptingCircularBufferCounter.java rename to sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/aggregator/AdaptingCircularBufferCounter.java index fc0c347c722..14e165f73a1 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/AdaptingCircularBufferCounter.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/aggregator/AdaptingCircularBufferCounter.java @@ -3,7 +3,7 @@ * SPDX-License-Identifier: Apache-2.0 */ -package io.opentelemetry.sdk.metrics.internal.state; +package io.opentelemetry.sdk.metrics.internal.aggregator; /** * A circle-buffer-backed exponential counter. @@ -13,11 +13,8 @@ *

This expands the start and end indices as it sees values.
 *
 *

This class is NOT thread-safe. It is expected to be behind a synchronized incrementer. - * - *

This class is internal and is hence not for public use. Its APIs are unstable and can change - * at any time */ -public class AdaptingCircularBufferCounter implements ExponentialCounter { +final class AdaptingCircularBufferCounter { private static final int NULL_INDEX = Integer.MIN_VALUE; private int endIndex = NULL_INDEX; private int startIndex = NULL_INDEX; @@ -25,44 +22,48 @@ public class AdaptingCircularBufferCounter implements ExponentialCounter { private final AdaptingIntegerArray backing; /** Constructs a circular buffer that will hold at most {@code maxSize} buckets. */ - public AdaptingCircularBufferCounter(int maxSize) { + AdaptingCircularBufferCounter(int maxSize) { this.backing = new AdaptingIntegerArray(maxSize); } /** (Deep)-Copies the values from another exponential counter. */ - public AdaptingCircularBufferCounter(ExponentialCounter toCopy) { - // If toCopy is an AdaptingCircularBuffer, just do a copy of the underlying array - // and baseIndex. - if (toCopy instanceof AdaptingCircularBufferCounter) { - this.backing = ((AdaptingCircularBufferCounter) toCopy).backing.copy(); - this.startIndex = toCopy.getIndexStart(); - this.endIndex = toCopy.getIndexEnd(); - this.baseIndex = ((AdaptingCircularBufferCounter) toCopy).baseIndex; - } else { - // Copy values from some other implementation of ExponentialCounter. - this.backing = new AdaptingIntegerArray(toCopy.getMaxSize()); - this.startIndex = NULL_INDEX; - this.baseIndex = NULL_INDEX; - this.endIndex = NULL_INDEX; - for (int i = toCopy.getIndexStart(); i <= toCopy.getIndexEnd(); i++) { - long val = toCopy.get(i); - this.increment(i, val); - } - } + AdaptingCircularBufferCounter(AdaptingCircularBufferCounter toCopy) { + this.backing = toCopy.backing.copy(); + this.startIndex = toCopy.getIndexStart(); + this.endIndex = toCopy.getIndexEnd(); + this.baseIndex = toCopy.baseIndex; } - @Override - public int getIndexStart() { + /** + * The first index with a recording. May be negative. + * + *

Note: the returned value is not meaningful when isEmpty returns true. + * + * @return the first index with a recording. + */ + int getIndexStart() { return startIndex; } - @Override - public int getIndexEnd() { + /** + * The last index with a recording. May be negative. + * + *

Note: the returned value is not meaningful when isEmpty returns true. + * + * @return The last index with a recording. + */ + int getIndexEnd() { return endIndex; } - @Override - public boolean increment(int index, long delta) { + /** + * Persist new data at index, incrementing by delta amount. + * + * @param index The index of where to perform the incrementation. + * @param delta How much to increment the index by. + * @return success status. + */ + boolean increment(int index, long delta) { if (baseIndex == NULL_INDEX) { startIndex = index; endIndex = index; @@ -89,26 +90,34 @@ public boolean increment(int index, long delta) { return true; } - @Override - public long get(int index) { + /** + * Get the number of recordings for the given index. + * + * @return the number of recordings for the index, or 0 if the index is out of bounds. + */ + long get(int index) { if (index < startIndex || index > endIndex) { return 0; } return backing.get(toBufferIndex(index)); } - @Override - public boolean isEmpty() { + /** + * Boolean denoting if the backing structure has recordings or not. + * + * @return true if no recordings, false if at least one recording. + */ + boolean isEmpty() { return baseIndex == NULL_INDEX; } - @Override - public int getMaxSize() { + /** Returns the maximum number of buckets allowed in this counter. */ + int getMaxSize() { return backing.length(); } - @Override - public void clear() { + /** Resets all bucket counts to zero and resets index start/end tracking. */ + void clear() { this.backing.clear(); this.baseIndex = NULL_INDEX; this.endIndex = NULL_INDEX; diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/AdaptingIntegerArray.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/aggregator/AdaptingIntegerArray.java similarity index 93% rename from sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/AdaptingIntegerArray.java rename to sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/aggregator/AdaptingIntegerArray.java index 37ece0b3837..22405a6e475 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/AdaptingIntegerArray.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/aggregator/AdaptingIntegerArray.java @@ -3,7 +3,7 @@ * SPDX-License-Identifier: Apache-2.0 */ -package io.opentelemetry.sdk.metrics.internal.state; +package io.opentelemetry.sdk.metrics.internal.aggregator; import java.util.Arrays; import javax.annotation.Nullable; @@ -31,11 +31,8 @@ *

  • If cellSize == INT then intBacking is not null *
  • If cellSize == LONG then longBacking is not null * - * - *

    This class is internal and is hence not for public use. Its APIs are unstable and can change - * at any time. */ -public class AdaptingIntegerArray { +final class AdaptingIntegerArray { @Nullable private byte[] byteBacking; @Nullable private short[] shortBacking; @Nullable private int[] intBacking; @@ -52,7 +49,7 @@ private enum ArrayCellSize { private ArrayCellSize cellSize; /** Construct an adapting integer array of a given size. */ - public AdaptingIntegerArray(int size) { + AdaptingIntegerArray(int size) { this.cellSize = ArrayCellSize.BYTE; this.byteBacking = new byte[size]; } @@ -78,13 +75,13 @@ private AdaptingIntegerArray(AdaptingIntegerArray toCopy) { } /** Returns a deep-copy of this array, preserving cell size. */ - public AdaptingIntegerArray copy() { + AdaptingIntegerArray copy() { return new AdaptingIntegerArray(this); } /** Add {@code count} to the value stored at {@code index}. */ @SuppressWarnings("NullAway") - public void increment(int idx, long count) { + void increment(int idx, long count) { // TODO - prevent bad index long result; switch (cellSize) { @@ -124,7 +121,7 @@ public void increment(int idx, long count) { /** Grab the value stored at {@code index}. */ @SuppressWarnings("NullAway") - public long get(int index) { + long get(int index) { long value = 0; switch (this.cellSize) { case BYTE: @@ -145,7 +142,7 @@ public long get(int index) { /** Return the length of this integer array. */ @SuppressWarnings("NullAway") - public int length() { + int length() { int length = 0; switch (this.cellSize) { case BYTE: @@ -165,7 +162,7 @@ public int length() { } /** Zeroes out all counts in this array. */ @SuppressWarnings("NullAway") - public void clear() { + void clear() { switch (this.cellSize) { case BYTE: Arrays.fill(this.byteBacking, (byte) 0); diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleExponentialHistogramAggregator.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleExponentialHistogramAggregator.java index 8ac8a212c36..14e9e6242a8 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleExponentialHistogramAggregator.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleExponentialHistogramAggregator.java @@ -16,7 +16,6 @@ import io.opentelemetry.sdk.metrics.internal.data.exponentialhistogram.ExponentialHistogramData; import io.opentelemetry.sdk.metrics.internal.descriptor.MetricDescriptor; import io.opentelemetry.sdk.metrics.internal.exemplar.ExemplarReservoir; -import io.opentelemetry.sdk.metrics.internal.state.ExponentialCounterFactory; import io.opentelemetry.sdk.resources.Resource; import java.util.Collections; import java.util.List; @@ -35,7 +34,8 @@ public final class DoubleExponentialHistogramAggregator implements Aggregator { private final Supplier> reservoirSupplier; - private final ExponentialBucketStrategy bucketStrategy; + private final int maxBuckets; + private final int startingScale; /** * Constructs an exponential histogram aggregator. @@ -43,23 +43,17 @@ public final class DoubleExponentialHistogramAggregator * @param reservoirSupplier Supplier of exemplar reservoirs per-stream. 
*/ public DoubleExponentialHistogramAggregator( - Supplier> reservoirSupplier, int maxBuckets) { - this( - reservoirSupplier, - ExponentialBucketStrategy.newStrategy( - maxBuckets, ExponentialCounterFactory.circularBufferCounter())); - } - - DoubleExponentialHistogramAggregator( Supplier> reservoirSupplier, - ExponentialBucketStrategy bucketStrategy) { + int maxBuckets, + int startingScale) { this.reservoirSupplier = reservoirSupplier; - this.bucketStrategy = bucketStrategy; + this.maxBuckets = maxBuckets; + this.startingScale = startingScale; } @Override public AggregatorHandle createHandle() { - return new Handle(reservoirSupplier.get(), this.bucketStrategy); + return new Handle(reservoirSupplier.get(), maxBuckets, startingScale); } /** @@ -183,7 +177,7 @@ public MetricData toMetricData( static final class Handle extends AggregatorHandle { - private final ExponentialBucketStrategy bucketStrategy; + private final int maxBuckets; @Nullable private DoubleExponentialHistogramBuckets positiveBuckets; @Nullable private DoubleExponentialHistogramBuckets negativeBuckets; private long zeroCount; @@ -193,16 +187,15 @@ static final class Handle private long count; private int scale; - Handle( - ExemplarReservoir reservoir, ExponentialBucketStrategy bucketStrategy) { + Handle(ExemplarReservoir reservoir, int maxBuckets, int startingScale) { super(reservoir); - this.bucketStrategy = bucketStrategy; + this.maxBuckets = maxBuckets; this.sum = 0; this.zeroCount = 0; this.min = Double.MAX_VALUE; this.max = -1; this.count = 0; - this.scale = bucketStrategy.getStartingScale(); + this.scale = startingScale; } @Override @@ -260,17 +253,15 @@ protected synchronized void doRecordDouble(double value) { zeroCount++; return; } else if (c > 0) { - // Initialize positive buckets if needed, adjusting to current scale + // Initialize positive buckets at current scale, if needed if (positiveBuckets == null) { - positiveBuckets = bucketStrategy.newBuckets(); - positiveBuckets.downscale(positiveBuckets.getScale() - scale); + positiveBuckets = new DoubleExponentialHistogramBuckets(scale, maxBuckets); } buckets = positiveBuckets; } else { - // Initialize negative buckets if needed, adjusting to current scale + // Initialize negative buckets at current scale, if needed if (negativeBuckets == null) { - negativeBuckets = bucketStrategy.newBuckets(); - negativeBuckets.downscale(negativeBuckets.getScale() - scale); + negativeBuckets = new DoubleExponentialHistogramBuckets(scale, maxBuckets); } buckets = negativeBuckets; } diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleExponentialHistogramBuckets.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleExponentialHistogramBuckets.java index 6c0928eb6b2..4cc750c0c10 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleExponentialHistogramBuckets.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleExponentialHistogramBuckets.java @@ -7,11 +7,8 @@ import io.opentelemetry.sdk.internal.PrimitiveLongList; import io.opentelemetry.sdk.metrics.internal.data.exponentialhistogram.ExponentialHistogramBuckets; -import io.opentelemetry.sdk.metrics.internal.state.ExponentialCounter; -import io.opentelemetry.sdk.metrics.internal.state.ExponentialCounterFactory; import java.util.Collections; import java.util.List; -import javax.annotation.Nonnull; import javax.annotation.Nullable; /** @@ -23,16 +20,13 @@ */ final class 
DoubleExponentialHistogramBuckets implements ExponentialHistogramBuckets { - private final ExponentialCounterFactory counterFactory; - private ExponentialCounter counts; + private AdaptingCircularBufferCounter counts; private int scale; private ExponentialHistogramIndexer exponentialHistogramIndexer; private long totalCount; - DoubleExponentialHistogramBuckets( - int startingScale, int maxBuckets, ExponentialCounterFactory counterFactory) { - this.counterFactory = counterFactory; - this.counts = counterFactory.newCounter(maxBuckets); + DoubleExponentialHistogramBuckets(int startingScale, int maxBuckets) { + this.counts = new AdaptingCircularBufferCounter(maxBuckets); this.scale = startingScale; this.exponentialHistogramIndexer = ExponentialHistogramIndexer.get(scale); this.totalCount = 0; @@ -40,8 +34,7 @@ final class DoubleExponentialHistogramBuckets implements ExponentialHistogramBuc // For copying DoubleExponentialHistogramBuckets(DoubleExponentialHistogramBuckets buckets) { - this.counterFactory = buckets.counterFactory; - this.counts = counterFactory.copy(buckets.counts); + this.counts = new AdaptingCircularBufferCounter(buckets.counts); this.scale = buckets.scale; this.exponentialHistogramIndexer = buckets.exponentialHistogramIndexer; this.totalCount = buckets.totalCount; @@ -53,7 +46,7 @@ DoubleExponentialHistogramBuckets copy() { } /** Resets all counters in this bucket set to zero, but preserves scale. */ - public void clear() { + void clear() { this.totalCount = 0; this.counts.clear(); } @@ -82,7 +75,6 @@ public int getOffset() { return counts.getIndexStart(); } - @Nonnull @Override public List getBucketCounts() { if (counts.isEmpty()) { @@ -113,7 +105,7 @@ void downscale(int by) { // We want to preserve other optimisations here as well, e.g. integer size. // Instead of creating a new counter, we copy the existing one (for bucket size // optimisations), and clear the values before writing the new ones. - ExponentialCounter newCounts = counterFactory.copy(counts); + AdaptingCircularBufferCounter newCounts = new AdaptingCircularBufferCounter(counts); newCounts.clear(); for (int i = counts.getIndexStart(); i <= counts.getIndexEnd(); i++) { diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/aggregator/ExponentialBucketStrategy.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/aggregator/ExponentialBucketStrategy.java deleted file mode 100644 index 36981ff5d13..00000000000 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/aggregator/ExponentialBucketStrategy.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.sdk.metrics.internal.aggregator; - -import io.opentelemetry.sdk.metrics.internal.state.ExponentialCounterFactory; - -/** The configuration for how to create exponential histogram buckets. */ -final class ExponentialBucketStrategy { - - private static final int DEFAULT_STARTING_SCALE = 20; - - private final int startingScale; - /** The maximum number of buckets that will be used for positive or negative recordings. */ - private final int maxBuckets; - /** The mechanism of constructing and copying buckets. 
*/ - private final ExponentialCounterFactory counterFactory; - - private ExponentialBucketStrategy( - int startingScale, int maxBuckets, ExponentialCounterFactory counterFactory) { - this.startingScale = startingScale; - this.maxBuckets = maxBuckets; - this.counterFactory = counterFactory; - } - - /** Constructs fresh new buckets with default settings. */ - DoubleExponentialHistogramBuckets newBuckets() { - return new DoubleExponentialHistogramBuckets(startingScale, maxBuckets, counterFactory); - } - - int getStartingScale() { - return startingScale; - } - - /** Create a new strategy for generating Exponential Buckets. */ - static ExponentialBucketStrategy newStrategy( - int maxBuckets, ExponentialCounterFactory counterFactory) { - return new ExponentialBucketStrategy(DEFAULT_STARTING_SCALE, maxBuckets, counterFactory); - } - - /** Create a new strategy for generating Exponential Buckets. */ - static ExponentialBucketStrategy newStrategy( - int maxBuckets, ExponentialCounterFactory counterFactory, int startingScale) { - return new ExponentialBucketStrategy(startingScale, maxBuckets, counterFactory); - } -} diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/aggregator/ExponentialHistogramIndexer.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/aggregator/ExponentialHistogramIndexer.java index 8bd95f3275b..f7359754358 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/aggregator/ExponentialHistogramIndexer.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/aggregator/ExponentialHistogramIndexer.java @@ -8,7 +8,7 @@ import java.util.Map; import java.util.concurrent.ConcurrentHashMap; -class ExponentialHistogramIndexer { +final class ExponentialHistogramIndexer { private static final Map cache = new ConcurrentHashMap<>(); @@ -40,8 +40,8 @@ private ExponentialHistogramIndexer(int scale) { this.scaleFactor = computeScaleFactor(scale); } - /** Get an indexer for the given scale. Indexers are cached and reused for */ - public static ExponentialHistogramIndexer get(int scale) { + /** Get an indexer for the given scale. Indexers are cached and reused for performance. */ + static ExponentialHistogramIndexer get(int scale) { return cache.computeIfAbsent(scale, unused -> new ExponentialHistogramIndexer(scale)); } diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/ExponentialCounter.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/ExponentialCounter.java deleted file mode 100644 index 4b89f53ef2d..00000000000 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/ExponentialCounter.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.sdk.metrics.internal.state; - -/** - * Interface for use as backing data structure for exponential histogram buckets. - * - *

    This class is internal and is hence not for public use. Its APIs are unstable and can change - * at any time. - */ -public interface ExponentialCounter { - /** - * The first index with a recording. May be negative. - * - *

    Note: the returned value is not meaningful when isEmpty returns true. - * - * @return the first index with a recording. - */ - int getIndexStart(); - - /** - * The last index with a recording. May be negative. - * - *

    Note: the returned value is not meaningful when isEmpty returns true. - * - * @return The last index with a recording. - */ - int getIndexEnd(); - - /** - * Persist new data at index, incrementing by delta amount. - * - * @param index The index of where to perform the incrementation. - * @param delta How much to increment the index by. - * @return success status. - */ - boolean increment(int index, long delta); - - /** - * Get the number of recordings for the given index. - * - * @return the number of recordings for the index, or 0 if the index is out of bounds. - */ - long get(int index); - - /** - * Boolean denoting if the backing structure has recordings or not. - * - * @return true if no recordings, false if at least one recording. - */ - boolean isEmpty(); - - /** Returns the maximum number of buckets allowed in this counter. */ - int getMaxSize(); - - /** Resets all bucket counts to zero and resets index start/end tracking. */ - void clear(); -} diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/ExponentialCounterFactory.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/ExponentialCounterFactory.java deleted file mode 100644 index 9c649eed748..00000000000 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/ExponentialCounterFactory.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.sdk.metrics.internal.state; - -/** - * Interface for constructing backing data structure for exponential histogram buckets. - * - *

    This class is internal and is hence not for public use. Its APIs are unstable and can change - * at any time. - */ -public interface ExponentialCounterFactory { - /** - * Constructs a new {@link io.opentelemetry.sdk.metrics.internal.state.ExponentialCounter} with - * maximum bucket size. - * - * @param maxSize The maximum number of buckets allowed in the counter. - */ - ExponentialCounter newCounter(int maxSize); - - /** Returns a deep-copy of an ExponentialCounter. */ - ExponentialCounter copy(ExponentialCounter other); - - /** Constructs exponential counters using {@link MapCounter}. */ - static ExponentialCounterFactory mapCounter() { - return new ExponentialCounterFactory() { - @Override - public ExponentialCounter newCounter(int maxSize) { - return new MapCounter(maxSize); - } - - @Override - public ExponentialCounter copy(ExponentialCounter other) { - return new MapCounter(other); - } - - @Override - public String toString() { - return "mapCounter"; - } - }; - } - /** Constructs exponential counters using {@link AdaptingCircularBufferCounter}. */ - static ExponentialCounterFactory circularBufferCounter() { - return new ExponentialCounterFactory() { - @Override - public ExponentialCounter newCounter(int maxSize) { - return new AdaptingCircularBufferCounter(maxSize); - } - - @Override - public ExponentialCounter copy(ExponentialCounter other) { - return new AdaptingCircularBufferCounter(other); - } - - @Override - public String toString() { - return "circularBufferCounter"; - } - }; - } -} diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/MapCounter.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/MapCounter.java deleted file mode 100644 index 7c6c99b12c9..00000000000 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/MapCounter.java +++ /dev/null @@ -1,142 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.sdk.metrics.internal.state; - -import java.util.Collections; -import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.atomic.AtomicLong; - -/** - * Simple-as-possible backing structure for exponential histogram buckets. Can be used as a baseline - * against other data structures. - * - *

    This class is internal and is hence not for public use. Its APIs are unstable and can change - * at any time - */ -public class MapCounter implements ExponentialCounter { - private static final int NULL_INDEX = Integer.MIN_VALUE; - - private final int maxSize; - private final Map backing; - private int indexStart; - private int indexEnd; - - /** Instantiate a MapCounter. */ - public MapCounter(int maxSize) { - this.maxSize = maxSize; - this.backing = new ConcurrentHashMap<>((int) Math.ceil(maxSize / 0.75) + 1); - this.indexEnd = NULL_INDEX; - this.indexStart = NULL_INDEX; - } - - /** - * Create an independent copy of another ExponentialCounter. - * - * @param otherCounter another exponential counter to make a deep copy of. - */ - public MapCounter(ExponentialCounter otherCounter) { - this.maxSize = otherCounter.getMaxSize(); - this.backing = new ConcurrentHashMap<>((int) Math.ceil(maxSize / 0.75) + 1); - this.indexStart = otherCounter.getIndexStart(); - this.indexEnd = otherCounter.getIndexEnd(); - - // copy values - for (int i = indexStart; i <= indexEnd; i++) { - long val = otherCounter.get(i); - if (val != 0) { - this.backing.put(i, new AtomicLong(val)); - } - } - } - - @Override - public int getIndexStart() { - return indexStart; - } - - @Override - public int getIndexEnd() { - return indexEnd; - } - - @Override - public boolean increment(int index, long delta) { - if (indexStart == NULL_INDEX) { - indexStart = index; - indexEnd = index; - doIncrement(index, delta); - return true; - } - - // Extend window if possible. if it would exceed maxSize, then return false. - if (index > indexEnd) { - if ((long) index - indexStart + 1 > maxSize) { - return false; - } - indexEnd = index; - } else if (index < indexStart) { - if ((long) indexEnd - index + 1 > maxSize) { - return false; - } - indexStart = index; - } - - doIncrement(index, delta); - return true; - } - - @Override - public long get(int index) { - if (index < indexStart || index > indexEnd) { - return 0; - } - AtomicLong result = backing.get(index); - if (result == null) { - return 0; - } - return result.longValue(); - } - - @Override - public boolean isEmpty() { - return backing.isEmpty(); - } - - @Override - public int getMaxSize() { - return maxSize; - } - - @Override - public void clear() { - this.backing.clear(); - this.indexStart = NULL_INDEX; - this.indexEnd = NULL_INDEX; - } - - private synchronized void doIncrement(int index, long delta) { - long prevValue = backing.computeIfAbsent(index, k -> new AtomicLong(0)).getAndAdd(delta); - - // in the case of a decrement result may be 0, so we remove the entry - if (prevValue + delta == 0) { - backing.remove(index); - if (isEmpty()) { - indexStart = NULL_INDEX; - indexEnd = NULL_INDEX; - } else { - // find largest and smallest index to remap window - indexStart = Collections.min(backing.keySet()); - indexEnd = Collections.max(backing.keySet()); - } - } - } - - @Override - public String toString() { - return backing.toString(); - } -} diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/view/ExponentialHistogramAggregation.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/view/ExponentialHistogramAggregation.java index 9ea0958cee9..4ab3a876435 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/view/ExponentialHistogramAggregation.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/view/ExponentialHistogramAggregation.java @@ -27,6 +27,7 @@ public final class ExponentialHistogramAggregation 
implements Aggregation, AggregatorFactory { private static final int DEFAULT_MAX_BUCKETS = 160; + private static final int DEFAULT_STARTING_SCALE = 20; private static final Aggregation DEFAULT = new ExponentialHistogramAggregation(DEFAULT_MAX_BUCKETS); @@ -59,7 +60,8 @@ public Aggregator createAggregator( Clock.getDefault(), Runtime.getRuntime().availableProcessors(), RandomSupplier.platformDefault())), - maxBuckets); + maxBuckets, + DEFAULT_STARTING_SCALE); } @Override diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/aggregator/AdaptingCircularBufferCounterTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/aggregator/AdaptingCircularBufferCounterTest.java new file mode 100644 index 00000000000..d96d85585f0 --- /dev/null +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/aggregator/AdaptingCircularBufferCounterTest.java @@ -0,0 +1,80 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.metrics.internal.aggregator; + +import static org.assertj.core.api.Assertions.assertThat; + +import org.junit.jupiter.api.Test; + +class AdaptingCircularBufferCounterTest { + + @Test + void returnsZeroOutsidePopulatedRange() { + AdaptingCircularBufferCounter counter = newCounter(10); + assertThat(counter.get(0)).isEqualTo(0); + assertThat(counter.get(100)).isEqualTo(0); + counter.increment(2, 1); + counter.increment(99, 1); + assertThat(counter.get(0)).isEqualTo(0); + assertThat(counter.get(100)).isEqualTo(0); + } + + @Test + void expandLower() { + AdaptingCircularBufferCounter counter = newCounter(160); + assertThat(counter.increment(10, 1)).isTrue(); + // Add BEFORE the initial see (array index 0) and make sure we wrap around the datastructure. + assertThat(counter.increment(0, 1)).isTrue(); + assertThat(counter.get(10)).isEqualTo(1); + assertThat(counter.get(0)).isEqualTo(1); + assertThat(counter.getIndexStart()).as("index start").isEqualTo(0); + assertThat(counter.getIndexEnd()).as("index end").isEqualTo(10); + // Add AFTER initial entry and just push back end. 
+ assertThat(counter.increment(20, 1)).isTrue(); + assertThat(counter.get(20)).isEqualTo(1); + assertThat(counter.get(10)).isEqualTo(1); + assertThat(counter.get(0)).isEqualTo(1); + assertThat(counter.getIndexStart()).isEqualTo(0); + assertThat(counter.getIndexEnd()).isEqualTo(20); + } + + @Test + void shouldFailAtLimit() { + AdaptingCircularBufferCounter counter = newCounter(160); + assertThat(counter.increment(0, 1)).isTrue(); + assertThat(counter.increment(120, 1)).isTrue(); + // Check state + assertThat(counter.getIndexStart()).as("index start").isEqualTo(0); + assertThat(counter.getIndexEnd()).as("index start").isEqualTo(120); + assertThat(counter.get(0)).as("counter[0]").isEqualTo(1); + assertThat(counter.get(120)).as("counter[120]").isEqualTo(1); + // Adding over the maximum # of buckets + assertThat(counter.increment(3000, 1)).isFalse(); + } + + @Test + void shouldCopyCounters() { + AdaptingCircularBufferCounter counter = newCounter(2); + assertThat(counter.increment(2, 1)).isTrue(); + assertThat(counter.increment(1, 1)).isTrue(); + assertThat(counter.increment(3, 1)).isFalse(); + + AdaptingCircularBufferCounter copy = new AdaptingCircularBufferCounter(counter); + assertThat(counter.get(2)).as("counter[2]").isEqualTo(1); + assertThat(copy.get(2)).as("copy[2]").isEqualTo(1); + assertThat(copy.getMaxSize()).isEqualTo(counter.getMaxSize()); + assertThat(copy.getIndexStart()).isEqualTo(counter.getIndexStart()); + assertThat(copy.getIndexEnd()).isEqualTo(counter.getIndexEnd()); + // Mutate copy and make sure original is unchanged. + assertThat(copy.increment(2, 1)).isTrue(); + assertThat(copy.get(2)).as("copy[2]").isEqualTo(2); + assertThat(counter.get(2)).as("counter[2]").isEqualTo(1); + } + + private static AdaptingCircularBufferCounter newCounter(int maxBuckets) { + return new AdaptingCircularBufferCounter(maxBuckets); + } +} diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/state/AdaptingIntegerArrayTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/aggregator/AdaptingIntegerArrayTest.java similarity index 97% rename from sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/state/AdaptingIntegerArrayTest.java rename to sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/aggregator/AdaptingIntegerArrayTest.java index f168f2abaae..f6609544bb8 100644 --- a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/state/AdaptingIntegerArrayTest.java +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/aggregator/AdaptingIntegerArrayTest.java @@ -3,7 +3,7 @@ * SPDX-License-Identifier: Apache-2.0 */ -package io.opentelemetry.sdk.metrics.internal.state; +package io.opentelemetry.sdk.metrics.internal.aggregator; import static org.assertj.core.api.Assertions.assertThat; diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleExponentialHistogramAggregatorTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleExponentialHistogramAggregatorTest.java index f2baa6b0a20..9c8da1989c0 100644 --- a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleExponentialHistogramAggregatorTest.java +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleExponentialHistogramAggregatorTest.java @@ -22,7 +22,6 @@ import io.opentelemetry.sdk.metrics.internal.data.exponentialhistogram.ExponentialHistogramData; import io.opentelemetry.sdk.metrics.internal.descriptor.MetricDescriptor; 
import io.opentelemetry.sdk.metrics.internal.exemplar.ExemplarReservoir; -import io.opentelemetry.sdk.metrics.internal.state.ExponentialCounterFactory; import io.opentelemetry.sdk.resources.Resource; import io.opentelemetry.sdk.testing.assertj.MetricAssertions; import java.util.Arrays; @@ -50,21 +49,20 @@ class DoubleExponentialHistogramAggregatorTest { @Mock ExemplarReservoir reservoir; + private static final int STARTING_SCALE = 20; private static final DoubleExponentialHistogramAggregator aggregator = - new DoubleExponentialHistogramAggregator(ExemplarReservoir::doubleNoSamples, 160); + new DoubleExponentialHistogramAggregator(ExemplarReservoir::doubleNoSamples, 160, 20); private static final Resource RESOURCE = Resource.getDefault(); private static final InstrumentationScopeInfo INSTRUMENTATION_SCOPE_INFO = InstrumentationScopeInfo.empty(); private static final MetricDescriptor METRIC_DESCRIPTOR = MetricDescriptor.create("name", "description", "unit"); - private static final ExponentialBucketStrategy EXPONENTIAL_BUCKET_STRATEGY = - ExponentialBucketStrategy.newStrategy(160, ExponentialCounterFactory.mapCounter()); private static Stream provideAggregator() { return Stream.of( aggregator, new DoubleExponentialHistogramAggregator( - ExemplarReservoir::doubleNoSamples, EXPONENTIAL_BUCKET_STRATEGY)); + ExemplarReservoir::doubleNoSamples, 160, STARTING_SCALE)); } private static int valueToIndex(int scale, double value) { @@ -91,12 +89,10 @@ void createHandle() { .doAccumulateThenReset(Collections.emptyList()); assertThat(accumulation.getPositiveBuckets()) .isInstanceOf(DoubleExponentialHistogramAggregator.EmptyExponentialHistogramBuckets.class); - assertThat(accumulation.getPositiveBuckets().getScale()) - .isEqualTo(EXPONENTIAL_BUCKET_STRATEGY.getStartingScale()); + assertThat(accumulation.getPositiveBuckets().getScale()).isEqualTo(STARTING_SCALE); assertThat(accumulation.getNegativeBuckets()) .isInstanceOf(DoubleExponentialHistogramAggregator.EmptyExponentialHistogramBuckets.class); - assertThat(accumulation.getNegativeBuckets().getScale()) - .isEqualTo(EXPONENTIAL_BUCKET_STRATEGY.getStartingScale()); + assertThat(accumulation.getNegativeBuckets().getScale()).isEqualTo(STARTING_SCALE); } @Test @@ -203,7 +199,7 @@ void testRecordingsAtLimits(DoubleExponentialHistogramAggregator aggregator) { @Test void testExemplarsInAccumulation() { DoubleExponentialHistogramAggregator agg = - new DoubleExponentialHistogramAggregator(() -> reservoir, 160); + new DoubleExponentialHistogramAggregator(() -> reservoir, 160, STARTING_SCALE); Attributes attributes = Attributes.builder().put("test", "value").build(); DoubleExemplarData exemplar = @@ -319,9 +315,7 @@ void testMergeAccumulationMinAndMax() { private static ExponentialHistogramAccumulation createAccumulation( boolean hasMinMax, double min, double max) { - DoubleExponentialHistogramBuckets buckets = - new DoubleExponentialHistogramBuckets( - 0, 1, ExponentialCounterFactory.circularBufferCounter()); + DoubleExponentialHistogramBuckets buckets = new DoubleExponentialHistogramBuckets(0, 1); return ExponentialHistogramAccumulation.create( 0, 0, hasMinMax, min, max, buckets, buckets, 0, Collections.emptyList()); } @@ -443,7 +437,7 @@ void testToMetricData() { Mockito.when(reservoirSupplier.get()).thenReturn(reservoir); DoubleExponentialHistogramAggregator cumulativeAggregator = - new DoubleExponentialHistogramAggregator(reservoirSupplier, 160); + new DoubleExponentialHistogramAggregator(reservoirSupplier, 160, STARTING_SCALE); AggregatorHandle 
aggregatorHandle = cumulativeAggregator.createHandle(); diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleExponentialHistogramBucketsTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleExponentialHistogramBucketsTest.java index 0fa829a8d9d..5ff74201951 100644 --- a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleExponentialHistogramBucketsTest.java +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleExponentialHistogramBucketsTest.java @@ -8,13 +8,10 @@ import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; -import io.opentelemetry.sdk.metrics.internal.state.ExponentialCounterFactory; import io.opentelemetry.sdk.testing.assertj.MetricAssertions; import java.util.Arrays; import java.util.Collections; -import java.util.stream.Stream; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.MethodSource; +import org.junit.jupiter.api.Test; /** * These are extra test cases for buckets. Much of this class is already tested via more complex @@ -22,36 +19,26 @@ */ class DoubleExponentialHistogramBucketsTest { - static Stream bucketStrategies() { - return Stream.of( - ExponentialBucketStrategy.newStrategy(160, ExponentialCounterFactory.mapCounter()), - ExponentialBucketStrategy.newStrategy( - 160, ExponentialCounterFactory.circularBufferCounter())); - } - - @ParameterizedTest - @MethodSource("bucketStrategies") - void testRecordSimple(ExponentialBucketStrategy buckets) { + @Test + void record_Valid() { // Can only effectively test recording of one value here due to downscaling required. // More complex recording/downscaling operations are tested in the aggregator. 
- DoubleExponentialHistogramBuckets b = buckets.newBuckets(); + DoubleExponentialHistogramBuckets b = newBuckets(); b.record(1); b.record(1); b.record(1); MetricAssertions.assertThat(b).hasTotalCount(3).hasCounts(Collections.singletonList(3L)); } - @ParameterizedTest - @MethodSource("bucketStrategies") - void testRecordShouldError(ExponentialBucketStrategy buckets) { - DoubleExponentialHistogramBuckets b = buckets.newBuckets(); + @Test + void record_Zero_Throws() { + DoubleExponentialHistogramBuckets b = newBuckets(); assertThatThrownBy(() -> b.record(0)).isInstanceOf(IllegalStateException.class); } - @ParameterizedTest - @MethodSource("bucketStrategies") - void testDownscale(ExponentialBucketStrategy buckets) { - DoubleExponentialHistogramBuckets b = buckets.newBuckets(); + @Test + void downscale_Valid() { + DoubleExponentialHistogramBuckets b = newBuckets(); b.downscale(20); // scale of zero is easy to reason with without a calculator b.record(1); b.record(2); @@ -63,18 +50,16 @@ void testDownscale(ExponentialBucketStrategy buckets) { .hasOffset(-1); } - @ParameterizedTest - @MethodSource("bucketStrategies") - void testDownscaleShouldError(ExponentialBucketStrategy buckets) { - DoubleExponentialHistogramBuckets b = buckets.newBuckets(); + @Test + void downscale_NegativeIncrement_Throws() { + DoubleExponentialHistogramBuckets b = newBuckets(); assertThatThrownBy(() -> b.downscale(-1)).isInstanceOf(IllegalStateException.class); } - @ParameterizedTest - @MethodSource("bucketStrategies") - void testEqualsAndHashCode(ExponentialBucketStrategy buckets) { - DoubleExponentialHistogramBuckets a = buckets.newBuckets(); - DoubleExponentialHistogramBuckets b = buckets.newBuckets(); + @Test + void equalsAndHashCode() { + DoubleExponentialHistogramBuckets a = newBuckets(); + DoubleExponentialHistogramBuckets b = newBuckets(); assertThat(a).isNotNull(); assertThat(b).isEqualTo(a); @@ -92,9 +77,9 @@ void testEqualsAndHashCode(ExponentialBucketStrategy buckets) { assertThat(a).hasSameHashCodeAs(b); // Now we start to play with altering offset, but having same effective counts. - DoubleExponentialHistogramBuckets empty = buckets.newBuckets(); + DoubleExponentialHistogramBuckets empty = newBuckets(); empty.downscale(20); - DoubleExponentialHistogramBuckets c = buckets.newBuckets(); + DoubleExponentialHistogramBuckets c = newBuckets(); c.downscale(20); assertThat(c.record(1)).isTrue(); // Record can fail if scale is not set correctly. @@ -102,14 +87,17 @@ void testEqualsAndHashCode(ExponentialBucketStrategy buckets) { assertThat(c.getTotalCount()).isEqualTo(2); } - @ParameterizedTest - @MethodSource("bucketStrategies") - void testToString(ExponentialBucketStrategy buckets) { + @Test + void toString_Valid() { // Note this test may break once difference implementations for counts are developed since // the counts may have different toStrings(). 
- DoubleExponentialHistogramBuckets b = buckets.newBuckets(); + DoubleExponentialHistogramBuckets b = newBuckets(); b.record(1); assertThat(b.toString()) .isEqualTo("DoubleExponentialHistogramBuckets{scale: 20, offset: -1, counts: {-1=1} }"); } + + private static DoubleExponentialHistogramBuckets newBuckets() { + return new DoubleExponentialHistogramBuckets(20, 160); + } } diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/state/ExponentialCounterTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/state/ExponentialCounterTest.java deleted file mode 100644 index e39887cc652..00000000000 --- a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/state/ExponentialCounterTest.java +++ /dev/null @@ -1,127 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.sdk.metrics.internal.state; - -import static org.assertj.core.api.Assertions.assertThat; - -import java.util.stream.Stream; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.MethodSource; - -public class ExponentialCounterTest { - - static Stream counterProviders() { - return Stream.of( - ExponentialCounterFactory.mapCounter(), ExponentialCounterFactory.circularBufferCounter()); - } - - @ParameterizedTest - @MethodSource("counterProviders") - void returnsZeroOutsidePopulatedRange(ExponentialCounterFactory counterFactory) { - ExponentialCounter counter = counterFactory.newCounter(10); - assertThat(counter.get(0)).isEqualTo(0); - assertThat(counter.get(100)).isEqualTo(0); - counter.increment(2, 1); - counter.increment(99, 1); - assertThat(counter.get(0)).isEqualTo(0); - assertThat(counter.get(100)).isEqualTo(0); - } - - @ParameterizedTest - @MethodSource("counterProviders") - void expandLower(ExponentialCounterFactory counterFactory) { - ExponentialCounter counter = counterFactory.newCounter(160); - assertThat(counter.increment(10, 1)).isTrue(); - // Add BEFORE the initial see (array index 0) and make sure we wrap around the datastructure. - assertThat(counter.increment(0, 1)).isTrue(); - assertThat(counter.get(10)).isEqualTo(1); - assertThat(counter.get(0)).isEqualTo(1); - assertThat(counter.getIndexStart()).as("index start").isEqualTo(0); - assertThat(counter.getIndexEnd()).as("index end").isEqualTo(10); - // Add AFTER initial entry and just push back end. 
- assertThat(counter.increment(20, 1)).isTrue(); - assertThat(counter.get(20)).isEqualTo(1); - assertThat(counter.get(10)).isEqualTo(1); - assertThat(counter.get(0)).isEqualTo(1); - assertThat(counter.getIndexStart()).isEqualTo(0); - assertThat(counter.getIndexEnd()).isEqualTo(20); - } - - @ParameterizedTest - @MethodSource("counterProviders") - void shouldFailAtLimit(ExponentialCounterFactory counterFactory) { - ExponentialCounter counter = counterFactory.newCounter(160); - assertThat(counter.increment(0, 1)).isTrue(); - assertThat(counter.increment(120, 1)).isTrue(); - // Check state - assertThat(counter.getIndexStart()).as("index start").isEqualTo(0); - assertThat(counter.getIndexEnd()).as("index start").isEqualTo(120); - assertThat(counter.get(0)).as("counter[0]").isEqualTo(1); - assertThat(counter.get(120)).as("counter[120]").isEqualTo(1); - // Adding over the maximum # of buckets - assertThat(counter.increment(3000, 1)).isFalse(); - } - - @ParameterizedTest - @MethodSource("counterProviders") - void shouldCopyCounters(ExponentialCounterFactory counterFactory) { - ExponentialCounter counter = counterFactory.newCounter(2); - assertThat(counter.increment(2, 1)).isTrue(); - assertThat(counter.increment(1, 1)).isTrue(); - assertThat(counter.increment(3, 1)).isFalse(); - - ExponentialCounter copy = counterFactory.copy(counter); - assertThat(counter.get(2)).as("counter[2]").isEqualTo(1); - assertThat(copy.get(2)).as("copy[2]").isEqualTo(1); - assertThat(copy.getMaxSize()).isEqualTo(counter.getMaxSize()); - assertThat(copy.getIndexStart()).isEqualTo(counter.getIndexStart()); - assertThat(copy.getIndexEnd()).isEqualTo(counter.getIndexEnd()); - // Mutate copy and make sure original is unchanged. - assertThat(copy.increment(2, 1)).isTrue(); - assertThat(copy.get(2)).as("copy[2]").isEqualTo(2); - assertThat(counter.get(2)).as("counter[2]").isEqualTo(1); - } - - @ParameterizedTest - @MethodSource("counterProviders") - void shouldCopyMapCounters(ExponentialCounterFactory counterFactory) { - ExponentialCounter counter = ExponentialCounterFactory.mapCounter().newCounter(2); - assertThat(counter.increment(2, 1)).isTrue(); - assertThat(counter.increment(1, 1)).isTrue(); - assertThat(counter.increment(3, 1)).isFalse(); - - ExponentialCounter copy = counterFactory.copy(counter); - assertThat(counter.get(2)).as("counter[2]").isEqualTo(1); - assertThat(copy.get(2)).as("copy[2]").isEqualTo(1); - assertThat(copy.getMaxSize()).isEqualTo(counter.getMaxSize()); - assertThat(copy.getIndexStart()).isEqualTo(counter.getIndexStart()); - assertThat(copy.getIndexEnd()).isEqualTo(counter.getIndexEnd()); - // Mutate copy and make sure original is unchanged. 
- assertThat(copy.increment(2, 1)).isTrue(); - assertThat(copy.get(2)).as("copy[2]").isEqualTo(2); - assertThat(counter.get(2)).as("counter[2]").isEqualTo(1); - } - - @ParameterizedTest - @MethodSource("counterProviders") - void shouldCopyCircularBufferCounters(ExponentialCounterFactory counterFactory) { - ExponentialCounter counter = ExponentialCounterFactory.circularBufferCounter().newCounter(2); - assertThat(counter.increment(2, 1)).isTrue(); - assertThat(counter.increment(1, 1)).isTrue(); - assertThat(counter.increment(3, 1)).isFalse(); - - ExponentialCounter copy = counterFactory.copy(counter); - assertThat(counter.get(2)).as("counter[2]").isEqualTo(1); - assertThat(copy.get(2)).as("copy[2]").isEqualTo(1); - assertThat(copy.getMaxSize()).isEqualTo(counter.getMaxSize()); - assertThat(copy.getIndexStart()).isEqualTo(counter.getIndexStart()); - assertThat(copy.getIndexEnd()).isEqualTo(counter.getIndexEnd()); - // Mutate copy and make sure original is unchanged. - assertThat(copy.increment(2, 1)).isTrue(); - assertThat(copy.get(2)).as("copy[2]").isEqualTo(2); - assertThat(counter.get(2)).as("counter[2]").isEqualTo(1); - } -}
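
Reviewer note (illustrative, not part of the patch): with ExponentialBucketStrategy, ExponentialCounter, ExponentialCounterFactory, and MapCounter removed, the aggregator is now configured directly with plain ints, and its buckets are backed solely by AdaptingCircularBufferCounter. A minimal sketch of the new call shape, using the constructor signature introduced above and the values that appear in this diff (160 max buckets, starting scale 20):

    // Sketch only; mirrors DoubleExponentialHistogramAggregator's new 3-arg constructor.
    DoubleExponentialHistogramAggregator aggregator =
        new DoubleExponentialHistogramAggregator(
            ExemplarReservoir::doubleNoSamples, /* maxBuckets= */ 160, /* startingScale= */ 20);

This drops the counter-factory indirection that existed only to compare MapCounter against the circular buffer in benchmarks; callers that previously built an ExponentialBucketStrategy now pass maxBuckets and startingScale straight through.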