From 907e7e8860b700f48c1bdc3ed8116cf1125ff28f Mon Sep 17 00:00:00 2001 From: Diego Hurtado Date: Tue, 9 Aug 2022 19:15:06 +0200 Subject: [PATCH] Add exponential histogram Fixes #2963 --- .../exponential_histogram/aggregation.py | 554 ++++++++++ .../exponential_histogram/buckets.py | 234 +++++ .../sdk/metrics/_internal/point.py | 41 + ...xponential_bucket_histogram_aggregation.py | 962 ++++++++++++++++++ 4 files changed, 1791 insertions(+) create mode 100644 opentelemetry-sdk/src/opentelemetry/sdk/metrics/_internal/exponential_histogram/aggregation.py create mode 100644 opentelemetry-sdk/src/opentelemetry/sdk/metrics/_internal/exponential_histogram/buckets.py create mode 100644 opentelemetry-sdk/tests/metrics/exponential_histogram/test_exponential_bucket_histogram_aggregation.py diff --git a/opentelemetry-sdk/src/opentelemetry/sdk/metrics/_internal/exponential_histogram/aggregation.py b/opentelemetry-sdk/src/opentelemetry/sdk/metrics/_internal/exponential_histogram/aggregation.py new file mode 100644 index 00000000000..9f72aadf50a --- /dev/null +++ b/opentelemetry-sdk/src/opentelemetry/sdk/metrics/_internal/exponential_histogram/aggregation.py @@ -0,0 +1,554 @@ +# Copyright The OpenTelemetry Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from math import inf +from typing import Optional, TypeVar, Union, Tuple + +from opentelemetry.sdk.metrics._internal.aggregation import ( + AggregationTemporality, + _Aggregation, +) +from opentelemetry.sdk.metrics._internal.exponential_histogram.buckets import Buckets +from opentelemetry.sdk.metrics._internal.exponential_histogram.mapping.exponent_mapping import ( + ExponentMapping, +) +from opentelemetry.sdk.metrics._internal.exponential_histogram.mapping.logarithm_mapping import ( + LogarithmMapping, +) +from opentelemetry.sdk.metrics._internal.measurement import Measurement +from opentelemetry.sdk.metrics._internal.point import Buckets as BucketsPoint +from opentelemetry.sdk.metrics._internal.point import ( + ExponentialHistogramDataPoint, +) +from opentelemetry.sdk.metrics._internal.point import ( + Histogram as HistogramPoint, +) +from opentelemetry.sdk.metrics._internal.point import ( + HistogramDataPoint, + NumberDataPoint, +) +from opentelemetry.util.types import Attributes + +_DataPointVarT = TypeVar("_DataPointVarT", NumberDataPoint, HistogramDataPoint) + + +# pylint: disable=protected-access +class _ExponentialBucketHistogramAggregation(_Aggregation[HistogramPoint]): + # min_max_size is the smallest reasonable configuration, which is small + # enough to contain the entire normal floating point range at min + # scale. + _min_max_size = 2 + + # max_max_size is an arbitrary limit meant to limit accidental use of + # giant histograms. + _max_max_size = 16384 + + def __init__( + self, + attributes: Attributes, + start_time_unix_nano: int, + # This is the default maximum number of buckets per positive or + # negative number range. The value 160 is specified by OpenTelemetry. 
+        # See the derivation here:
+        # https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/metrics/sdk.md#exponential-bucket-histogram-aggregation
+        max_size: int = 160,
+    ):
+        super().__init__(attributes)
+        # _max_size is the maximum capacity of the positive and negative
+        # ranges.
+
+        if max_size < self._min_max_size:
+            raise Exception(
+                f"size {max_size} is smaller than {self._min_max_size}"
+            )
+
+        if max_size > self._max_max_size:
+            raise Exception(
+                f"size {max_size} is larger than {self._max_max_size}"
+            )
+
+        self._max_size = max_size
+
+        # _sum is the sum of all the measurements aggregated by this
+        # aggregator.
+        self._sum = 0
+
+        # _count is incremented by 1 per call to aggregate.
+        self._count = 0
+
+        # _zero_count is incremented by 1 when the measured value is exactly
+        # 0.
+        self._zero_count = 0
+
+        # _min is set when count > 0
+        self._min = 0
+
+        # _max is set when count > 0
+        self._max = 0
+
+        # _positive holds the positive values
+        self._positive = Buckets()
+
+        # _negative holds the negative values by their absolute value
+        self._negative = Buckets()
+
+        # _mapping corresponds to the current scale and is shared by both the
+        # positive and the negative ranges.
+        self._mapping = LogarithmMapping(LogarithmMapping._max_scale)
+
+        # _previous_point is needed by collect to convert between
+        # temporalities; there is no previous point before the first
+        # collection.
+        self._previous_point = None
+
+        self._instrument_temporality = AggregationTemporality.DELTA
+        self._start_time_unix_nano = start_time_unix_nano
+
+    @property
+    def _scale(self):
+        if self._count == self._zero_count:
+            return 0
+
+        return self._mapping.scale
+
+    def aggregate(self, measurement: Measurement) -> None:
+        self._update_by_incr(measurement.value, 1)
+
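Context for the aggregate path above: per the OpenTelemetry specification, at scale s the bucket base is 2 ** (2 ** -s), and the logarithm mapping computes a bucket index as ceil(log2(value) * 2**s) - 1. A minimal, self-contained sketch of that mapping follows; it is not this module's LogarithmMapping, which additionally handles rounding at exact powers of the base and the subnormal range.

    from math import ceil, log2

    def map_to_index(value: float, scale: int) -> int:
        # Assumes value > 0. Bucket i contains values in
        # (base**i, base**(i + 1)], where base == 2 ** (2 ** -scale).
        return ceil(log2(value) * 2**scale) - 1

    def lower_boundary(index: int, scale: int) -> float:
        return 2.0 ** (index * 2.0**-scale)

    # map_to_index(4.0, 0) == 1   -> bucket (2, 4] at scale 0
    # map_to_index(4.0, -1) == 0  -> bucket (1, 4] at scale -1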
+    def collect(
+        self,
+        aggregation_temporality: AggregationTemporality,
+        collection_start_nano: int,
+    ) -> Optional[_DataPointVarT]:
+        """
+        Atomically return a point for the current value of the metric.
+        """
+
+        with self._lock:
+            if not any(self._negative._counts) and not any(
+                self._positive._counts
+            ):
+                return None
+
+            start_time_unix_nano = self._start_time_unix_nano
+            sum_ = self._sum
+            max_ = self._max
+            min_ = self._min
+
+            # Capture counts and offsets before resetting the internal state,
+            # otherwise the data point below would be built from the
+            # already-cleared buckets.
+            negative_counts = self._negative._counts
+            positive_counts = self._positive._counts
+            negative_offset = self._negative.offset()
+            positive_offset = self._positive.offset()
+
+            self._negative._counts = [0]
+            self._positive._counts = [0]
+            self._start_time_unix_nano = collection_start_nano
+            self._sum = 0
+            self._min = inf
+            self._max = -inf
+
+            current_point = ExponentialHistogramDataPoint(
+                attributes=self._attributes,
+                start_time_unix_nano=start_time_unix_nano,
+                time_unix_nano=collection_start_nano,
+                count=(
+                    sum(negative_counts)
+                    + sum(positive_counts)
+                    + self._zero_count
+                ),
+                sum=sum_,
+                scale=self._scale,
+                zero_count=self._zero_count,
+                positive=BucketsPoint(positive_offset, positive_counts),
+                negative=BucketsPoint(negative_offset, negative_counts),
+                # FIXME: Find the right value for flags
+                flags=0,
+                min=min_,
+                max=max_,
+            )
+
+            if self._previous_point is None or (
+                self._instrument_temporality is aggregation_temporality
+            ):
+                self._previous_point = current_point
+                return current_point
+
+            max_ = current_point.max
+            min_ = current_point.min
+
+            if aggregation_temporality is AggregationTemporality.CUMULATIVE:
+                start_time_unix_nano = (
+                    self._previous_point.start_time_unix_nano
+                )
+                sum_ = current_point.sum + self._previous_point.sum
+                # Only update min/max on delta -> cumulative
+                max_ = max(current_point.max, self._previous_point.max)
+                min_ = min(current_point.min, self._previous_point.min)
+
+                negative_counts = [
+                    curr_count + prev_count
+                    for curr_count, prev_count in zip(
+                        current_point.negative.bucket_counts,
+                        self._previous_point.negative.bucket_counts,
+                    )
+                ]
+                positive_counts = [
+                    curr_count + prev_count
+                    for curr_count, prev_count in zip(
+                        current_point.positive.bucket_counts,
+                        self._previous_point.positive.bucket_counts,
+                    )
+                ]
+            else:
+                start_time_unix_nano = self._previous_point.time_unix_nano
+                sum_ = current_point.sum - self._previous_point.sum
+
+                # Unlike the cumulative branch above, this is a
+                # cumulative -> delta conversion, so the previous counts are
+                # subtracted, not added.
+                negative_counts = [
+                    curr_count - prev_count
+                    for curr_count, prev_count in zip(
+                        current_point.negative.bucket_counts,
+                        self._previous_point.negative.bucket_counts,
+                    )
+                ]
+                positive_counts = [
+                    curr_count - prev_count
+                    for curr_count, prev_count in zip(
+                        current_point.positive.bucket_counts,
+                        self._previous_point.positive.bucket_counts,
+                    )
+                ]
+
+            current_point = ExponentialHistogramDataPoint(
+                attributes=self._attributes,
+                start_time_unix_nano=start_time_unix_nano,
+                time_unix_nano=current_point.time_unix_nano,
+                count=(
+                    sum(negative_counts)
+                    + sum(positive_counts)
+                    + self._zero_count
+                ),
+                sum=sum_,
+                scale=self._scale,
+                zero_count=self._zero_count,
+                positive=BucketsPoint(positive_offset, positive_counts),
+                negative=BucketsPoint(negative_offset, negative_counts),
+                # FIXME: Find the right value for flags
+                flags=0,
+                min=min_,
+                max=max_,
+            )
+
+            self._previous_point = current_point
+            return current_point
+
+    def _clear(self) -> None:
+        self._positive.clear()
+        self._negative.clear()
+        self._sum = 0
+        self._count = 0
+        self._zero_count = 0
+        self._min = 0
+        self._max = 0
+        self._mapping = LogarithmMapping(LogarithmMapping._max_scale)
+
+    def _swap(self, other: "_ExponentialBucketHistogramAggregation") -> None:
+
+        for attribute in [
+            "_positive",
+            "_negative",
+            "_sum",
+            "_count",
+            "_zero_count",
+            "_min",
+            "_max",
+            "_mapping",
+        ]:
+            temp = getattr(self, attribute)
+            setattr(self, attribute, getattr(other, attribute))
+            setattr(other, attribute, temp)
+
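One caveat about the temporality conversion in collect() above: Python's zip() truncates to the shorter sequence, so when the current and previous points carry bucket-count lists of different lengths, the trailing buckets are silently dropped from the merge. A defensive sketch (not part of this patch, and it still assumes both lists are aligned to the same offset and scale, which collect() does not yet guarantee):

    from itertools import zip_longest

    def merge_counts(current: list, previous: list) -> list:
        # Pad the shorter list with zeros so no trailing bucket is lost.
        return [
            curr + prev
            for curr, prev in zip_longest(current, previous, fillvalue=0)
        ]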
+    def _copy_into(
+        self, other: "_ExponentialBucketHistogramAggregation"
+    ) -> None:
+        other._clear()
+
+        for attribute in [
+            "_positive",
+            "_negative",
+            "_sum",
+            "_count",
+            "_zero_count",
+            "_min",
+            "_max",
+            "_mapping",
+        ]:
+            setattr(other, attribute, getattr(self, attribute))
+
+    def _update_by_incr(self, number: Union[int, float], incr: int) -> None:
+
+        value = float(number)
+
+        if self._count == 0:
+            self._min = number
+            self._max = number
+
+        else:
+            if number < self._min:
+                self._min = number
+            if number > self._max:
+                self._max = number
+
+        self._count += incr
+
+        if value == 0:
+            self._zero_count += incr
+            return
+
+        self._sum += number * incr
+
+        if value > 0:
+            buckets = self._positive
+        else:
+            value = -value
+            buckets = self._negative
+
+        self._update(buckets, value, incr)
+
+    def _downscale(self, change: int) -> None:
+        """
+        Subtracts change from the current mapping scale.
+        """
+
+        if change == 0:
+            return
+
+        if change < 0:
+            raise Exception(f"Impossible change of scale: {change}")
+
+        new_scale = self._mapping.scale - change
+
+        self._positive.downscale(change)
+        self._negative.downscale(change)
+
+        if new_scale <= 0:
+            mapping = ExponentMapping(new_scale)
+        else:
+            mapping = LogarithmMapping(new_scale)
+
+        self._mapping = mapping
+
+    # pylint: disable=no-self-use
+    def _change_scale(self, high: int, low: int, size: int) -> int:
+        """
+        Calculates how much downscaling is needed by shifting the high and
+        low values until they are separated by fewer than size buckets.
+        """
+
+        change = 0
+
+        while high - low >= size:
+            high = high >> 1
+            low = low >> 1
+
+            change += 1
+        return change
+
+    def _update(self, buckets: Buckets, value: float, incr: int) -> None:
+
+        index = self._mapping.map_to_index(value)
+
+        low, high, success = self._increment_index_by(buckets, index, incr)
+
+        if success:
+            return
+
+        self._downscale(self._change_scale(high, low, self._max_size))
+
+        index = self._mapping.map_to_index(value)
+
+        _, _, success = self._increment_index_by(buckets, index, incr)
+
+        if not success:
+            raise Exception("Downscale logic error")
+
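A worked example of the loop in _change_scale above (a standalone sketch, not part of the patch): each unit of change halves both ends of the index range with an arithmetic right shift, and _downscale then merges 2**change adjacent buckets into one.

    def change_scale(high: int, low: int, size: int) -> int:
        # Same loop as _change_scale: shift both ends of the index range
        # right until the span fits in `size` buckets.
        change = 0
        while high - low >= size:
            high >>= 1
            low >>= 1
            change += 1
        return change

    # With max_size=4 and indices spanning low=-3 .. high=7:
    #   (7, -3) -> (3, -2) -> (1, -1), so two halvings are needed.
    assert change_scale(7, -3, 4) == 2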
+    def _increment_index_by(
+        self, buckets: Buckets, index: int, incr: int
+    ) -> tuple:
+        """
+        Determines if the index lies inside the current range
+        [index_start, index_end] and, if not, returns the minimum size (up to
+        max_size) that will satisfy the new value.
+
+        Returns a tuple: low, high, success
+        """
+
+        if incr == 0:
+            # Skip the work for a 0 increment. This happens when merging
+            # sparse data, for example, and when _update_by_incr is called
+            # with a 0 increment.
+
+            return 0, 0, True
+
+        if buckets.len() == 0:
+            # The Go implementation initializes its backing here if that has
+            # not been done before. We don't need to do that here because the
+            # backing is initialized in the Buckets constructor.
+            buckets._index_start = index
+            buckets._index_end = index
+            buckets._index_base = index
+
+        elif index < buckets._index_start:
+            span = buckets._index_end - index
+
+            if span >= self._max_size:
+                # rescaling needed, mapped value to the left
+
+                return index, buckets._index_end, False
+
+            if span >= buckets._backing.size():
+                self._grow(buckets, span + 1)
+
+            buckets._index_start = index
+
+        elif index > buckets._index_end:
+            span = index - buckets._index_start
+
+            if span >= self._max_size:
+                # rescaling needed, mapped value to the right
+
+                return buckets._index_start, index, False
+
+            if span >= buckets._backing.size():
+
+                self._grow(buckets, span + 1)
+
+            buckets._index_end = index
+
+        bucket_index = index - buckets._index_base
+
+        if bucket_index < 0:
+            bucket_index += buckets._backing.size()
+
+        buckets.increment_bucket(bucket_index, incr)
+
+        return 0, 0, True
+
+    def _grow(self, buckets: Buckets, needed: int):
+        """
+        Resizes the backing array by doubling in size up to max_size. This
+        extends the array with zeros and copies the existing counts to the
+        same positions.
+        """
+
+        size = buckets._backing.size()
+        bias = buckets._index_base - buckets._index_start
+        old_positive_limit = size - bias
+        new_size = self._power_of_two_rounded_up(needed)
+        if new_size > self._max_size:
+            new_size = self._max_size
+
+        new_positive_limit = new_size - bias
+        buckets._backing.grow_to(
+            new_size, old_positive_limit, new_positive_limit
+        )
+
+    def _low_high_at_scale(self, buckets: Buckets, scale: int) -> tuple:
+        """
+        Returns low, high
+        """
+
+        if buckets.len() == 0:
+            return 0, -1
+
+        shift = self._scale - scale
+
+        return buckets._index_start >> shift, buckets._index_end >> shift
+
+    def _merge_from(self, other: "_ExponentialBucketHistogramAggregation"):
+
+        if self._count == 0:
+            self._min = other._min
+            self._max = other._max
+
+        elif other._count != 0:
+            if other._min < self._min:
+                self._min = other._min
+            if other._max > self._max:
+                self._max = other._max
+
+        self._sum += other._sum
+        self._count += other._count
+        self._zero_count += other._zero_count
+
+        min_scale = min(self._scale, other._scale)
+
+        low_positive, high_positive = self._combine_low_high(
+            *self._low_high_at_scale(self._positive, min_scale),
+            *other._low_high_at_scale(other._positive, min_scale),
+        )
+
+        low_negative, high_negative = self._combine_low_high(
+            *self._low_high_at_scale(self._negative, min_scale),
+            *other._low_high_at_scale(other._negative, min_scale),
+        )
+
+        min_scale = min(
+            min_scale
+            - self._change_scale(high_positive, low_positive, self._max_size),
+            min_scale
+            - self._change_scale(high_negative, low_negative, self._max_size),
+        )
+
+        self._downscale(self._scale - min_scale)
+
+        self._merge_buckets(self._positive, other, other._positive, min_scale)
+        self._merge_buckets(self._negative, other, other._negative, min_scale)
+
+    def _merge_buckets(
+        self,
+        mine: Buckets,
+        other: "_ExponentialBucketHistogramAggregation",
+        theirs: Buckets,
+        scale: int,
+    ) -> None:
+
+        their_offset = theirs.offset()
+        their_change = other._scale - scale
+
+        for index in range(theirs.len()):
+
+            _, _, success = self._increment_index_by(
+                mine, (their_offset + index) >> their_change, theirs.at(index)
+            )
+
+            if not success:
+                raise Exception("Incorrect merge scale")
+
+    @staticmethod
+    def _combine_low_high(
+        a_low: int, a_high: int, b_low: int, o_high: int
+    ) -> Tuple[int, int]:
+        """
+        Returns the combination of low and high pairs
+        """
+        if b_low > o_high:
+            return a_low, a_high
+
+        if a_low > 
a_high: + return b_low, o_high + + return min(a_low, b_low), max(a_high, o_high) + + @staticmethod + def _power_of_two_rounded_up(number: int) -> int: + """ + Computes the least power of two that is >= number. + """ + + number = number - 1 + + number |= number >> 1 + number |= number >> 2 + number |= number >> 4 + number |= number >> 8 + number |= number >> 16 + + number = number + 1 + + return number diff --git a/opentelemetry-sdk/src/opentelemetry/sdk/metrics/_internal/exponential_histogram/buckets.py b/opentelemetry-sdk/src/opentelemetry/sdk/metrics/_internal/exponential_histogram/buckets.py new file mode 100644 index 00000000000..3c924160527 --- /dev/null +++ b/opentelemetry-sdk/src/opentelemetry/sdk/metrics/_internal/exponential_histogram/buckets.py @@ -0,0 +1,234 @@ +# Copyright The OpenTelemetry Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from abc import ABC, abstractmethod + + +class BucketsBacking(ABC): + @abstractmethod + def size(self) -> int: + """ + Returns the physical size of the backing array, which is + >= buckets.Len() the number allocated. + """ + + @abstractmethod + def grow_to( + self, new_size: int, old_positive_limit: int, new_positive_limit: int + ) -> None: + """ + Grows the backing array into a new size and copies old entries into + their correct new positions. + """ + + @abstractmethod + def reverse(self, start: int, end: int) -> None: + """ + Reverses the items in the backing array from [start, end). + """ + + @abstractmethod + def empty_bucket(self, src: int) -> int: + """ + Empties the count from a bucket for moving into another one + """ + + @abstractmethod + def increment(self, bucket_index: int, increment: int) -> bool: + """ + Increments a bucket by increment + """ + + @abstractmethod + def count_at(self, pos: int) -> int: + """ + Returns the count at a specific bucket. + """ + + @abstractmethod + def reset(self) -> None: + """ + Resets all buckets to zero count + """ + + +class BucketsVarWidth(BucketsBacking): + def __init__(self): + + self._counts = [0] + + def size(self) -> int: + """ + Returns the physical size of the backing array, which is + >= buckets.Len() the number allocated. + """ + return len(self._counts) + + def grow_to( + self, new_size: int, old_positive_limit: int, new_positive_limit: int + ) -> None: + """ + Grows the backing array into a new size and copies old entries into + their correct new positions. + """ + # FIXME this follows Go implementation maybe too closely. Since we + # don't need to request memory for a larger list, maybe this can be + # implemented in a more pythonical way. + tmp = [0] * new_size + tmp[new_positive_limit:] = self._counts[old_positive_limit:] + tmp[0:old_positive_limit] = self._counts[0:old_positive_limit] + self._counts = tmp + + def reverse(self, start: int, end: int) -> None: + """ + Reverses the items in the backing array from [start, end[. 
+ """ + + for index, value in enumerate(reversed(self._counts[start:end])): + self._counts[index + start] = value + + def empty_bucket(self, src: int) -> int: + """ + Empties the count from a bucket for moving into another one + returns the count from that bucket before it was set to zero. + """ + + temp = self._counts[src] + self._counts[src] = 0 + return temp + + def increment(self, bucket_index: int, increment: int) -> None: + """ + Increments a bucket by increment + """ + + self._counts[bucket_index] += increment + + def count_at(self, pos: int) -> int: + """ + Returns the count at a specific bucket. + """ + + return self._counts[pos] + + def reset(self) -> None: + """ + Resets all buckets to zero count + """ + + self._counts = [0] * len(self._counts) + + +class Buckets: + def __init__(self): + self._backing = BucketsVarWidth() + + # The term "index" refers to the number of the + # histogram bucket used to determine its boundaries. + # The lower-boundary of a bucket is determined by + # formula base**index and the upper-boundary of a + # bucket is base**(index+1). Index values are signed + # to account for values less than or equal to 1. + + # Index of the 0th position in the backing array: backing[0] is the + # count in the bucket with index self._index_base. + self._index_base = 0 + + # indexStart is the smallest index value represented in the backing + # array. + self._index_start = 0 + + # indexEnd is the largest index value represented in the backing array. + self._index_end = 0 + + def offset(self) -> int: + return self._index_start + + def len(self) -> int: + if self._backing.size() == 0: + return 0 + + if self._index_end == self._index_start and self.at(0) == 0: + return 0 + + return self._index_end - self._index_start + 1 + + # pylint: disable=invalid-name + def at(self, position: int) -> int: + bias = self._index_base - self._index_start + + if position < bias: + position += self._backing.size() + + position -= bias + + return self._backing.count_at(position) + + def clear(self) -> None: + + self._index_base = 0 + self._index_start = 0 + self._index_end = 0 + + self._backing.reset() + + # pylint: disable=invalid-name + def downscale(self, by: int) -> None: + """ + Rotates, then collapses 2**`by`-to-1 buckets. 
+ """ + + bias = self._index_base - self._index_start + + if bias != 0: + + self._index_base = self._index_start + + self._backing.reverse(0, self._backing.size()) + self._backing.reverse(0, bias) + self._backing.reverse(bias, self._backing.size()) + + size = 1 + self._index_end - self._index_start + each = 1 << by + inpos = 0 + outpos = 0 + + pos = self._index_start + + while pos <= self._index_end: + mod = pos % each + if mod < 0: + mod += each + + index = mod + + while index < each and inpos < size: + + if outpos != inpos: + self.increment_bucket( + outpos, self._backing.empty_bucket(inpos) + ) + + inpos += 1 + pos += 1 + index += 1 + + outpos += 1 + + self._index_start >>= by + self._index_end >>= by + self._index_base = self._index_start + + def increment_bucket(self, bucket_index: int, incr: int): + self._backing.increment(bucket_index, incr) diff --git a/opentelemetry-sdk/src/opentelemetry/sdk/metrics/_internal/point.py b/opentelemetry-sdk/src/opentelemetry/sdk/metrics/_internal/point.py index b4d813accaf..ef38e8d593b 100644 --- a/opentelemetry-sdk/src/opentelemetry/sdk/metrics/_internal/point.py +++ b/opentelemetry-sdk/src/opentelemetry/sdk/metrics/_internal/point.py @@ -60,6 +60,47 @@ def to_json(self, indent=4) -> str: return dumps(asdict(self), indent=indent) +@dataclass(frozen=True) +class Buckets: + offset: int + bucket_counts: Sequence[int] + + +@dataclass(frozen=True) +class ExponentialHistogramDataPoint: + """Single data point in a timeseries whose boundaries are defined by an + exponential function. This timeseries describes the time-varying scalar + value of a metric. + """ + attributes: Attributes + start_time_unix_nano: int + time_unix_nano: int + count: int + sum: Union[int, float] + scale: int + zero_count: int + positive: Buckets + negative: Buckets + flags: int + min: float + max: float + + def to_json(self, indent=4) -> str: + return dumps(asdict(self), indent=indent) + + +@dataclass(frozen=True) +class ExponentialHistogram: + """Represents the type of a metric that is calculated by aggregating as an + ExponentialHistogram of all reported measurements over a time interval. + """ + + data_points: Sequence[ExponentialHistogramDataPoint] + aggregation_temporality: ( + "opentelemetry.sdk.metrics.export.AggregationTemporality" + ) + + @dataclass(frozen=True) class Sum: """Represents the type of a scalar metric that is calculated as a sum of diff --git a/opentelemetry-sdk/tests/metrics/exponential_histogram/test_exponential_bucket_histogram_aggregation.py b/opentelemetry-sdk/tests/metrics/exponential_histogram/test_exponential_bucket_histogram_aggregation.py new file mode 100644 index 00000000000..e3e1df0abfb --- /dev/null +++ b/opentelemetry-sdk/tests/metrics/exponential_histogram/test_exponential_bucket_histogram_aggregation.py @@ -0,0 +1,962 @@ +# Copyright The OpenTelemetry Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+
+from itertools import permutations
+from math import ldexp
+from random import random, seed
+from sys import float_info
+from typing import Sequence
+from unittest import TestCase
+from unittest.mock import Mock
+
+from opentelemetry.sdk.metrics._internal.exponential_histogram.aggregation import (
+    _ExponentialBucketHistogramAggregation,
+)
+from opentelemetry.sdk.metrics._internal.exponential_histogram.buckets import Buckets
+from opentelemetry.sdk.metrics._internal.exponential_histogram.mapping.exponent_mapping import (
+    ExponentMapping,
+)
+from opentelemetry.sdk.metrics._internal.exponential_histogram.mapping.ieee_754 import (
+    MAX_NORMAL_EXPONENT,
+    MIN_NORMAL_EXPONENT,
+)
+from opentelemetry.sdk.metrics._internal.exponential_histogram.mapping.logarithm_mapping import (
+    LogarithmMapping,
+)
+from opentelemetry.sdk.metrics._internal.measurement import Measurement
+
+
+def get_counts(buckets: Buckets) -> Sequence[int]:
+
+    counts = []
+
+    for index in range(buckets.len()):
+        counts.append(buckets.at(index))
+
+    return counts
+
+
+def center_val(mapping: ExponentMapping, index: int) -> float:
+    return (
+        mapping.get_lower_boundary(index)
+        + mapping.get_lower_boundary(index + 1)
+    ) / 2
+
+
+class TestExponentialBucketHistogramAggregation(TestCase):
+    def assertInEpsilon(self, first, second, epsilon):
+        self.assertLessEqual(first, (second * (1 + epsilon)))
+        self.assertGreaterEqual(first, (second * (1 - epsilon)))
+
+    def require_equal(self, a, b):
+
+        if a._sum == 0 or b._sum == 0:
+            # assertAlmostEqual's third positional argument is places, which
+            # must be an int; a tolerance is passed with the delta keyword.
+            self.assertAlmostEqual(a._sum, b._sum, delta=1e-6)
+        else:
+            self.assertInEpsilon(a._sum, b._sum, 1e-6)
+
+        self.assertEqual(a._count, b._count)
+        self.assertEqual(a._zero_count, b._zero_count)
+        self.assertEqual(a._scale, b._scale)
+
+        self.assertEqual(a._positive.len(), b._positive.len())
+        self.assertEqual(a._negative.len(), b._negative.len())
+
+        for index in range(a._positive.len()):
+            self.assertEqual(a._positive.at(index), b._positive.at(index))
+
+        for index in range(a._negative.len()):
+            self.assertEqual(a._negative.at(index), b._negative.at(index))
+
+    def test_alternating_growth_0(self):
+        """
+        Tests insertion of [2, 4, 1]. The index of 2 (i.e., 0) becomes
+        `index_base`, the 4 goes to its right and the 1 goes in the last
+        position of the backing array. With 3 binary orders of magnitude
+        and max_size=4, this must finish with scale=0; with minimum value 1
+        this must finish with offset=-1 (all scales).
+        """
+
+        # The corresponding Go test is TestAlternatingGrowth1 where:
+        # agg := NewFloat64(NewConfig(WithMaxSize(4)))
+        # agg is an instance of github.com/lightstep/otel-launcher-go/lightstep/sdk/metric/aggregator/histogram/structure.Histogram[float64]
+
+        exponential_histogram_aggregation = (
+            _ExponentialBucketHistogramAggregation(Mock(), Mock(), max_size=4)
+        )
+
+        exponential_histogram_aggregation.aggregate(Measurement(2, Mock()))
+        exponential_histogram_aggregation.aggregate(Measurement(4, Mock()))
+        exponential_histogram_aggregation.aggregate(Measurement(1, Mock()))
+
+        self.assertEqual(
+            exponential_histogram_aggregation._positive.offset(), -1
+        )
+        self.assertEqual(exponential_histogram_aggregation._scale, 0)
+        self.assertEqual(
+            get_counts(exponential_histogram_aggregation._positive), [1, 1, 1]
+        )
+
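A small sketch (separate from the test file) of why test_alternating_growth_0 expects offset -1 and counts [1, 1, 1]: at scale 0 the base is 2, so the values 1, 2 and 4 land in three adjacent buckets.

    from math import ceil, log2

    def index_at_scale_zero(value: float) -> int:
        # At scale 0 the base is 2, so bucket i covers (2**i, 2**(i + 1)].
        return ceil(log2(value)) - 1

    # Three adjacent buckets starting at index -1, one count each:
    assert index_at_scale_zero(1) == -1
    assert index_at_scale_zero(2) == 0
    assert index_at_scale_zero(4) == 1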
+    def test_alternating_growth_1(self):
+        """
+        Tests insertion of [2, 2, 2, 1, 8, 0.5]. The test proceeds as
+        above but then downscales once further to scale=-1, thus index -1
+        holds range [0.25, 1.0), index 0 holds range [1.0, 4), index 1
+        holds range [4, 16).
+        """
+
+        exponential_histogram_aggregation = (
+            _ExponentialBucketHistogramAggregation(Mock(), Mock(), max_size=4)
+        )
+
+        exponential_histogram_aggregation.aggregate(Measurement(2, Mock()))
+        exponential_histogram_aggregation.aggregate(Measurement(2, Mock()))
+        exponential_histogram_aggregation.aggregate(Measurement(2, Mock()))
+        exponential_histogram_aggregation.aggregate(Measurement(1, Mock()))
+        exponential_histogram_aggregation.aggregate(Measurement(8, Mock()))
+        exponential_histogram_aggregation.aggregate(Measurement(0.5, Mock()))
+
+        self.assertEqual(
+            exponential_histogram_aggregation._positive.offset(), -1
+        )
+        self.assertEqual(exponential_histogram_aggregation._scale, -1)
+        self.assertEqual(
+            get_counts(exponential_histogram_aggregation._positive), [2, 3, 1]
+        )
+
+    def test_permutations(self):
+        """
+        Tests that every permutation of certain sequences with max_size=2
+        results in the same scale=-1 histogram.
+        """
+
+        for test_values, expected in [
+            [
+                [0.5, 1.0, 2.0],
+                {
+                    "scale": -1,
+                    "offset": -1,
+                    "len": 2,
+                    "at_0": 2,
+                    "at_1": 1,
+                },
+            ],
+            [
+                [1.0, 2.0, 4.0],
+                {
+                    "scale": -1,
+                    "offset": -1,
+                    "len": 2,
+                    "at_0": 1,
+                    "at_1": 2,
+                },
+            ],
+            [
+                [0.25, 0.5, 1],
+                {
+                    "scale": -1,
+                    "offset": -2,
+                    "len": 2,
+                    "at_0": 1,
+                    "at_1": 2,
+                },
+            ],
+        ]:
+
+            for permutation in permutations(test_values):
+
+                exponential_histogram_aggregation = (
+                    _ExponentialBucketHistogramAggregation(
+                        Mock(), Mock(), max_size=2
+                    )
+                )
+
+                for value in permutation:
+
+                    exponential_histogram_aggregation.aggregate(
+                        Measurement(value, Mock())
+                    )
+
+                self.assertEqual(
+                    exponential_histogram_aggregation._scale, expected["scale"]
+                )
+                self.assertEqual(
+                    exponential_histogram_aggregation._positive.offset(),
+                    expected["offset"],
+                )
+                self.assertEqual(
+                    exponential_histogram_aggregation._positive.len(),
+                    expected["len"],
+                )
+                self.assertEqual(
+                    exponential_histogram_aggregation._positive.at(0),
+                    expected["at_0"],
+                )
+                self.assertEqual(
+                    exponential_histogram_aggregation._positive.at(1),
+                    expected["at_1"],
+                )
+
+    def test_ascending_sequence(self):
+
+        for max_size in [3, 4, 6, 9]:
+            for offset in range(-5, 6):
+                for init_scale in [0, 4]:
+                    self.ascending_sequence_test(max_size, offset, init_scale)
+
+    def ascending_sequence_test(
+        self, max_size: int, offset: int, init_scale: int
+    ):
+
+        for step in range(max_size, max_size * 4):
+
+            exponential_histogram_aggregation = (
+                _ExponentialBucketHistogramAggregation(
+                    Mock(), Mock(), max_size=max_size
+                )
+            )
+
+            if init_scale <= 0:
+                mapping = ExponentMapping(init_scale)
+            else:
+                mapping = LogarithmMapping(init_scale)
+
+            min_val = center_val(mapping, offset)
+            max_val = center_val(mapping, offset + step)
+
+            sum_ = 0.0
+
+            for index in range(max_size):
+                value = center_val(mapping, offset + index)
+                exponential_histogram_aggregation.aggregate(
+                    Measurement(value, Mock())
+                )
+                sum_ += value
+
+            self.assertEqual(
+                init_scale, exponential_histogram_aggregation._scale
+            )
+            self.assertEqual(
+                offset, exponential_histogram_aggregation._positive.offset()
+            )
+
+            exponential_histogram_aggregation.aggregate(
+                Measurement(max_val, Mock())
+            )
+            sum_ += max_val
+
+            self.assertNotEqual(
+                0, exponential_histogram_aggregation._positive.at(0)
+            )
+
+            # The maximum-index filled bucket is at or
+            # above the mid-point (otherwise we
+            # downscaled too 
much). + + max_fill = 0 + total_count = 0 + + for index in range( + exponential_histogram_aggregation._positive.len() + ): + total_count += exponential_histogram_aggregation._positive.at( + index + ) + if exponential_histogram_aggregation._positive.at(index) != 0: + max_fill = index + + # FIXME the corresponding Go code is + # require.GreaterOrEqual(t, maxFill, uint32(maxSize)/2), make sure + # this is actually equivalent. + self.assertGreaterEqual(max_fill, int(max_size / 2)) + + self.assertGreaterEqual(max_size + 1, total_count) + self.assertGreaterEqual( + max_size + 1, exponential_histogram_aggregation._count + ) + self.assertGreaterEqual( + sum_, exponential_histogram_aggregation._sum + ) + + if init_scale <= 0: + mapping = ExponentMapping( + exponential_histogram_aggregation._scale + ) + else: + mapping = LogarithmMapping( + exponential_histogram_aggregation._scale + ) + index = mapping.map_to_index(min_val) + + self.assertEqual( + index, exponential_histogram_aggregation._positive.offset() + ) + + index = mapping.map_to_index(max_val) + + self.assertEqual( + index, + exponential_histogram_aggregation._positive.offset() + + exponential_histogram_aggregation._positive.len() + - 1, + ) + + def test_merge_simple_event(self): + + exponential_histogram_aggregation_0 = ( + _ExponentialBucketHistogramAggregation(Mock(), Mock(), max_size=4) + ) + exponential_histogram_aggregation_1 = ( + _ExponentialBucketHistogramAggregation(Mock(), Mock(), max_size=4) + ) + exponential_histogram_aggregation_2 = ( + _ExponentialBucketHistogramAggregation(Mock(), Mock(), max_size=4) + ) + exponential_histogram_aggregation_2._flag = True + + for index in range(4): + value_0 = 2 << index + value_1 = 1 / (1 << index) + + exponential_histogram_aggregation_0.aggregate( + Measurement(value_0, Mock()) + ) + exponential_histogram_aggregation_1.aggregate( + Measurement(value_1, Mock()) + ) + exponential_histogram_aggregation_2.aggregate( + Measurement(value_0, Mock()) + ) + exponential_histogram_aggregation_2.aggregate( + Measurement(value_1, Mock()) + ) + + self.assertEqual(0, exponential_histogram_aggregation_0._scale) + self.assertEqual(0, exponential_histogram_aggregation_1._scale) + self.assertEqual(-1, exponential_histogram_aggregation_2._scale) + + self.assertEqual( + 0, exponential_histogram_aggregation_0._positive.offset() + ) + self.assertEqual( + -4, exponential_histogram_aggregation_1._positive.offset() + ) + self.assertEqual( + -2, exponential_histogram_aggregation_2._positive.offset() + ) + + self.assertEqual( + [1, 1, 1, 1], + get_counts(exponential_histogram_aggregation_0._positive), + ) + self.assertEqual( + [1, 1, 1, 1], + get_counts(exponential_histogram_aggregation_1._positive), + ) + self.assertEqual( + [2, 2, 2, 2], + get_counts(exponential_histogram_aggregation_2._positive), + ) + + exponential_histogram_aggregation_0._merge_from( + exponential_histogram_aggregation_1 + ) + + self.assertEqual(-1, exponential_histogram_aggregation_0._scale) + self.assertEqual(-1, exponential_histogram_aggregation_2._scale) + + self.require_equal( + exponential_histogram_aggregation_0, + exponential_histogram_aggregation_2, + ) + + def test_merge_simple_odd(self): + + exponential_histogram_aggregation_0 = ( + _ExponentialBucketHistogramAggregation(Mock(), Mock(), max_size=4) + ) + exponential_histogram_aggregation_1 = ( + _ExponentialBucketHistogramAggregation(Mock(), Mock(), max_size=4) + ) + exponential_histogram_aggregation_2 = ( + _ExponentialBucketHistogramAggregation(Mock(), Mock(), max_size=4) + ) + 
exponential_histogram_aggregation_2._flag = True + + for index in range(4): + value_0 = 2 << index + value_1 = 2 / (1 << index) + + exponential_histogram_aggregation_0.aggregate( + Measurement(value_0, Mock()) + ) + exponential_histogram_aggregation_1.aggregate( + Measurement(value_1, Mock()) + ) + exponential_histogram_aggregation_2.aggregate( + Measurement(value_0, Mock()) + ) + exponential_histogram_aggregation_2.aggregate( + Measurement(value_1, Mock()) + ) + + self.assertEqual(4, exponential_histogram_aggregation_0._count) + self.assertEqual(4, exponential_histogram_aggregation_1._count) + self.assertEqual(8, exponential_histogram_aggregation_2._count) + + self.assertEqual(0, exponential_histogram_aggregation_0._scale) + self.assertEqual(0, exponential_histogram_aggregation_1._scale) + self.assertEqual(-1, exponential_histogram_aggregation_2._scale) + + self.assertEqual( + 0, exponential_histogram_aggregation_0._positive.offset() + ) + self.assertEqual( + -3, exponential_histogram_aggregation_1._positive.offset() + ) + self.assertEqual( + -2, exponential_histogram_aggregation_2._positive.offset() + ) + + self.assertEqual( + [1, 1, 1, 1], + get_counts(exponential_histogram_aggregation_0._positive), + ) + self.assertEqual( + [1, 1, 1, 1], + get_counts(exponential_histogram_aggregation_1._positive), + ) + self.assertEqual( + [1, 2, 3, 2], + get_counts(exponential_histogram_aggregation_2._positive), + ) + + exponential_histogram_aggregation_0._merge_from( + exponential_histogram_aggregation_1 + ) + + self.assertEqual(-1, exponential_histogram_aggregation_0._scale) + self.assertEqual(-1, exponential_histogram_aggregation_2._scale) + + self.require_equal( + exponential_histogram_aggregation_0, + exponential_histogram_aggregation_2, + ) + + def test_merge_exhaustive(self): + + factor = 1024.0 + count = 16 + + means = [0.0, factor] + stddevs = [1.0, factor] + + for mean in means: + for stddev in stddevs: + seed(77777677777) + + values = [] + + for _ in range(count): + # FIXME random() is not equivalent to the corresponding + # function in the Go implementation. 
+ values.append(mean + random() * stddev) + + for partition in range(1, count): + + for size in [2, 6, 8, 9, 16]: + for incr in [ + int(1), + int(0x100), + int(0x10000), + int(0x100000000), + ]: + self._test_merge_exhaustive( + values[0:partition], + values[partition:count], + size, + incr, + ) + + def _test_merge_exhaustive( + self, + values_0: Sequence[float], + values_1: Sequence[float], + size: int, + incr: int, + ): + + exponential_histogram_aggregation_0 = ( + _ExponentialBucketHistogramAggregation( + Mock(), Mock(), max_size=size + ) + ) + exponential_histogram_aggregation_1 = ( + _ExponentialBucketHistogramAggregation( + Mock(), Mock(), max_size=size + ) + ) + exponential_histogram_aggregation_2 = ( + _ExponentialBucketHistogramAggregation( + Mock(), Mock(), max_size=size + ) + ) + + for value_0 in values_0: + exponential_histogram_aggregation_0._update_by_incr(value_0, incr) + exponential_histogram_aggregation_2._update_by_incr(value_0, incr) + + for value_1 in values_1: + exponential_histogram_aggregation_1._update_by_incr(value_1, incr) + exponential_histogram_aggregation_2._update_by_incr(value_1, incr) + + exponential_histogram_aggregation_0._merge_from( + exponential_histogram_aggregation_1 + ) + + self.require_equal( + exponential_histogram_aggregation_2, + exponential_histogram_aggregation_0, + ) + + def test_integer_aggregation(self): + exponential_histogram_aggregation_0 = ( + _ExponentialBucketHistogramAggregation( + Mock(), Mock(), max_size=256 + ) + ) + exponential_histogram_aggregation_1 = ( + _ExponentialBucketHistogramAggregation( + Mock(), Mock(), max_size=256 + ) + ) + + expect = 0 + for index in range(2, 257): + expect += index + exponential_histogram_aggregation_0.aggregate( + Measurement(index, Mock()) + ) + exponential_histogram_aggregation_1.aggregate( + Measurement(index, Mock()) + ) + + self.assertEqual(expect, exponential_histogram_aggregation_0._sum) + self.assertEqual(255, exponential_histogram_aggregation_0._count) + + # Scale should be 5. The upper power-of-two is 256 == 2 ** 8. 
The + # exponential base 2 ** (2 ** -5) raised to the 256th power should be + # 256: + # 2 ** ((2 ** -5) * 256) = + # 2 ** ((2 ** -5) * (2 ** 8)) = + # 2 ** (2 ** 3) = + # 2 ** 8 + + scale = exponential_histogram_aggregation_0._scale + self.assertEqual(5, scale) + + def expect_0(buckets: Buckets): + self.assertEqual(0, buckets.len()) + + def expect_256(buckets: Buckets, factor: int): + # The minimum value 2 has index (1 << scale) - 1, which determines + # the length and the offset: + + self.assertEqual(256 - ((1 << scale) - 1), buckets.len()) + self.assertEqual((1 << scale) - 1, buckets.offset()) + + for index in range(256): + self.assertLessEqual(buckets.at(index), int(6 * factor)) + + expect_256(exponential_histogram_aggregation_0._positive, 1) + expect_0(exponential_histogram_aggregation_0._negative) + + exponential_histogram_aggregation_0._merge_from( + exponential_histogram_aggregation_1 + ) + expect_256(exponential_histogram_aggregation_0._positive, 2) + + self.assertEqual(2 * expect, exponential_histogram_aggregation_0._sum) + + exponential_histogram_aggregation_0._clear() + exponential_histogram_aggregation_1._clear() + + expect = 0 + + for index in range(2, 257): + expect -= index + + exponential_histogram_aggregation_0.aggregate( + Measurement(-index, Mock()) + ) + exponential_histogram_aggregation_1.aggregate( + Measurement(-index, Mock()) + ) + + self.assertEqual(expect, exponential_histogram_aggregation_0._sum) + self.assertEqual(255, exponential_histogram_aggregation_0._count) + + expect_256(exponential_histogram_aggregation_0._negative, 1) + expect_0(exponential_histogram_aggregation_0._positive) + + exponential_histogram_aggregation_0._merge_from( + exponential_histogram_aggregation_1 + ) + + expect_256(exponential_histogram_aggregation_0._negative, 2) + + self.assertEqual(2 * expect, exponential_histogram_aggregation_0._sum) + self.assertEqual(5, exponential_histogram_aggregation_0._scale) + + def test_reset(self): + + exponential_histogram_aggregation = ( + _ExponentialBucketHistogramAggregation( + Mock(), Mock(), max_size=256 + ) + ) + + for incr in [0x1, 0x100, 0x10000, 0x100000000, 0x200000000]: + exponential_histogram_aggregation._clear() + + self.assertEqual(0, exponential_histogram_aggregation._scale) + expect = 0 + + for index in range(2, 257): + expect += index * incr + exponential_histogram_aggregation._update_by_incr(index, incr) + + self.assertEqual(expect, exponential_histogram_aggregation._sum) + self.assertEqual( + 255 * incr, exponential_histogram_aggregation._count + ) + + # See test_integer_aggregation about why scale is 5, len is + # 256 - (1 << scale)- 1 and offset is (1 << scale) - 1. 
+ scale = exponential_histogram_aggregation._scale + self.assertEqual(5, scale) + + self.assertEqual( + 256 - ((1 << scale) - 1), + exponential_histogram_aggregation._positive.len(), + ) + self.assertEqual( + (1 << scale) - 1, + exponential_histogram_aggregation._positive.offset(), + ) + + for index in range(0, 256): + self.assertLessEqual( + exponential_histogram_aggregation._positive.at(index), + 6 * incr, + ) + + def test_move_into(self): + + exponential_histogram_aggregation_0 = ( + _ExponentialBucketHistogramAggregation( + Mock(), Mock(), max_size=256 + ) + ) + exponential_histogram_aggregation_1 = ( + _ExponentialBucketHistogramAggregation( + Mock(), Mock(), max_size=256 + ) + ) + + expect = 0 + + for index in range(2, 257): + expect += index + exponential_histogram_aggregation_0.aggregate( + Measurement(index, Mock()) + ) + exponential_histogram_aggregation_0.aggregate( + Measurement(0, Mock()) + ) + + exponential_histogram_aggregation_0._swap( + exponential_histogram_aggregation_1 + ) + + self.assertEqual(0, exponential_histogram_aggregation_0._sum) + self.assertEqual(0, exponential_histogram_aggregation_0._count) + self.assertEqual(0, exponential_histogram_aggregation_0._zero_count) + self.assertEqual(0, exponential_histogram_aggregation_0._scale) + + self.assertEqual(expect, exponential_histogram_aggregation_1._sum) + self.assertEqual(255 * 2, exponential_histogram_aggregation_1._count) + self.assertEqual(255, exponential_histogram_aggregation_1._zero_count) + + scale = exponential_histogram_aggregation_1._scale + self.assertEqual(5, scale) + + self.assertEqual( + 256 - ((1 << scale) - 1), + exponential_histogram_aggregation_1._positive.len(), + ) + self.assertEqual( + (1 << scale) - 1, + exponential_histogram_aggregation_1._positive.offset(), + ) + + for index in range(0, 256): + self.assertLessEqual( + exponential_histogram_aggregation_1._positive.at(index), 6 + ) + + def test_very_large_numbers(self): + + exponential_histogram_aggregation = ( + _ExponentialBucketHistogramAggregation(Mock(), Mock(), max_size=2) + ) + + def expect_balanced(count: int): + self.assertEqual( + 2, exponential_histogram_aggregation._positive.len() + ) + self.assertEqual( + -1, exponential_histogram_aggregation._positive.offset() + ) + self.assertEqual( + count, exponential_histogram_aggregation._positive.at(0) + ) + self.assertEqual( + count, exponential_histogram_aggregation._positive.at(1) + ) + + exponential_histogram_aggregation.aggregate( + Measurement(2**-100, Mock()) + ) + exponential_histogram_aggregation.aggregate( + Measurement(2**100, Mock()) + ) + + self.assertLessEqual( + 2**100, (exponential_histogram_aggregation._sum * (1 + 1e-5)) + ) + self.assertGreaterEqual( + 2**100, (exponential_histogram_aggregation._sum * (1 - 1e-5)) + ) + + self.assertEqual(2, exponential_histogram_aggregation._count) + self.assertEqual(-7, exponential_histogram_aggregation._scale) + + expect_balanced(1) + + exponential_histogram_aggregation.aggregate( + Measurement(2**-127, Mock()) + ) + exponential_histogram_aggregation.aggregate( + Measurement(2**128, Mock()) + ) + + self.assertLessEqual( + 2**128, (exponential_histogram_aggregation._sum * (1 + 1e-5)) + ) + self.assertGreaterEqual( + 2**128, (exponential_histogram_aggregation._sum * (1 - 1e-5)) + ) + + self.assertEqual(4, exponential_histogram_aggregation._count) + self.assertEqual(-7, exponential_histogram_aggregation._scale) + + expect_balanced(2) + + exponential_histogram_aggregation.aggregate( + Measurement(2**-129, Mock()) + ) + 
exponential_histogram_aggregation.aggregate( + Measurement(2**255, Mock()) + ) + + self.assertLessEqual( + 2**255, (exponential_histogram_aggregation._sum * (1 + 1e-5)) + ) + self.assertGreaterEqual( + 2**255, (exponential_histogram_aggregation._sum * (1 - 1e-5)) + ) + self.assertEqual(6, exponential_histogram_aggregation._count) + self.assertEqual(-8, exponential_histogram_aggregation._scale) + + expect_balanced(3) + + def test_full_range(self): + + exponential_histogram_aggregation = ( + _ExponentialBucketHistogramAggregation(Mock(), Mock(), max_size=2) + ) + + exponential_histogram_aggregation.aggregate( + Measurement(float_info.max, Mock()) + ) + exponential_histogram_aggregation.aggregate(Measurement(1, Mock())) + exponential_histogram_aggregation.aggregate( + Measurement(2**-1074, Mock()) + ) + + self.assertEqual( + float_info.max, exponential_histogram_aggregation._sum + ) + self.assertEqual(3, exponential_histogram_aggregation._count) + self.assertEqual( + ExponentMapping._min_scale, + exponential_histogram_aggregation._scale + ) + + self.assertEqual( + _ExponentialBucketHistogramAggregation._min_max_size, + exponential_histogram_aggregation._positive.len() + ) + self.assertEqual( + -1, exponential_histogram_aggregation._positive.offset() + ) + self.assertLessEqual( + exponential_histogram_aggregation._positive.at(0), 2 + ) + self.assertLessEqual( + exponential_histogram_aggregation._positive.at(1), 1 + ) + + def test_aggregator_min_max(self): + + exponential_histogram_aggregation = ( + _ExponentialBucketHistogramAggregation(Mock(), Mock()) + ) + + for value in [1, 3, 5, 7, 9]: + exponential_histogram_aggregation.aggregate( + Measurement(value, Mock()) + ) + + self.assertEqual(1, exponential_histogram_aggregation._min) + self.assertEqual(9, exponential_histogram_aggregation._max) + + exponential_histogram_aggregation = ( + _ExponentialBucketHistogramAggregation(Mock(), Mock()) + ) + + for value in [-1, -3, -5, -7, -9]: + exponential_histogram_aggregation.aggregate( + Measurement(value, Mock()) + ) + + self.assertEqual(-9, exponential_histogram_aggregation._min) + self.assertEqual(-1, exponential_histogram_aggregation._max) + + def test_aggregator_copy_swap(self): + + exponential_histogram_aggregation_0 = ( + _ExponentialBucketHistogramAggregation(Mock(), Mock()) + ) + for value in [1, 3, 5, 7, 9, -1, -3, -5]: + exponential_histogram_aggregation_0.aggregate( + Measurement(value, Mock()) + ) + exponential_histogram_aggregation_1 = ( + _ExponentialBucketHistogramAggregation(Mock(), Mock()) + ) + for value in [5, 4, 3, 2]: + exponential_histogram_aggregation_1.aggregate( + Measurement(value, Mock()) + ) + exponential_histogram_aggregation_2 = ( + _ExponentialBucketHistogramAggregation(Mock(), Mock()) + ) + + exponential_histogram_aggregation_0._swap( + exponential_histogram_aggregation_1 + ) + exponential_histogram_aggregation_1._copy_into( + exponential_histogram_aggregation_2 + ) + + self.require_equal( + exponential_histogram_aggregation_1, + exponential_histogram_aggregation_2, + ) + + def test_zero_count_by_incr(self): + + exponential_histogram_aggregation_0 = ( + _ExponentialBucketHistogramAggregation(Mock(), Mock()) + ) + for _ in range(10): + exponential_histogram_aggregation_0.aggregate( + Measurement(0, Mock()) + ) + exponential_histogram_aggregation_1 = ( + _ExponentialBucketHistogramAggregation(Mock(), Mock()) + ) + + exponential_histogram_aggregation_1._update_by_incr(0, 10) + + self.require_equal( + exponential_histogram_aggregation_0, + 
exponential_histogram_aggregation_1,
+        )
+
+    def test_boundary_statistics(self):
+
+        total = MAX_NORMAL_EXPONENT - MIN_NORMAL_EXPONENT + 1
+
+        for scale in range(
+            LogarithmMapping._min_scale, LogarithmMapping._max_scale + 1
+        ):
+
+            above = 0
+            below = 0
+
+            if scale <= 0:
+                mapping = ExponentMapping(scale)
+            else:
+                mapping = LogarithmMapping(scale)
+
+            for exp in range(MIN_NORMAL_EXPONENT, MAX_NORMAL_EXPONENT + 1):
+                value = ldexp(1, exp)
+
+                index = mapping.map_to_index(value)
+
+                try:
+                    boundary = mapping.get_lower_boundary(index + 1)
+                except Exception as error:
+                    self.fail(f"Unexpected exception {error} raised")
+
+                if boundary < value:
+                    above += 1
+                elif boundary > value:
+                    below += 1
+
+            self.assertInEpsilon(0.5, above / total, 0.05)
+            self.assertInEpsilon(0.5, below / total, 0.06)
+
+    def test_power_of_two_rounded_up(self):
+
+        power_of_two_rounded_up = (
+            _ExponentialBucketHistogramAggregation._power_of_two_rounded_up
+        )
+
+        self.assertEqual(power_of_two_rounded_up(2), 2)
+        self.assertEqual(power_of_two_rounded_up(4), 4)
+        self.assertEqual(power_of_two_rounded_up(8), 8)
+        self.assertEqual(power_of_two_rounded_up(16), 16)
+        self.assertEqual(power_of_two_rounded_up(32), 32)
+
+        self.assertEqual(power_of_two_rounded_up(3), 4)
+        self.assertEqual(power_of_two_rounded_up(5), 8)
+        self.assertEqual(power_of_two_rounded_up(9), 16)
+        self.assertEqual(power_of_two_rounded_up(17), 32)
+        self.assertEqual(power_of_two_rounded_up(33), 64)
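For reviewers who want to exercise the new aggregation by hand, a minimal sketch that mirrors test_alternating_growth_0 above; the Mock() arguments stand in for the attributes and the start timestamp, exactly as in the test file.

    from unittest.mock import Mock

    from opentelemetry.sdk.metrics._internal.exponential_histogram.aggregation import (
        _ExponentialBucketHistogramAggregation,
    )
    from opentelemetry.sdk.metrics._internal.measurement import Measurement

    aggregation = _ExponentialBucketHistogramAggregation(
        Mock(), Mock(), max_size=4
    )

    # Three values spanning three binary orders of magnitude force the
    # 4-bucket histogram to settle at scale 0 with offset -1.
    for value in (2, 4, 1):
        aggregation.aggregate(Measurement(value, Mock()))

    assert aggregation._scale == 0
    assert aggregation._positive.offset() == -1
    assert [aggregation._positive.at(i) for i in range(3)] == [1, 1, 1]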