diff --git a/opencensus/stats/aggregation.py b/opencensus/stats/aggregation.py
index bebe6ca66..f2bdc10ec 100644
--- a/opencensus/stats/aggregation.py
+++ b/opencensus/stats/aggregation.py
@@ -133,7 +133,7 @@ def __init__(
         self._boundaries = bucket_boundaries.BucketBoundaries(boundaries)
         self._distribution = distribution or {}
         self.aggregation_data = aggregation_data.DistributionAggregationData(
-            0, 0, 0, 0, 0, None, boundaries)
+            0, 0, float('inf'), float('-inf'), 0, None, boundaries)
 
     @property
     def boundaries(self):
diff --git a/opencensus/stats/aggregation_data.py b/opencensus/stats/aggregation_data.py
index 74c61ab4d..5e79ee9fe 100644
--- a/opencensus/stats/aggregation_data.py
+++ b/opencensus/stats/aggregation_data.py
@@ -11,6 +11,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+
 from opencensus.stats import bucket_boundaries
 
 
@@ -21,6 +22,7 @@ class BaseAggregationData(object):
     :param aggregation_data: represents the aggregated value from a collection
     """
+
     def __init__(self, aggregation_data):
         self._aggregation_data = aggregation_data
 
@@ -37,6 +39,7 @@ class SumAggregationDataFloat(BaseAggregationData):
     :param sum_data: represents the aggregated sum
     """
+
     def __init__(self, sum_data):
         super(SumAggregationDataFloat, self).__init__(sum_data)
         self._sum_data = sum_data
@@ -60,6 +63,7 @@ class CountAggregationData(BaseAggregationData):
     :param count_data: represents the aggregated count
     """
+
     def __init__(self, count_data):
         super(CountAggregationData, self).__init__(count_data)
         self._count_data = count_data
@@ -104,6 +108,7 @@ class DistributionAggregationData(BaseAggregationData):
     :param bounds: the histogram distribution of the values
     """
+
     def __init__(self,
                  mean_data,
                  count_data,
@@ -123,13 +128,14 @@ def __init__(self,
             bounds = []
         if counts_per_bucket is None:
-            counts_per_bucket = []
-            bucket_size = len(bounds) + 1
-            for i in range(bucket_size):
-                counts_per_bucket.append(0)
+            counts_per_bucket = [0 for ii in range(len(bounds) + 1)]
+        elif len(counts_per_bucket) != len(bounds) + 1:
+            raise ValueError("counts_per_bucket length does not match bounds "
+                             "length")
+
         self._counts_per_bucket = counts_per_bucket
         self._bounds = bucket_boundaries.BucketBoundaries(
-                boundaries=bounds).boundaries
+            boundaries=bounds).boundaries
         bucket = 0
         for _ in self.bounds:
             bucket = bucket + 1
@@ -207,30 +213,24 @@ def add_sample(self, value, timestamp, attachments):
 
         old_mean = self._mean_data
         self._mean_data = self._mean_data + (
-                (value - self._mean_data) / self._count_data)
+            (value - self._mean_data) / self._count_data)
         self._sum_of_sqd_deviations = self._sum_of_sqd_deviations + (
-                (value - old_mean) *
-                (value - self._mean_data))
+            (value - old_mean) * (value - self._mean_data))
 
     def increment_bucket_count(self, value):
         """Increment the bucket count based on a given value from the user"""
-        i = 0
-        incremented = False
-        for b in self._bounds:
-            if value < b and not incremented:
-                self._counts_per_bucket[i] += 1
-                incremented = True
-            i += 1
-
-        if incremented:
-            return i
-
         if len(self._bounds) == 0:
             self._counts_per_bucket[0] += 1
-            return i
+            return 0
 
-        self._counts_per_bucket[(len(self._bounds))-1] += 1
-        return i
+        for ii, bb in enumerate(self._bounds):
+            if value < bb:
+                self._counts_per_bucket[ii] += 1
+                return ii
+        else:
+            last_bucket_index = len(self._bounds)
+            self._counts_per_bucket[last_bucket_index] += 1
+            return last_bucket_index
 
 
 class
LastValueAggregationData(BaseAggregationData): @@ -241,6 +241,7 @@ class LastValueAggregationData(BaseAggregationData): :param value: represents the current value """ + def __init__(self, value): super(LastValueAggregationData, self).__init__(value) self._value = value @@ -271,10 +272,7 @@ class Exemplar(object): :param attachments: the contextual information about the example value. """ - def __init__(self, - value, - timestamp, - attachments): + def __init__(self, value, timestamp, attachments): self._value = value self._timestamp = timestamp diff --git a/tests/unit/stats/test_aggregation.py b/tests/unit/stats/test_aggregation.py index cc7e6bff3..71b2d546d 100644 --- a/tests/unit/stats/test_aggregation.py +++ b/tests/unit/stats/test_aggregation.py @@ -12,17 +12,18 @@ # See the License for the specific language governing permissions and # limitations under the License. +from datetime import datetime import unittest -import mock + from opencensus.stats import aggregation as aggregation_module class TestBaseAggregation(unittest.TestCase): - def test_constructor_defaults(self): base_aggregation = aggregation_module.BaseAggregation() - self.assertEqual(aggregation_module.Type.NONE, base_aggregation.aggregation_type) + self.assertEqual(aggregation_module.Type.NONE, + base_aggregation.aggregation_type) self.assertEqual([], base_aggregation.buckets) def test_constructor_explicit(self): @@ -30,17 +31,18 @@ def test_constructor_explicit(self): buckets = ["test"] base_aggregation = aggregation_module.BaseAggregation(buckets=buckets) - self.assertEqual(aggregation_module.Type.NONE, base_aggregation.aggregation_type) + self.assertEqual(aggregation_module.Type.NONE, + base_aggregation.aggregation_type) self.assertEqual(["test"], base_aggregation.buckets) class TestSumAggregation(unittest.TestCase): - def test_constructor_defaults(self): sum_aggregation = aggregation_module.SumAggregation() self.assertEqual(0, sum_aggregation.sum.sum_data) - self.assertEqual(aggregation_module.Type.SUM, sum_aggregation.aggregation_type) + self.assertEqual(aggregation_module.Type.SUM, + sum_aggregation.aggregation_type) def test_constructor_explicit(self): sum = 1 @@ -48,16 +50,17 @@ def test_constructor_explicit(self): sum_aggregation = aggregation_module.SumAggregation(sum=sum) self.assertEqual(1, sum_aggregation.sum.sum_data) - self.assertEqual(aggregation_module.Type.SUM, sum_aggregation.aggregation_type) + self.assertEqual(aggregation_module.Type.SUM, + sum_aggregation.aggregation_type) class TestCountAggregation(unittest.TestCase): - def test_constructor_defaults(self): count_aggregation = aggregation_module.CountAggregation() self.assertEqual(0, count_aggregation.count.count_data) - self.assertEqual(aggregation_module.Type.COUNT, count_aggregation.aggregation_type) + self.assertEqual(aggregation_module.Type.COUNT, + count_aggregation.aggregation_type) def test_constructor_explicit(self): count = 4 @@ -65,39 +68,57 @@ def test_constructor_explicit(self): count_aggregation = aggregation_module.CountAggregation(count=count) self.assertEqual(4, count_aggregation.count.count_data) - self.assertEqual(aggregation_module.Type.COUNT, count_aggregation.aggregation_type) + self.assertEqual(aggregation_module.Type.COUNT, + count_aggregation.aggregation_type) class TestLastValueAggregation(unittest.TestCase): - def test_constructor_defaults(self): last_value_aggregation = aggregation_module.LastValueAggregation() self.assertEqual(0, last_value_aggregation.value) - self.assertEqual(aggregation_module.Type.LASTVALUE, 
last_value_aggregation.aggregation_type) + self.assertEqual(aggregation_module.Type.LASTVALUE, + last_value_aggregation.aggregation_type) def test_constructor_explicit(self): val = 16 - last_value_aggregation = aggregation_module.LastValueAggregation(value=val) + last_value_aggregation = aggregation_module.LastValueAggregation( + value=val) self.assertEqual(16, last_value_aggregation.value) - self.assertEqual(aggregation_module.Type.LASTVALUE, last_value_aggregation.aggregation_type) + self.assertEqual(aggregation_module.Type.LASTVALUE, + last_value_aggregation.aggregation_type) class TestDistributionAggregation(unittest.TestCase): - def test_constructor_defaults(self): distribution_aggregation = aggregation_module.DistributionAggregation() self.assertEqual([], distribution_aggregation.boundaries.boundaries) self.assertEqual({}, distribution_aggregation.distribution) - self.assertEqual(aggregation_module.Type.DISTRIBUTION, distribution_aggregation.aggregation_type) + self.assertEqual(aggregation_module.Type.DISTRIBUTION, + distribution_aggregation.aggregation_type) def test_constructor_explicit(self): boundaries = ["test"] distribution = {1: "test"} - distribution_aggregation = aggregation_module.DistributionAggregation(boundaries=boundaries, distribution=distribution) + distribution_aggregation = aggregation_module.DistributionAggregation( + boundaries=boundaries, distribution=distribution) - self.assertEqual(["test"], distribution_aggregation.boundaries.boundaries) + self.assertEqual(["test"], + distribution_aggregation.boundaries.boundaries) self.assertEqual({1: "test"}, distribution_aggregation.distribution) - self.assertEqual(aggregation_module.Type.DISTRIBUTION, distribution_aggregation.aggregation_type) + self.assertEqual(aggregation_module.Type.DISTRIBUTION, + distribution_aggregation.aggregation_type) + + def test_min_max(self): + da = aggregation_module.DistributionAggregation([]) + + self.assertEqual(da.aggregation_data.min, float('inf')) + self.assertEqual(da.aggregation_data.max, float('-inf')) + + for dp in range(-10, 11): + da.aggregation_data.add_sample(dp, datetime(1999, 12, 31), {}) + + self.assertEqual(da.aggregation_data.min, -10) + self.assertEqual(da.aggregation_data.max, 10) diff --git a/tests/unit/stats/test_aggregation_data.py b/tests/unit/stats/test_aggregation_data.py index 2149357d5..30766e4c2 100644 --- a/tests/unit/stats/test_aggregation_data.py +++ b/tests/unit/stats/test_aggregation_data.py @@ -12,14 +12,15 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import unittest import time +import unittest + import mock + from opencensus.stats import aggregation_data as aggregation_data_module class TestBaseAggregationData(unittest.TestCase): - def test_constructor(self): aggregation_data = 0 base_aggregation_data = aggregation_data_module.BaseAggregationData( @@ -29,7 +30,6 @@ def test_constructor(self): class TestSumAggregationData(unittest.TestCase): - def test_constructor(self): sum_data = 1 sum_aggregation_data = aggregation_data_module.SumAggregationDataFloat( @@ -48,7 +48,6 @@ def test_add_sample(self): class TestCountAggregationData(unittest.TestCase): - def test_constructor(self): count_data = 0 count_aggregation_data = aggregation_data_module.CountAggregationData( @@ -64,44 +63,42 @@ def test_add_sample(self): self.assertEqual(1, count_aggregation_data.count_data) + class TestLastValueAggregationData(unittest.TestCase): - def test_constructor(self): value_data = 0 - last_value_aggregation_data = aggregation_data_module.LastValueAggregationData( - value=value_data) + last_value_aggregation_data =\ + aggregation_data_module.LastValueAggregationData(value=value_data) self.assertEqual(0, last_value_aggregation_data.value) def test_overwrite_sample(self): first_data = 0 - last_value_aggregation_data = aggregation_data_module.LastValueAggregationData( - value=first_data) + last_value_aggregation_data =\ + aggregation_data_module.LastValueAggregationData(value=first_data) self.assertEqual(0, last_value_aggregation_data.value) last_value_aggregation_data.add_sample(1, None, None) self.assertEqual(1, last_value_aggregation_data.value) class TestDistributionAggregationData(unittest.TestCase): - def test_constructor(self): mean_data = 1 count_data = 0 _min = 0 _max = 1 sum_of_sqd_deviations = mock.Mock() - counts_per_bucket = [1, 1, 1] - bounds = [0, 1/2, 1] + counts_per_bucket = [1, 1, 1, 1] + bounds = [0, 1.0 / 2.0, 1] dist_agg_data = aggregation_data_module.DistributionAggregationData( - mean_data=mean_data, - count_data=count_data, - min_= _min, - max_ = _max, - sum_of_sqd_deviations=sum_of_sqd_deviations, - counts_per_bucket=counts_per_bucket, - bounds=bounds - ) + mean_data=mean_data, + count_data=count_data, + min_=_min, + max_=_max, + sum_of_sqd_deviations=sum_of_sqd_deviations, + counts_per_bucket=counts_per_bucket, + bounds=bounds) self.assertEqual(1, dist_agg_data.mean_data) self.assertEqual(0, dist_agg_data.count_data) @@ -109,36 +106,59 @@ def test_constructor(self): self.assertEqual(1, dist_agg_data.max) self.assertEqual(sum_of_sqd_deviations, dist_agg_data.sum_of_sqd_deviations) - self.assertEqual([1, 1, 1], dist_agg_data.counts_per_bucket) - self.assertEqual([0, 1/2, 1], dist_agg_data.bounds) + self.assertEqual([1, 1, 1, 1], dist_agg_data.counts_per_bucket) + self.assertEqual([0, 1.0 / 2.0, 1], dist_agg_data.bounds) self.assertIsNotNone(dist_agg_data.sum) self.assertEqual(0, dist_agg_data.variance) + def test_init_bad_bucket_counts(self): + # Check that len(counts_per_bucket) == len(bounds) + 1 + with self.assertRaises(ValueError): + aggregation_data_module.DistributionAggregationData( + mean_data=mock.Mock(), + count_data=mock.Mock(), + min_=mock.Mock(), + max_=mock.Mock(), + sum_of_sqd_deviations=mock.Mock(), + counts_per_bucket=[0, 0, 0], + bounds=[0, 1, 2]) + + # And check that we don't throw given the right args + aggregation_data_module.DistributionAggregationData( + mean_data=mock.Mock(), + count_data=mock.Mock(), + min_=mock.Mock(), + max_=mock.Mock(), + sum_of_sqd_deviations=mock.Mock(), + counts_per_bucket=[0, 0, 0, 0], + 
bounds=[0, 1, 2]) + def test_constructor_with_exemplar(self): timestamp = time.time() attachments = {"One": "one", "Two": "two"} - exemplar_1 = aggregation_data_module.Exemplar(4, timestamp, attachments) - exemplar_2 = aggregation_data_module.Exemplar(5, timestamp, attachments) + exemplar_1 = aggregation_data_module.Exemplar(4, timestamp, + attachments) + exemplar_2 = aggregation_data_module.Exemplar(5, timestamp, + attachments) mean_data = 1 count_data = 0 _min = 0 _max = 1 sum_of_sqd_deviations = mock.Mock() - counts_per_bucket = [1, 1, 1] - bounds = [0, 1/2, 1] + counts_per_bucket = [1, 1, 1, 1] + bounds = [0, 1.0 / 2.0, 1] exemplars = [exemplar_1, exemplar_2] dist_agg_data = aggregation_data_module.DistributionAggregationData( - mean_data=mean_data, - count_data=count_data, - min_= _min, - max_ = _max, - sum_of_sqd_deviations=sum_of_sqd_deviations, - exemplars=exemplars, - counts_per_bucket=counts_per_bucket, - bounds=bounds - ) + mean_data=mean_data, + count_data=count_data, + min_=_min, + max_=_max, + sum_of_sqd_deviations=sum_of_sqd_deviations, + exemplars=exemplars, + counts_per_bucket=counts_per_bucket, + bounds=bounds) self.assertEqual(1, dist_agg_data.mean_data) self.assertEqual(0, dist_agg_data.count_data) @@ -146,9 +166,9 @@ def test_constructor_with_exemplar(self): self.assertEqual(1, dist_agg_data.max) self.assertEqual(sum_of_sqd_deviations, dist_agg_data.sum_of_sqd_deviations) - self.assertEqual([1, 1, 1], dist_agg_data.counts_per_bucket) + self.assertEqual([1, 1, 1, 1], dist_agg_data.counts_per_bucket) self.assertEqual([exemplar_1, exemplar_2], dist_agg_data.exemplars[3]) - self.assertEqual([0, 1/2, 1], dist_agg_data.bounds) + self.assertEqual([0, 1.0 / 2.0, 1], dist_agg_data.bounds) self.assertIsNotNone(dist_agg_data.sum) self.assertEqual(0, dist_agg_data.variance) @@ -165,35 +185,44 @@ def test_exemplar(self): def test_exemplar_null_attachments(self): timestamp = time.time() - with self.assertRaisesRegexp(TypeError, 'attachments should not be empty'): + with self.assertRaisesRegexp(TypeError, + 'attachments should not be empty'): aggregation_data_module.Exemplar(6, timestamp, None) def test_exemplar_null_attachment_key(self): timestamp = time.time() attachment = {None: "one", "Two": "two"} - with self.assertRaisesRegexp(TypeError, 'attachment key should not be empty and should be a string'): + with self.assertRaisesRegexp( + TypeError, + 'attachment key should not be empty and should be a string'): aggregation_data_module.Exemplar(6, timestamp, attachment) def test_exemplar_null_attachment_value(self): timestamp = time.time() attachment = {"One": "one", "Two": None} - with self.assertRaisesRegexp(TypeError, 'attachment value should not be empty and should be a string'): + with self.assertRaisesRegexp( + TypeError, + 'attachment value should not be empty and should be a string'): aggregation_data_module.Exemplar(6, timestamp, attachment) def test_exemplar_int_attachment_key(self): timestamp = time.time() attachment = {1: "one", "Two": "two"} - with self.assertRaisesRegexp(TypeError, 'attachment key should not be empty and should be a string'): + with self.assertRaisesRegexp( + TypeError, + 'attachment key should not be empty and should be a string'): aggregation_data_module.Exemplar(6, timestamp, attachment) def test_exemplar_int_attachment_value(self): timestamp = time.time() attachment = {"One": "one", "Two": 2} - with self.assertRaisesRegexp(TypeError, 'attachment value should not be empty and should be a string'): + with self.assertRaisesRegexp( + TypeError, + 
'attachment value should not be empty and should be a string'): aggregation_data_module.Exemplar(6, timestamp, attachment) def test_variance(self): @@ -202,17 +231,16 @@ def test_variance(self): _min = mock.Mock() _max = mock.Mock() sum_of_sqd_deviations = mock.Mock() - counts_per_bucket = [1, 1, 1] - bounds = [0, 1/2, 1] + counts_per_bucket = [1, 1, 1, 1] + bounds = [0, 1.0 / 2.0, 1] dist_agg_data = aggregation_data_module.DistributionAggregationData( mean_data=mean_data, count_data=count_data, - min_= _min, - max_ = _max, + min_=_min, + max_=_max, sum_of_sqd_deviations=sum_of_sqd_deviations, counts_per_bucket=counts_per_bucket, - bounds=bounds - ) + bounds=bounds) self.assertEqual(0, dist_agg_data.variance) count_data = 2 @@ -220,12 +248,11 @@ def test_variance(self): dist_agg_data = aggregation_data_module.DistributionAggregationData( mean_data=mean_data, count_data=count_data, - min_= _min, - max_ = _max, + min_=_min, + max_=_max, sum_of_sqd_deviations=sum_of_sqd_deviations, counts_per_bucket=counts_per_bucket, - bounds=bounds - ) + bounds=bounds) self.assertEqual(2.0, dist_agg_data.variance) def test_add_sample(self): @@ -234,7 +261,7 @@ def test_add_sample(self): _min = 0 _max = 1 sum_of_sqd_deviations = 2 - counts_per_bucket = [1, 1, 1, 1] + counts_per_bucket = [1, 1, 1, 1, 1] bounds = [0, 0.5, 1, 1.5] value = 3 @@ -242,12 +269,11 @@ def test_add_sample(self): dist_agg_data = aggregation_data_module.DistributionAggregationData( mean_data=mean_data, count_data=count_data, - min_= _min, - max_ = _max, + min_=_min, + max_=_max, sum_of_sqd_deviations=sum_of_sqd_deviations, counts_per_bucket=counts_per_bucket, - bounds=bounds - ) + bounds=bounds) dist_agg_data.add_sample(value, None, None) self.assertEqual(0, dist_agg_data.min) @@ -259,12 +285,11 @@ def test_add_sample(self): dist_agg_data = aggregation_data_module.DistributionAggregationData( mean_data=mean_data, count_data=count_data, - min_= _min, - max_ = _max, + min_=_min, + max_=_max, sum_of_sqd_deviations=sum_of_sqd_deviations, counts_per_bucket=counts_per_bucket, - bounds=bounds - ) + bounds=bounds) dist_agg_data.add_sample(value, None, None) self.assertEqual(2, dist_agg_data.count_data) @@ -282,26 +307,26 @@ def test_add_sample_attachment(self): _min = 0 _max = 1 sum_of_sqd_deviations = 2 - counts_per_bucket = [1, 1, 1, 1] + counts_per_bucket = [1, 1, 1, 1, 1] bounds = [0, 0.5, 1, 1.5] value = 3 timestamp = time.time() attachments = {"One": "one", "Two": "two"} - exemplar_1 = aggregation_data_module.Exemplar(4, timestamp, attachments) + exemplar_1 = aggregation_data_module.Exemplar(4, timestamp, + attachments) dist_agg_data = aggregation_data_module.DistributionAggregationData( mean_data=mean_data, count_data=count_data, - min_= _min, - max_ = _max, + min_=_min, + max_=_max, sum_of_sqd_deviations=sum_of_sqd_deviations, counts_per_bucket=counts_per_bucket, bounds=bounds, - exemplars=exemplar_1 - ) + exemplars=exemplar_1) - self.assertEqual({4:exemplar_1}, dist_agg_data.exemplars) + self.assertEqual({4: exemplar_1}, dist_agg_data.exemplars) dist_agg_data.add_sample(value, timestamp, attachments) self.assertEqual(0, dist_agg_data.min) @@ -314,19 +339,18 @@ def test_add_sample_attachment(self): dist_agg_data = aggregation_data_module.DistributionAggregationData( mean_data=mean_data, count_data=count_data, - min_= _min, - max_ = _max, + min_=_min, + max_=_max, sum_of_sqd_deviations=sum_of_sqd_deviations, - counts_per_bucket=[2, 1, 2, 1, 1], - bounds=[1, 2, 3, 4, 5] - ) + counts_per_bucket=[2, 1, 2, 1, 1, 1], + bounds=[1, 2, 3, 4, 5]) 
dist_agg_data.add_sample(value, timestamp, attachments) self.assertEqual(5, dist_agg_data.count_data) self.assertEqual(1.4, dist_agg_data.mean_data) self.assertEqual(5.2, dist_agg_data.sum_of_sqd_deviations) self.assertIsNot(0, dist_agg_data.count_data) - self.assertEqual(3, dist_agg_data.exemplars[5].value) + self.assertEqual(3, dist_agg_data.exemplars[3].value) def test_increment_bucket_count(self): mean_data = mock.Mock() @@ -342,43 +366,40 @@ def test_increment_bucket_count(self): dist_agg_data = aggregation_data_module.DistributionAggregationData( mean_data=mean_data, count_data=count_data, - min_= _min, - max_ = _max, + min_=_min, + max_=_max, sum_of_sqd_deviations=sum_of_sqd_deviations, counts_per_bucket=counts_per_bucket, - bounds=bounds - ) + bounds=bounds) dist_agg_data.increment_bucket_count(value=value) self.assertEqual([1], dist_agg_data.counts_per_bucket) - counts_per_bucket = [1, 1] - bounds = [1/4, 3/2] + counts_per_bucket = [1, 1, 1] + bounds = [1.0 / 4.0, 3.0 / 2.0] dist_agg_data = aggregation_data_module.DistributionAggregationData( mean_data=mean_data, count_data=count_data, - min_= _min, - max_ = _max, + min_=_min, + max_=_max, sum_of_sqd_deviations=sum_of_sqd_deviations, counts_per_bucket=counts_per_bucket, - bounds=bounds - ) + bounds=bounds) dist_agg_data.increment_bucket_count(value=value) - self.assertEqual([1, 2], dist_agg_data.counts_per_bucket) + self.assertEqual([1, 2, 1], dist_agg_data.counts_per_bucket) - bounds = [1/4, 1/2] + bounds = [1.0 / 4.0, 1.0 / 2.0] dist_agg_data = aggregation_data_module.DistributionAggregationData( mean_data=mean_data, count_data=count_data, - min_= _min, - max_ = _max, + min_=_min, + max_=_max, sum_of_sqd_deviations=sum_of_sqd_deviations, counts_per_bucket=counts_per_bucket, - bounds=bounds - ) + bounds=bounds) dist_agg_data.increment_bucket_count(value=value) - self.assertEqual([1, 3], dist_agg_data.counts_per_bucket) + self.assertEqual([1, 2, 2], dist_agg_data.counts_per_bucket) diff --git a/tests/unit/stats/test_measure_to_view_map.py b/tests/unit/stats/test_measure_to_view_map.py index d96d77ab2..738bf9133 100644 --- a/tests/unit/stats/test_measure_to_view_map.py +++ b/tests/unit/stats/test_measure_to_view_map.py @@ -13,15 +13,14 @@ # limitations under the License. 
import unittest + import mock -import logging -from datetime import datetime -from opencensus.stats.view import View -from opencensus.stats.view_data import ViewData -from opencensus.stats.measurement import Measurement + +from opencensus.stats import measure_to_view_map as measure_to_view_map_module from opencensus.stats.measure import BaseMeasure from opencensus.stats.measure import MeasureInt -from opencensus.stats import measure_to_view_map as measure_to_view_map_module +from opencensus.stats.view import View +from opencensus.stats.view_data import ViewData class TestMeasureToViewMap(unittest.TestCase): @@ -98,7 +97,6 @@ def test_filter_exported_views(self): columns=columns, measure=measure, aggregation=aggregation) - print("test view 1", test_view_1) test_view_2_name = "testView2" test_view_2 = View( @@ -107,13 +105,10 @@ def test_filter_exported_views(self): columns=columns, measure=measure, aggregation=aggregation) - print("test view 2", test_view_2) all_the_views = {test_view_1, test_view_2} - print("all the views", all_the_views) measure_to_view_map = measure_to_view_map_module.MeasureToViewMap() views = measure_to_view_map.filter_exported_views( all_views=all_the_views) - print("filtered views", views) self.assertEqual(views, all_the_views) def test_register_view(self): @@ -139,9 +134,8 @@ def test_register_view(self): view) self.assertEqual( measure_to_view_map._registered_measures[measure.name], measure) - self.assertIsNotNone( - measure_to_view_map._measure_to_view_data_list_map[ - view.measure.name]) + self.assertIsNotNone(measure_to_view_map. + _measure_to_view_data_list_map[view.measure.name]) # Registers a view with an existing measure. view2 = View( @@ -154,10 +148,11 @@ def test_register_view(self): view=view2, timestamp=timestamp) self.assertIsNone(test_with_registered_measures) self.assertEqual( - measure_to_view_map._registered_measures[measure.name], measure) + measure_to_view_map._registered_measures[measure.name], measure) - # Registers a view with a measure that has the same name as an existing measure, - # but with different schema. measure2 and view3 should be ignored. + # Registers a view with a measure that has the same name as an existing + # measure, but with different schema. measure2 and view3 should be + # ignored. measure2 = MeasureInt("measure", "another measure", "ms") view3 = View( name="testView3", @@ -169,7 +164,7 @@ def test_register_view(self): view=view3, timestamp=timestamp) self.assertIsNone(test_with_registered_measures) self.assertEqual( - measure_to_view_map._registered_measures[measure2.name], measure) + measure_to_view_map._registered_measures[measure2.name], measure) measure_to_view_map._registered_measures = {measure.name: None} self.assertIsNone( @@ -178,17 +173,15 @@ def test_register_view(self): # view is already registered, measure will not be registered again. self.assertIsNone( measure_to_view_map._registered_measures.get(measure.name)) - self.assertIsNotNone( - measure_to_view_map._measure_to_view_data_list_map[ - view.measure.name]) + self.assertIsNotNone(measure_to_view_map. + _measure_to_view_data_list_map[view.measure.name]) measure_to_view_map._registered_views = {name: view} test_result_1 = measure_to_view_map.register_view( view=view, timestamp=timestamp) self.assertIsNone(test_result_1) - self.assertIsNotNone( - measure_to_view_map._measure_to_view_data_list_map[ - view.measure.name]) + self.assertIsNotNone(measure_to_view_map. 
+ _measure_to_view_data_list_map[view.measure.name]) def test_register_view_with_exporter(self): exporter = mock.Mock() @@ -214,9 +207,8 @@ def test_register_view_with_exporter(self): view) self.assertEqual( measure_to_view_map._registered_measures[measure.name], measure) - self.assertIsNotNone( - measure_to_view_map._measure_to_view_data_list_map[ - view.measure.name]) + self.assertIsNotNone(measure_to_view_map. + _measure_to_view_data_list_map[view.measure.name]) def test_record(self): measure_name = "test_measure" @@ -229,12 +221,11 @@ def test_record(self): view_columns = ["testTag1", "testColumn2"] view_measure = measure view_aggregation = mock.Mock() - view = View( - name=view_name, - description=view_description, - columns=view_columns, - measure=view_measure, - aggregation=view_aggregation) + View(name=view_name, + description=view_description, + columns=view_columns, + measure=view_measure, + aggregation=view_aggregation) measure_value = 5 tags = {"testTag1": "testTag1Value"} @@ -244,7 +235,10 @@ def test_record(self): measure_to_view_map = measure_to_view_map_module.MeasureToViewMap() measure_to_view_map._registered_measures = {} record = measure_to_view_map.record( - tags=tags, measurement_map=measurement_map, timestamp=timestamp, attachments=None) + tags=tags, + measurement_map=measurement_map, + timestamp=timestamp, + attachments=None) self.assertNotEqual( measure, measure_to_view_map._registered_measures.get(measure.name)) @@ -253,7 +247,10 @@ def test_record(self): measure_to_view_map._registered_measures = {measure.name: measure} measure_to_view_map._measure_to_view_data_list_map = {} record = measure_to_view_map.record( - tags=tags, measurement_map=measurement_map, timestamp=timestamp, attachments=None) + tags=tags, + measurement_map=measurement_map, + timestamp=timestamp, + attachments=None) self.assertEqual( measure, measure_to_view_map._registered_measures.get(measure.name)) @@ -263,7 +260,10 @@ def test_record(self): measure.name: [mock.Mock()] } measure_to_view_map.record( - tags=tags, measurement_map=measurement_map, timestamp=timestamp, attachments=None) + tags=tags, + measurement_map=measurement_map, + timestamp=timestamp, + attachments=None) self.assertEqual( measure, measure_to_view_map._registered_measures.get(measure.name)) @@ -274,9 +274,12 @@ def test_record(self): "testing": [mock.Mock()] } measure_to_view_map.record( - tags=tags, measurement_map=measurement_map, timestamp=timestamp, attachments=None) - self.assertTrue(measure.name not in - measure_to_view_map._measure_to_view_data_list_map) + tags=tags, + measurement_map=measurement_map, + timestamp=timestamp, + attachments=None) + self.assertTrue(measure.name not in measure_to_view_map. 
+ _measure_to_view_data_list_map) measure_to_view_map_mock = mock.Mock() measure_to_view_map = measure_to_view_map_mock @@ -294,41 +297,10 @@ def test_record(self): measurement_map = {} measure_to_view_map = measure_to_view_map_module.MeasureToViewMap() record = measure_to_view_map.record( - tags=tags, measurement_map=measurement_map, timestamp=timestamp, attachments=None) - self.assertIsNone(record) - - def test_record_with_exporter(self): - exporter = mock.Mock() - measure_name = "test_measure" - measure_description = "test_description" - measure = BaseMeasure( - name=measure_name, description=measure_description) - - view_name = "test_view" - view_description = "test_description" - view_columns = ["testTag1", "testColumn2"] - view_measure = measure - view_aggregation = mock.Mock() - view = View( - name=view_name, - description=view_description, - columns=view_columns, - measure=view_measure, - aggregation=view_aggregation) - - measure_value = 5 - tags = {"testTag1": "testTag1Value"} - measurement_map = {measure: measure_value} - timestamp = mock.Mock() - - measure_to_view_map = measure_to_view_map_module.MeasureToViewMap() - measure_to_view_map.exporters.append(exporter) - measure_to_view_map._registered_measures = {} - record = measure_to_view_map.record( - tags=tags, measurement_map=measurement_map, timestamp=timestamp) - self.assertNotEqual( - measure, - measure_to_view_map._registered_measures.get(measure.name)) + tags=tags, + measurement_map=measurement_map, + timestamp=timestamp, + attachments=None) self.assertIsNone(record) def test_record_with_exporter(self): @@ -343,7 +315,7 @@ def test_record_with_exporter(self): view_columns = ["testTag1", "testColumn2"] view_measure = measure view_aggregation = mock.Mock() - view = View( + View( name=view_name, description=view_description, columns=view_columns, @@ -378,18 +350,13 @@ def test_export(self): view_columns = ["testTag1", "testColumn2"] view_measure = measure view_aggregation = mock.Mock() - view = View( + View( name=view_name, description=view_description, columns=view_columns, measure=view_measure, aggregation=view_aggregation) - measure_value = 5 - tags = {"testTag1": "testTag1Value"} - measurement_map = {measure: measure_value} - timestamp = mock.Mock() - measure_to_view_map = measure_to_view_map_module.MeasureToViewMap() measure_to_view_map.exporters.append(exporter) measure_to_view_map._registered_measures = {} diff --git a/tests/unit/stats/test_view_data.py b/tests/unit/stats/test_view_data.py index b472f4e77..c442d1e8d 100644 --- a/tests/unit/stats/test_view_data.py +++ b/tests/unit/stats/test_view_data.py @@ -22,12 +22,12 @@ class TestViewData(unittest.TestCase): - def test_constructor(self): view = mock.Mock() start_time = datetime.utcnow() end_time = datetime.utcnow() - view_data = view_data_module.ViewData(view=view, start_time=start_time, end_time=end_time) + view_data = view_data_module.ViewData( + view=view, start_time=start_time, end_time=end_time) self.assertEqual(view, view_data.view) self.assertEqual(start_time, view_data.start_time) @@ -38,9 +38,8 @@ def test_start(self): view = mock.Mock() start_time = mock.Mock() end_time = datetime.utcnow() - view_data = view_data_module.ViewData(view=view, - start_time=start_time, - end_time=end_time) + view_data = view_data_module.ViewData( + view=view, start_time=start_time, end_time=end_time) view_data.start() self.assertIsNotNone(view_data.start_time) @@ -49,9 +48,8 @@ def test_end(self): view = mock.Mock() start_time = datetime.utcnow() end_time = mock.Mock() - 
view_data = view_data_module.ViewData(view=view, - start_time=start_time, - end_time=end_time) + view_data = view_data_module.ViewData( + view=view, start_time=start_time, end_time=end_time) view_data.end() self.assertIsNotNone(view_data.end_time) @@ -60,9 +58,8 @@ def test_get_tag_values(self): view = mock.Mock() start_time = datetime.utcnow() end_time = datetime.utcnow() - view_data = view_data_module.ViewData(view=view, - start_time=start_time, - end_time=end_time) + view_data = view_data_module.ViewData( + view=view, start_time=start_time, end_time=end_time) tags = {'testTag1': 'testVal1'} columns = ['testTag1'] @@ -80,9 +77,8 @@ def test_record(self): view.aggregation = mock.Mock() start_time = datetime.utcnow() end_time = datetime.utcnow() - view_data = view_data_module.ViewData(view=view, - start_time=start_time, - end_time=end_time) + view_data = view_data_module.ViewData( + view=view, start_time=start_time, end_time=end_time) context = mock.Mock() context.map = {'key1': 'val1', 'key2': 'val2'} @@ -98,49 +94,63 @@ def test_record(self): self.assertIsNotNone(view_data.tag_value_aggregation_data_map) self.assertTrue(tuple_vals in view_data.tag_value_aggregation_data_map) - self.assertIsNotNone(view_data.tag_value_aggregation_data_map[tuple_vals]) - self.assertIsNotNone(view_data.tag_value_aggregation_data_map.get( - tuple_vals).add(value)) + self.assertIsNotNone( + view_data.tag_value_aggregation_data_map[tuple_vals]) + self.assertIsNotNone( + view_data.tag_value_aggregation_data_map.get(tuple_vals).add( + value)) view_data.record(context=context, value=value, timestamp=time) tag_values.append('val2') tuple_vals_2 = tuple(['val2']) - self.assertFalse(tuple_vals_2 in view_data.tag_value_aggregation_data_map) - view_data.tag_value_aggregation_data_map[tuple_vals_2] = view.aggregation - self.assertEqual(view_data.tag_value_aggregation_data_map.get(tuple_vals_2), - view_data.view.aggregation) - self.assertIsNotNone(view_data.tag_value_aggregation_data_map.get( - tuple_vals_2).add(value)) + self.assertFalse( + tuple_vals_2 in view_data.tag_value_aggregation_data_map) + view_data.tag_value_aggregation_data_map[ + tuple_vals_2] = view.aggregation + self.assertEqual( + view_data.tag_value_aggregation_data_map.get(tuple_vals_2), + view_data.view.aggregation) + self.assertIsNotNone( + view_data.tag_value_aggregation_data_map.get(tuple_vals_2).add( + value)) def test_record_with_attachment(self): boundaries = [1, 2, 3] distribution = {1: "test"} - distribution_aggregation = aggregation_module.DistributionAggregation(boundaries=boundaries, - distribution=distribution) + distribution_aggregation = aggregation_module.DistributionAggregation( + boundaries=boundaries, distribution=distribution) name = "testName" description = "testMeasure" unit = "testUnit" - measure = measure_module.MeasureInt(name=name, description=description, unit=unit) + measure = measure_module.MeasureInt( + name=name, description=description, unit=unit) description = "testMeasure" columns = ["key1", "key2"] - view = view_module.View(name=name, description=description, columns=columns, measure=measure, - aggregation=distribution_aggregation) + view = view_module.View( + name=name, + description=description, + columns=columns, + measure=measure, + aggregation=distribution_aggregation) start_time = datetime.utcnow() attachments = {"One": "one", "Two": "two"} end_time = datetime.utcnow() - view_data = view_data_module.ViewData(view=view, - start_time=start_time, - end_time=end_time) + view_data = view_data_module.ViewData( + 
view=view, start_time=start_time, end_time=end_time) context = mock.Mock context.map = {'key1': 'val1', 'key2': 'val2'} time = datetime.utcnow().isoformat() + 'Z' value = 1 - view_data.record(context=context, value=value, timestamp=time, attachments=attachments) + view_data.record( + context=context, + value=value, + timestamp=time, + attachments=attachments) tag_values = view_data.get_tag_values( tags=context.map, columns=view.columns) tuple_vals = tuple(tag_values) @@ -148,38 +158,48 @@ def test_record_with_attachment(self): self.assertEqual(['val1', 'val2'], tag_values) self.assertIsNotNone(view_data.tag_value_aggregation_data_map) self.assertTrue(tuple_vals in view_data.tag_value_aggregation_data_map) - self.assertIsNotNone(view_data.tag_value_aggregation_data_map[tuple_vals]) - self.assertEqual(attachments, view_data. - tag_value_aggregation_data_map[tuple_vals].exemplars[3].attachments) + self.assertIsNotNone( + view_data.tag_value_aggregation_data_map[tuple_vals]) + self.assertEqual( + attachments, view_data.tag_value_aggregation_data_map[tuple_vals]. + exemplars[1].attachments) def test_record_with_attachment_no_histogram(self): boundaries = None distribution = {1: "test"} - distribution_aggregation = aggregation_module.DistributionAggregation(boundaries=boundaries, - distribution=distribution) + distribution_aggregation = aggregation_module.DistributionAggregation( + boundaries=boundaries, distribution=distribution) name = "testName" description = "testMeasure" unit = "testUnit" - measure = measure_module.MeasureInt(name=name, description=description, unit=unit) + measure = measure_module.MeasureInt( + name=name, description=description, unit=unit) description = "testMeasure" columns = ["key1", "key2"] - view = view_module.View(name=name, description=description, columns=columns, measure=measure, - aggregation=distribution_aggregation) + view = view_module.View( + name=name, + description=description, + columns=columns, + measure=measure, + aggregation=distribution_aggregation) start_time = datetime.utcnow() attachments = {"One": "one", "Two": "two"} end_time = datetime.utcnow() - view_data = view_data_module.ViewData(view=view, - start_time=start_time, - end_time=end_time) + view_data = view_data_module.ViewData( + view=view, start_time=start_time, end_time=end_time) context = mock.Mock context.map = {'key1': 'val1', 'key2': 'val2'} time = datetime.utcnow().isoformat() + 'Z' value = 1 - view_data.record(context=context, value=value, timestamp=time, attachments=attachments) + view_data.record( + context=context, + value=value, + timestamp=time, + attachments=attachments) tag_values = view_data.get_tag_values( tags=context.map, columns=view.columns) tuple_vals = tuple(tag_values) @@ -187,34 +207,36 @@ def test_record_with_attachment_no_histogram(self): self.assertEqual(['val1', 'val2'], tag_values) self.assertIsNotNone(view_data.tag_value_aggregation_data_map) self.assertTrue(tuple_vals in view_data.tag_value_aggregation_data_map) - self.assertIsNotNone(view_data.tag_value_aggregation_data_map[tuple_vals]) - self.assertIsNone(view_data. 
- tag_value_aggregation_data_map[tuple_vals].exemplars) + self.assertIsNotNone( + view_data.tag_value_aggregation_data_map[tuple_vals]) + self.assertIsNone( + view_data.tag_value_aggregation_data_map[tuple_vals].exemplars) def test_record_with_multi_keys(self): measure = mock.Mock() sum_aggregation = aggregation_module.SumAggregation() - view = view_module.View( - "test_view", "description", ['key1', 'key2'], measure, sum_aggregation) + view = view_module.View("test_view", "description", ['key1', 'key2'], + measure, sum_aggregation) start_time = datetime.utcnow() end_time = datetime.utcnow() - view_data = view_data_module.ViewData(view=view, - start_time=start_time, - end_time=end_time) + view_data = view_data_module.ViewData( + view=view, start_time=start_time, end_time=end_time) context = mock.Mock() context.map = {'key1': 'val1', 'key2': 'val2'} time = datetime.utcnow().isoformat() + 'Z' value = 1 self.assertEqual({}, view_data.tag_value_aggregation_data_map) - view_data.record(context=context, value=value, timestamp=time, attachments=None) + view_data.record( + context=context, value=value, timestamp=time, attachments=None) tag_values = view_data.get_tag_values( tags=context.map, columns=view.columns) tuple_vals = tuple(tag_values) self.assertEqual(['val1', 'val2'], tag_values) self.assertIsNotNone(view_data.tag_value_aggregation_data_map) self.assertTrue(tuple_vals in view_data.tag_value_aggregation_data_map) - self.assertIsNotNone(view_data.tag_value_aggregation_data_map[tuple_vals]) + self.assertIsNotNone( + view_data.tag_value_aggregation_data_map[tuple_vals]) sum_data = view_data.tag_value_aggregation_data_map.get(tuple_vals) self.assertEqual(1, sum_data.sum_data) @@ -222,12 +244,17 @@ def test_record_with_multi_keys(self): context_2.map = {'key1': 'val3', 'key2': 'val2'} time_2 = datetime.utcnow().isoformat() + 'Z' value_2 = 2 - view_data.record(context=context_2, value=value_2, timestamp=time_2, attachments=None) + view_data.record( + context=context_2, + value=value_2, + timestamp=time_2, + attachments=None) tag_values_2 = view_data.get_tag_values( tags=context_2.map, columns=view.columns) tuple_vals_2 = tuple(tag_values_2) self.assertEqual(['val3', 'val2'], tag_values_2) - self.assertTrue(tuple_vals_2 in view_data.tag_value_aggregation_data_map) + self.assertTrue( + tuple_vals_2 in view_data.tag_value_aggregation_data_map) sum_data_2 = view_data.tag_value_aggregation_data_map.get(tuple_vals_2) self.assertEqual(2, sum_data_2.sum_data) @@ -235,7 +262,8 @@ def test_record_with_multi_keys(self): value_3 = 3 # Use the same context {'key1': 'val1', 'key2': 'val2'}. # Record to entry [(val1, val2), sum=1]. - view_data.record(context=context, value=value_3, timestamp=time_3, attachments=None) + view_data.record( + context=context, value=value_3, timestamp=time_3, attachments=None) self.assertEqual(4, sum_data.sum_data) # The other entry should remain unchanged. 
self.assertEqual(2, sum_data_2.sum_data) @@ -243,18 +271,21 @@ def test_record_with_multi_keys(self): def test_record_with_missing_key_in_context(self): measure = mock.Mock() sum_aggregation = aggregation_module.SumAggregation() - view = view_module.View( - "test_view", "description", ['key1', 'key2'], measure, sum_aggregation) + view = view_module.View("test_view", "description", ['key1', 'key2'], + measure, sum_aggregation) start_time = datetime.utcnow() end_time = datetime.utcnow() - view_data = view_data_module.ViewData(view=view, - start_time=start_time, - end_time=end_time) + view_data = view_data_module.ViewData( + view=view, start_time=start_time, end_time=end_time) context = mock.Mock() - context.map = {'key1': 'val1', 'key3': 'val3'} # key2 is not in the context. + context.map = { + 'key1': 'val1', + 'key3': 'val3' + } # key2 is not in the context. time = datetime.utcnow().isoformat() + 'Z' value = 4 - view_data.record(context=context, value=value, timestamp=time, attachments=None) + view_data.record( + context=context, value=value, timestamp=time, attachments=None) tag_values = view_data.get_tag_values( tags=context.map, columns=view.columns) tuple_vals = tuple(tag_values)
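
Note on the `DistributionAggregation` default above: seeding an empty distribution with `min=float('inf')` and `max=float('-inf')` (instead of `0, 0`) means the first recorded sample always becomes both the running min and max. A minimal sketch of that invariant, not the library code, assuming `add_sample` lowers or raises the stored extremes whenever a sample falls outside them:

```python
# Sketch only: the identity elements of min()/max() make the seed values
# disappear as soon as the first sample arrives.
running_min, running_max = float('inf'), float('-inf')
for sample in range(-10, 11):        # same samples as test_min_max
    running_min = min(running_min, sample)
    running_max = max(running_max, sample)
assert (running_min, running_max) == (-10, 10)
```

With the old `0, 0` seed, a distribution that only ever saw positive samples would report a minimum of 0, and one that only saw negative samples would report a maximum of 0, which is what `test_min_max` guards against.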
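For reference, the rewritten `increment_bucket_count` pairs with the new constructor guard: a histogram with `N` finite bounds always carries `N + 1` counters (hence the `ValueError` when `len(counts_per_bucket) != len(bounds) + 1`), a value below `bounds[i]` lands in bucket `i`, and anything at or above the last bound goes to the overflow bucket at index `N`. The `else:` on the `for` loop only runs when the loop finishes without returning, so it behaves like straight-line fall-through code here. A standalone sketch of the indexing rule (hypothetical helper, mirroring the patched method):

```python
def bucket_index(bounds, value):
    """Return the histogram bucket a value falls into (len(bounds) + 1 buckets)."""
    if len(bounds) == 0:
        return 0                      # single catch-all bucket
    for ii, bb in enumerate(bounds):
        if value < bb:
            return ii                 # first bound the value is below
    return len(bounds)                # overflow bucket

assert bucket_index([], 42) == 0
assert bucket_index([0, 0.5, 1, 1.5], 0.7) == 2
assert bucket_index([1, 2, 3, 4, 5], 3) == 3   # matches exemplars[3] in the tests
assert bucket_index([0, 0.5, 1, 1.5], 3) == 4  # overflow
```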