From 36b3633acb4726247965da549f060be7043335cb Mon Sep 17 00:00:00 2001
From: Tyler Yahn
Date: Thu, 10 Aug 2023 13:23:20 -0700
Subject: [PATCH] Decouple the internal/aggregate from aggregation pkg

---
 sdk/metric/internal/aggregate/aggregate.go    |  9 ++---
 .../aggregate/exponential_histogram.go        |  9 ++---
 .../aggregate/exponential_histogram_test.go   | 39 ++++++++++---------
 sdk/metric/internal/aggregate/histogram.go    |  7 ++--
 .../internal/aggregate/histogram_test.go      | 24 +++++-------
 5 files changed, 42 insertions(+), 46 deletions(-)

diff --git a/sdk/metric/internal/aggregate/aggregate.go b/sdk/metric/internal/aggregate/aggregate.go
index 6dd531d1cbb1..8dec14237b9b 100644
--- a/sdk/metric/internal/aggregate/aggregate.go
+++ b/sdk/metric/internal/aggregate/aggregate.go
@@ -19,7 +19,6 @@ import (
 	"time"
 
 	"go.opentelemetry.io/otel/attribute"
-	"go.opentelemetry.io/otel/sdk/metric/aggregation"
 	"go.opentelemetry.io/otel/sdk/metric/metricdata"
 )
 
@@ -102,8 +101,8 @@ func (b Builder[N]) Sum(monotonic bool) (Measure[N], ComputeAggregation) {
 
 // ExplicitBucketHistogram returns a histogram aggregate function input and
 // output.
-func (b Builder[N]) ExplicitBucketHistogram(cfg aggregation.ExplicitBucketHistogram, noSum bool) (Measure[N], ComputeAggregation) {
-	h := newHistogram[N](cfg, noSum)
+func (b Builder[N]) ExplicitBucketHistogram(boundaries []float64, noMinMax, noSum bool) (Measure[N], ComputeAggregation) {
+	h := newHistogram[N](boundaries, noMinMax, noSum)
 	switch b.Temporality {
 	case metricdata.DeltaTemporality:
 		return b.filter(h.measure), h.delta
@@ -114,8 +113,8 @@ func (b Builder[N]) ExplicitBucketHistogram(cfg aggregation.ExplicitBucketHistog
 
 // ExponentialBucketHistogram returns a histogram aggregate function input and
 // output.
-func (b Builder[N]) ExponentialBucketHistogram(cfg aggregation.Base2ExponentialHistogram, noSum bool) (Measure[N], ComputeAggregation) {
-	h := newExponentialHistogram[N](cfg, noSum)
+func (b Builder[N]) ExponentialBucketHistogram(maxSize, maxScale int32, noMinMax, noSum bool) (Measure[N], ComputeAggregation) {
+	h := newExponentialHistogram[N](maxSize, maxScale, noMinMax, noSum)
 	switch b.Temporality {
 	case metricdata.DeltaTemporality:
 		return b.filter(h.measure), h.delta
diff --git a/sdk/metric/internal/aggregate/exponential_histogram.go b/sdk/metric/internal/aggregate/exponential_histogram.go
index c684c87e600a..b46c100de4bd 100644
--- a/sdk/metric/internal/aggregate/exponential_histogram.go
+++ b/sdk/metric/internal/aggregate/exponential_histogram.go
@@ -23,7 +23,6 @@ import (
 
 	"go.opentelemetry.io/otel"
 	"go.opentelemetry.io/otel/attribute"
-	"go.opentelemetry.io/otel/sdk/metric/aggregation"
 	"go.opentelemetry.io/otel/sdk/metric/metricdata"
 )
 
@@ -326,12 +325,12 @@ func (b *expoBuckets) downscale(delta int) {
 // newExponentialHistogram returns an Aggregator that summarizes a set of
 // measurements as an exponential histogram. Each histogram is scoped by attributes
 // and the aggregation cycle the measurements were made in.
-func newExponentialHistogram[N int64 | float64](cfg aggregation.Base2ExponentialHistogram, noSum bool) *expoHistogram[N] {
+func newExponentialHistogram[N int64 | float64](maxSize, maxScale int32, noMinMax, noSum bool) *expoHistogram[N] {
 	return &expoHistogram[N]{
 		expoHistogramValues: newExpoHistValues[N](
-			int(cfg.MaxSize),
-			int(cfg.MaxScale),
-			cfg.NoMinMax,
+			int(maxSize),
+			int(maxScale),
+			noMinMax,
 			noSum,
 		),
 		start: now(),
diff --git a/sdk/metric/internal/aggregate/exponential_histogram_test.go b/sdk/metric/internal/aggregate/exponential_histogram_test.go
index de949359ffd1..f1db4139cdbd 100644
--- a/sdk/metric/internal/aggregate/exponential_histogram_test.go
+++ b/sdk/metric/internal/aggregate/exponential_histogram_test.go
@@ -25,7 +25,6 @@ import (
 
 	"go.opentelemetry.io/otel/attribute"
 	"go.opentelemetry.io/otel/internal/global"
-	"go.opentelemetry.io/otel/sdk/metric/aggregation"
 	"go.opentelemetry.io/otel/sdk/metric/metricdata"
 	"go.opentelemetry.io/otel/sdk/metric/metricdata/metricdatatest"
 )
@@ -645,31 +644,33 @@ func BenchmarkAppend(b *testing.B) {
 	}
 }
 
-var expoHistConf = aggregation.Base2ExponentialHistogram{
-	MaxSize:  160,
-	MaxScale: 20,
-}
-
 func BenchmarkExponentialHistogram(b *testing.B) {
+	const (
+		maxSize  = 160
+		maxScale = 20
+		noMinMax = false
+		noSum    = false
+	)
+
 	b.Run("Int64/Cumulative", benchmarkAggregate(func() (Measure[int64], ComputeAggregation) {
 		return Builder[int64]{
 			Temporality: metricdata.CumulativeTemporality,
-		}.ExponentialBucketHistogram(expoHistConf, false)
+		}.ExponentialBucketHistogram(maxSize, maxScale, noMinMax, noSum)
 	}))
 	b.Run("Int64/Delta", benchmarkAggregate(func() (Measure[int64], ComputeAggregation) {
 		return Builder[int64]{
 			Temporality: metricdata.DeltaTemporality,
-		}.ExponentialBucketHistogram(expoHistConf, false)
+		}.ExponentialBucketHistogram(maxSize, maxScale, noMinMax, noSum)
 	}))
 	b.Run("Float64/Cumulative", benchmarkAggregate(func() (Measure[float64], ComputeAggregation) {
 		return Builder[float64]{
 			Temporality: metricdata.CumulativeTemporality,
-		}.ExponentialBucketHistogram(expoHistConf, false)
+		}.ExponentialBucketHistogram(maxSize, maxScale, noMinMax, noSum)
 	}))
 	b.Run("Float64/Delta", benchmarkAggregate(func() (Measure[float64], ComputeAggregation) {
 		return Builder[float64]{
 			Temporality: metricdata.DeltaTemporality,
-		}.ExponentialBucketHistogram(expoHistConf, false)
+		}.ExponentialBucketHistogram(maxSize, maxScale, noMinMax, noSum)
 	}))
 }
 
@@ -711,10 +712,12 @@ type exponentialHistogramAggregationTestCase[N int64 | float64] struct {
 }
 
 func testExponentialHistogramAggregation[N int64 | float64](t *testing.T) {
-	cfg := aggregation.Base2ExponentialHistogram{
-		MaxSize:  4,
-		MaxScale: 20,
-	}
+	const (
+		maxSize  = 4
+		maxScale = 20
+		noMinMax = false
+		noSum    = false
+	)
 
 	tests := []exponentialHistogramAggregationTestCase[N]{
 		{
@@ -722,7 +725,7 @@ func testExponentialHistogramAggregation[N int64 | float64](t *testing.T) {
 			build: func() (Measure[N], ComputeAggregation) {
 				return Builder[N]{
 					Temporality: metricdata.DeltaTemporality,
-				}.ExponentialBucketHistogram(cfg, false)
+				}.ExponentialBucketHistogram(maxSize, maxScale, noMinMax, noSum)
 			},
 			input: [][]N{
 				{4, 4, 4, 2, 16, 1},
@@ -750,7 +753,7 @@ func testExponentialHistogramAggregation[N int64 | float64](t *testing.T) {
 			build: func() (Measure[N], ComputeAggregation) {
 				return Builder[N]{
 					Temporality: metricdata.CumulativeTemporality,
-				}.ExponentialBucketHistogram(cfg, false)
+				}.ExponentialBucketHistogram(maxSize, maxScale, noMinMax, noSum)
 			},
 			input: [][]N{
 				{4, 4, 4, 2, 16, 1},
@@ -778,7 +781,7 @@ func testExponentialHistogramAggregation[N int64 | float64](t *testing.T) {
 			build: func() (Measure[N], ComputeAggregation) {
 				return Builder[N]{
 					Temporality: metricdata.DeltaTemporality,
-				}.ExponentialBucketHistogram(cfg, false)
+				}.ExponentialBucketHistogram(maxSize, maxScale, noMinMax, noSum)
 			},
 			input: [][]N{
 				{2, 3, 8},
@@ -807,7 +810,7 @@ func testExponentialHistogramAggregation[N int64 | float64](t *testing.T) {
 			build: func() (Measure[N], ComputeAggregation) {
 				return Builder[N]{
 					Temporality: metricdata.CumulativeTemporality,
-				}.ExponentialBucketHistogram(cfg, false)
+				}.ExponentialBucketHistogram(maxSize, maxScale, noMinMax, noSum)
 			},
 			input: [][]N{
 				{2, 3, 8},
diff --git a/sdk/metric/internal/aggregate/histogram.go b/sdk/metric/internal/aggregate/histogram.go
index cd030076fdc5..62ec51e1f5ed 100644
--- a/sdk/metric/internal/aggregate/histogram.go
+++ b/sdk/metric/internal/aggregate/histogram.go
@@ -21,7 +21,6 @@ import (
 	"time"
 
 	"go.opentelemetry.io/otel/attribute"
-	"go.opentelemetry.io/otel/sdk/metric/aggregation"
 	"go.opentelemetry.io/otel/sdk/metric/metricdata"
 )
 
@@ -109,10 +108,10 @@ func (s *histValues[N]) measure(_ context.Context, value N, attr attribute.Set)
 
 // newHistogram returns an Aggregator that summarizes a set of measurements as
 // an histogram.
-func newHistogram[N int64 | float64](cfg aggregation.ExplicitBucketHistogram, noSum bool) *histogram[N] {
+func newHistogram[N int64 | float64](boundaries []float64, noMinMax, noSum bool) *histogram[N] {
 	return &histogram[N]{
-		histValues: newHistValues[N](cfg.Boundaries, noSum),
-		noMinMax:   cfg.NoMinMax,
+		histValues: newHistValues[N](boundaries, noSum),
+		noMinMax:   noMinMax,
 		start:      now(),
 	}
 }
diff --git a/sdk/metric/internal/aggregate/histogram_test.go b/sdk/metric/internal/aggregate/histogram_test.go
index 68a00f2a90fc..ab44607e5f60 100644
--- a/sdk/metric/internal/aggregate/histogram_test.go
+++ b/sdk/metric/internal/aggregate/histogram_test.go
@@ -23,17 +23,13 @@ import (
 	"github.com/stretchr/testify/require"
 
 	"go.opentelemetry.io/otel/attribute"
-	"go.opentelemetry.io/otel/sdk/metric/aggregation"
 	"go.opentelemetry.io/otel/sdk/metric/metricdata"
 	"go.opentelemetry.io/otel/sdk/metric/metricdata/metricdatatest"
 )
 
 var (
 	bounds   = []float64{1, 5}
-	histConf = aggregation.ExplicitBucketHistogram{
-		Boundaries: bounds,
-		NoMinMax:   false,
-	}
+	noMinMax = false
 )
 
 func TestHistogram(t *testing.T) {
@@ -59,7 +55,7 @@ func testDeltaHist[N int64 | float64](c conf[N]) func(t *testing.T) {
 	in, out := Builder[N]{
 		Temporality: metricdata.DeltaTemporality,
 		Filter:      attrFltr,
-	}.ExplicitBucketHistogram(histConf, c.noSum)
+	}.ExplicitBucketHistogram(bounds, noMinMax, c.noSum)
 	ctx := context.Background()
 	return test[N](in, out, []teststep[N]{
 		{
@@ -125,7 +121,7 @@ func testCumulativeHist[N int64 | float64](c conf[N]) func(t *testing.T) {
 	in, out := Builder[N]{
 		Temporality: metricdata.CumulativeTemporality,
 		Filter:      attrFltr,
-	}.ExplicitBucketHistogram(histConf, c.noSum)
+	}.ExplicitBucketHistogram(bounds, noMinMax, c.noSum)
 	ctx := context.Background()
 	return test[N](in, out, []teststep[N]{
 		{
@@ -277,7 +273,7 @@ func TestHistogramImmutableBounds(t *testing.T) {
 	cpB := make([]float64, len(b))
 	copy(cpB, b)
 
-	h := newHistogram[int64](aggregation.ExplicitBucketHistogram{Boundaries: b}, false)
+	h := newHistogram[int64](b, false, false)
 	require.Equal(t, cpB, h.bounds)
 
 	b[0] = 10
@@ -293,7 +289,7 @@ func TestHistogramImmutableBounds(t *testing.T) {
 }
 
 func TestCumulativeHistogramImutableCounts(t *testing.T) {
-	h := newHistogram[int64](histConf, false)
+	h := newHistogram[int64](bounds, noMinMax, false)
 	h.measure(context.Background(), 5, alice)
 
 	var data metricdata.Aggregation = metricdata.Histogram[int64]{}
@@ -311,7 +307,7 @@ func TestCumulativeHistogramImutableCounts(t *testing.T) {
 func TestDeltaHistogramReset(t *testing.T) {
 	t.Cleanup(mockTime(now))
 
-	h := newHistogram[int64](histConf, false)
+	h := newHistogram[int64](bounds, noMinMax, false)
 
 	var data metricdata.Aggregation = metricdata.Histogram[int64]{}
 	require.Equal(t, 0, h.delta(&data))
@@ -340,21 +336,21 @@ func BenchmarkHistogram(b *testing.B) {
 	b.Run("Int64/Cumulative", benchmarkAggregate(func() (Measure[int64], ComputeAggregation) {
 		return Builder[int64]{
 			Temporality: metricdata.CumulativeTemporality,
-		}.ExplicitBucketHistogram(histConf, false)
+		}.ExplicitBucketHistogram(bounds, noMinMax, false)
 	}))
 	b.Run("Int64/Delta", benchmarkAggregate(func() (Measure[int64], ComputeAggregation) {
 		return Builder[int64]{
 			Temporality: metricdata.DeltaTemporality,
-		}.ExplicitBucketHistogram(histConf, false)
+		}.ExplicitBucketHistogram(bounds, noMinMax, false)
 	}))
 	b.Run("Float64/Cumulative", benchmarkAggregate(func() (Measure[float64], ComputeAggregation) {
 		return Builder[float64]{
 			Temporality: metricdata.CumulativeTemporality,
-		}.ExplicitBucketHistogram(histConf, false)
+		}.ExplicitBucketHistogram(bounds, noMinMax, false)
 	}))
 	b.Run("Float64/Delta", benchmarkAggregate(func() (Measure[float64], ComputeAggregation) {
 		return Builder[float64]{
 			Temporality: metricdata.DeltaTemporality,
-		}.ExplicitBucketHistogram(histConf, false)
+		}.ExplicitBucketHistogram(bounds, noMinMax, false)
 	}))
 }
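
The sketch below is an editorial illustration and not part of the patch. It shows how the decoupled Builder API is called once the aggregation config structs are no longer passed through: the Builder type, its Temporality field, and the two histogram constructors come from the diff above, while the helper function exampleHistogramAggregates, the bucket boundaries, and the max-size/max-scale values are illustrative assumptions.

package aggregate

import "go.opentelemetry.io/otel/sdk/metric/metricdata"

// exampleHistogramAggregates is a hypothetical, package-internal sketch of the
// decoupled API: primitive settings are passed directly instead of the
// aggregation.ExplicitBucketHistogram / aggregation.Base2ExponentialHistogram
// config structs removed by this patch.
func exampleHistogramAggregates() {
	// Explicit-bucket histogram: boundaries, noMinMax, noSum.
	ebMeas, ebComp := Builder[float64]{
		Temporality: metricdata.DeltaTemporality,
	}.ExplicitBucketHistogram([]float64{0, 5, 10}, false, false)

	// Base-2 exponential histogram: maxSize, maxScale, noMinMax, noSum.
	expoMeas, expoComp := Builder[float64]{
		Temporality: metricdata.CumulativeTemporality,
	}.ExponentialBucketHistogram(160, 20, false, false)

	// The returned measure/compute pairs would normally be wired into the
	// pipeline; they are discarded here because this is only a sketch.
	_, _, _, _ = ebMeas, ebComp, expoMeas, expoComp
}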