diff --git a/CHANGELOG.md b/CHANGELOG.md
index 100ef78eaf4..4d5433ee564 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -11,7 +11,8 @@ For experimental package changes, see the [experimental CHANGELOG](experimental/

 ### :rocket: (Enhancement)

-* feat(sdk-metrics): add exponential histogram mapping functions [#3504](https://github.com/open-telemetry/opentelemetry-js/pull/3502) @mwear
+* feat(sdk-metrics): add exponential histogram accumulation / aggregator [#3505](https://github.com/open-telemetry/opentelemetry-js/pull/3505) @mwear
+* feat(sdk-metrics): add exponential histogram mapping functions [#3504](https://github.com/open-telemetry/opentelemetry-js/pull/3504) @mwear
 * feat(api): add `getActiveBaggage` API [#3385](https://github.com/open-telemetry/opentelemetry-js/pull/3385)
 * feat(instrumentation-grpc): set net.peer.name and net.peer.port on client spans [#3430](https://github.com/open-telemetry/opentelemetry-js/pull/3430)
diff --git a/experimental/packages/otlp-transformer/src/metrics/internal.ts b/experimental/packages/otlp-transformer/src/metrics/internal.ts
index 64012a5100e..98a32e33d0d 100644
--- a/experimental/packages/otlp-transformer/src/metrics/internal.ts
+++ b/experimental/packages/otlp-transformer/src/metrics/internal.ts
@@ -19,6 +19,7 @@ import {
   AggregationTemporality,
   DataPoint,
   DataPointType,
+  ExponentialHistogram,
   Histogram,
   MetricData,
   ResourceMetrics,
@@ -96,7 +97,10 @@ export function toMetric(metricData: MetricData): IMetric {
 }

 function toSingularDataPoint(
-  dataPoint: DataPoint<number> | DataPoint<Histogram>,
+  dataPoint:
+    | DataPoint<number>
+    | DataPoint<Histogram>
+    | DataPoint<ExponentialHistogram>,
   valueType: ValueType
 ) {
   const out: INumberDataPoint = {
diff --git a/packages/sdk-metrics/src/aggregator/ExponentialHistogram.ts b/packages/sdk-metrics/src/aggregator/ExponentialHistogram.ts
new file mode 100644
index 00000000000..3edab1118c1
--- /dev/null
+++ b/packages/sdk-metrics/src/aggregator/ExponentialHistogram.ts
@@ -0,0 +1,611 @@
+/*
+ * Copyright The OpenTelemetry Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import {
+  Accumulation,
+  AccumulationRecord,
+  Aggregator,
+  AggregatorKind,
+} from './types';
+import {
+  DataPointType,
+  ExponentialHistogramMetricData,
+} from '../export/MetricData';
+import { diag, HrTime } from '@opentelemetry/api';
+import { InstrumentDescriptor, InstrumentType } from '../InstrumentDescriptor';
+import { Maybe } from '../utils';
+import { AggregationTemporality } from '../export/AggregationTemporality';
+import { Buckets } from './exponential-histogram/Buckets';
+import { Mapping } from './exponential-histogram/mapping/types';
+import { ExponentMapping } from './exponential-histogram/mapping/ExponentMapping';
+import { LogarithmMapping } from './exponential-histogram/mapping/LogarithmMapping';
+import * as util from './exponential-histogram/util';
+
+/**
+ * Internal value type for ExponentialHistogramAggregation.
+ * Differs from the exported type as undefined sum/min/max complicate arithmetic
+ * performed by this aggregation, but are required to be undefined in the exported types.
+ */
+interface InternalHistogram {
+  positive: {
+    offset: number;
+    bucketCounts: number[];
+  };
+  negative: {
+    offset: number;
+    bucketCounts: number[];
+  };
+  hasMinMax: boolean;
+  min: number;
+  max: number;
+  sum: number;
+  count: number;
+  scale: number;
+  zeroCount: number;
+}
+
+// HighLow is a utility class used for computing a common scale for
+// two exponential histogram accumulations
+class HighLow {
+  static combine(h1: HighLow, h2: HighLow): HighLow {
+    return new HighLow(Math.min(h1.low, h2.low), Math.max(h1.high, h2.high));
+  }
+  constructor(public low: number, public high: number) {}
+}
+
+export class ExponentialHistogramAccumulation implements Accumulation {
+  static DEFAULT_MAX_SIZE = 160;
+  static MIN_MAX_SIZE = 2;
+
+  constructor(
+    public startTime: HrTime = [0, 0],
+    private _maxSize = ExponentialHistogramAccumulation.DEFAULT_MAX_SIZE,
+    private _recordMinMax = true,
+    private _sum = 0,
+    private _count = 0,
+    private _zeroCount = 0,
+    private _min = Number.POSITIVE_INFINITY,
+    private _max = Number.NEGATIVE_INFINITY,
+    private _positive = new Buckets(),
+    private _negative = new Buckets(),
+    private _mapping: Mapping = LogarithmMapping.get(LogarithmMapping.MAX_SCALE)
+  ) {
+    if (this._maxSize < ExponentialHistogramAccumulation.MIN_MAX_SIZE) {
+      diag.warn(`Exponential Histogram Max Size set to ${this._maxSize}, \
+                changing to the minimum size of: \
+                ${ExponentialHistogramAccumulation.MIN_MAX_SIZE}`);
+      this._maxSize = ExponentialHistogramAccumulation.MIN_MAX_SIZE;
+    }
+  }
+
+  /**
+   * record updates a histogram with a single count
+   * @param {Number} value
+   */
+  record(value: number) {
+    this.updateByIncrement(value, 1);
+  }
+
+  /**
+   * Sets the start time for this accumulation
+   * @param {HrTime} startTime
+   */
+  setStartTime(startTime: HrTime): void {
+    this.startTime = startTime;
+  }
+
+  /**
+   * Returns the datapoint representation of this accumulation
+   * @returns {InternalHistogram}
+   */
+  toPointValue(): InternalHistogram {
+    return {
+      hasMinMax: this._recordMinMax,
+      min: this.min(),
+      max: this.max(),
+      sum: this.sum(),
+      positive: {
+        offset: this.positive().offset(),
+        bucketCounts: this.positive().counts(),
+      },
+      negative: {
+        offset: this.negative().offset(),
+        bucketCounts: this.negative().counts(),
+      },
+      count: this.count(),
+      scale: this.scale(),
+      zeroCount: this.zeroCount(),
+    };
+  }
+
+  /**
+   * @returns {Number} The sum of values recorded by this accumulation
+   */
+  sum(): number {
+    return this._sum;
+  }
+
+  /**
+   * @returns {Number} The minimum value recorded by this accumulation
+   */
+  min(): number {
+    return this._min;
+  }
+
+  /**
+   * @returns {Number} The maximum value recorded by this accumulation
+   */
+  max(): number {
+    return this._max;
+  }
+
+  /**
+   * @returns {Number} The count of values recorded by this accumulation
+   */
+  count(): number {
+    return this._count;
+  }
+
+  /**
+   * @returns {Number} The number of 0 values recorded by this accumulation
+   */
+  zeroCount(): number {
+    return this._zeroCount;
+  }
+
+  /**
+   * @returns {Number} The scale used by this accumulation
+   */
+  scale(): number {
+    if (this._count === this._zeroCount) {
+      // all zeros! scale doesn't matter, use zero
+      return 0;
+    }
+    return this._mapping.scale();
+  }
+
+  /**
+   * positive holds the positive values
+   * @returns {Buckets}
+   */
+  positive(): Buckets {
+    return this._positive;
+  }
+
+  /**
+   * negative holds the negative values by their absolute value
+   * @returns {Buckets}
+   */
+  negative(): Buckets {
+    return this._negative;
+  }
+
+  /**
+   * updateByIncrement supports updating a histogram with a non-negative
+   * increment.
+   * @param value
+   * @param increment
+   */
+  updateByIncrement(value: number, increment: number) {
+    if (value > this._max) {
+      this._max = value;
+    }
+    if (value < this._min) {
+      this._min = value;
+    }
+
+    this._count += increment;
+
+    if (value === 0) {
+      this._zeroCount += increment;
+      return;
+    }
+
+    this._sum += value * increment;
+
+    if (value > 0) {
+      this._updateBuckets(this._positive, value, increment);
+    } else {
+      this._updateBuckets(this._negative, -value, increment);
+    }
+  }
+
+  /**
+   * merge combines data from other into self
+   * @param {ExponentialHistogramAccumulation} other
+   */
+  merge(other: ExponentialHistogramAccumulation) {
+    if (this._count === 0) {
+      this._min = other.min();
+      this._max = other.max();
+    } else if (other.count() !== 0) {
+      if (other.min() < this.min()) {
+        this._min = other.min();
+      }
+      if (other.max() > this.max()) {
+        this._max = other.max();
+      }
+    }
+
+    this._sum += other.sum();
+    this._count += other.count();
+    this._zeroCount += other.zeroCount();
+
+    const minScale = this._minScale(other);
+
+    this._downscale(this.scale() - minScale);
+
+    this._mergeBuckets(this.positive(), other, other.positive(), minScale);
+    this._mergeBuckets(this.negative(), other, other.negative(), minScale);
+  }
+
+  /**
+   * diff subtracts other from self
+   * @param {ExponentialHistogramAccumulation} other
+   */
+  diff(other: ExponentialHistogramAccumulation) {
+    this._min = Infinity;
+    this._max = -Infinity;
+    this._sum -= other.sum();
+    this._count -= other.count();
+    this._zeroCount -= other.zeroCount();
+
+    const minScale = this._minScale(other);
+
+    this._downscale(this.scale() - minScale);
+
+    this._diffBuckets(this.positive(), other, other.positive(), minScale);
+    this._diffBuckets(this.negative(), other, other.negative(), minScale);
+  }
+
+  /**
+   * clone returns a deep copy of self
+   * @returns {ExponentialHistogramAccumulation}
+   */
+  clone(): ExponentialHistogramAccumulation {
+    return new ExponentialHistogramAccumulation(
+      this.startTime,
+      this._maxSize,
+      this._recordMinMax,
+      this._sum,
+      this._count,
+      this._zeroCount,
+      this._min,
+      this._max,
+      this.positive().clone(),
+      this.negative().clone(),
+      this._mapping
+    );
+  }
+
+  /**
+   * _updateBuckets maps the incoming value to a bucket index for the current
+   * scale. If the bucket index is outside of the range of the backing array,
+   * it will rescale the backing array and update the mapping for the new scale.
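+   * The needed change of scale is computed by `_changeScale` and applied by
+   * `_downscale` below.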
+ */ + private _updateBuckets(buckets: Buckets, value: number, increment: number) { + let index = this._mapping.mapToIndex(value); + + // rescale the mapping if needed + let rescalingNeeded = false; + let high = 0; + let low = 0; + + if (buckets.length() === 0) { + buckets.indexStart = index; + buckets.indexEnd = buckets.indexStart; + buckets.indexBase = buckets.indexStart; + } else if ( + index < buckets.indexStart && + buckets.indexEnd - index >= this._maxSize + ) { + rescalingNeeded = true; + low = index; + high = buckets.indexEnd; + } else if ( + index > buckets.indexEnd && + index - buckets.indexStart >= this._maxSize + ) { + rescalingNeeded = true; + low = buckets.indexStart; + high = index; + } + + // rescale and compute index at new scale + if (rescalingNeeded) { + const change = this._changeScale(high, low); + this._downscale(change); + index = this._mapping.mapToIndex(value); + } + + this._incrementIndexBy(buckets, index, increment); + } + + /** + * _incrementIndexBy increments the count of the bucket specified by `index`. + * If the index is outside of the range [buckets.indexStart, buckets.indexEnd] + * the boundaries of the backing array will be adjusted and more buckets will + * be added if needed. + */ + private _incrementIndexBy( + buckets: Buckets, + index: number, + increment: number + ) { + if (increment === 0) { + // nothing to do for a zero increment, can happen during a merge operation + return; + } + + if (index < buckets.indexStart) { + const span = buckets.indexEnd - index; + if (span >= buckets.backing.length()) { + this._grow(buckets, span + 1); + } + buckets.indexStart = index; + } else if (index > buckets.indexEnd) { + const span = index - buckets.indexStart; + if (span >= buckets.backing.length()) { + this._grow(buckets, span + 1); + } + buckets.indexEnd = index; + } + + let bucketIndex = index - buckets.indexBase; + if (bucketIndex < 0) { + bucketIndex += buckets.backing.length(); + } + buckets.incrementBucket(bucketIndex, increment); + } + + /** + * grow resizes the backing array by doubling in size up to maxSize. + * This extends the array with a bunch of zeros and copies the + * existing counts to the same position. + */ + private _grow(buckets: Buckets, needed: number) { + const size = buckets.backing.length(); + const bias = buckets.indexBase - buckets.indexStart; + const oldPositiveLimit = size - bias; + let newSize = util.powTwoRoundedUp(needed); + if (newSize > this._maxSize) { + newSize = this._maxSize; + } + const newPositiveLimit = newSize - bias; + buckets.backing.growTo(newSize, oldPositiveLimit, newPositiveLimit); + } + + /** + * _changeScale computes how much downscaling is needed by shifting the + * high and low values until they are separated by no more than size. + */ + private _changeScale(high: number, low: number): number { + let change = 0; + while (high - low >= this._maxSize) { + high >>= 1; + low >>= 1; + change++; + } + return change; + } + + /** + * _downscale subtracts `change` from the current mapping scale. + */ + private _downscale(change: number) { + if (change === 0) { + return; + } + if (change < 0) { + // Note: this should be impossible. If we get here it's because + // there is a bug in the implementation. 
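+      // A negative change would mean upscaling, which this accumulation
+      // never does.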
+      throw new Error(`impossible change of scale: ${this.scale()}`);
+    }
+    const newScale = this._mapping.scale() - change;
+
+    this._positive.downscale(change);
+    this._negative.downscale(change);
+
+    if (newScale <= 0) {
+      this._mapping = ExponentMapping.get(newScale);
+    } else {
+      this._mapping = LogarithmMapping.get(newScale);
+    }
+  }
+
+  /**
+   * _minScale is used by diff and merge to compute an ideal combined scale
+   */
+  private _minScale(other: ExponentialHistogramAccumulation): number {
+    const minScale = Math.min(this.scale(), other.scale());
+
+    const highLowPos = HighLow.combine(
+      this._highLowAtScale(this.positive(), this.scale(), minScale),
+      this._highLowAtScale(other.positive(), other.scale(), minScale)
+    );
+
+    const highLowNeg = HighLow.combine(
+      this._highLowAtScale(this.negative(), this.scale(), minScale),
+      this._highLowAtScale(other.negative(), other.scale(), minScale)
+    );
+
+    return Math.min(
+      minScale - this._changeScale(highLowPos.high, highLowPos.low),
+      minScale - this._changeScale(highLowNeg.high, highLowNeg.low)
+    );
+  }
+
+  /**
+   * _highLowAtScale is used by diff and merge to compute an ideal combined scale.
+   */
+  private _highLowAtScale(
+    buckets: Buckets,
+    currentScale: number,
+    newScale: number
+  ): HighLow {
+    if (buckets.length() === 0) {
+      return new HighLow(0, -1);
+    }
+    const shift = currentScale - newScale;
+    return new HighLow(buckets.indexStart >> shift, buckets.indexEnd >> shift);
+  }
+
+  /**
+   * _mergeBuckets translates index values from another histogram and
+   * adds the values into the corresponding buckets of this histogram.
+   */
+  private _mergeBuckets(
+    ours: Buckets,
+    other: ExponentialHistogramAccumulation,
+    theirs: Buckets,
+    scale: number
+  ) {
+    const theirOffset = theirs.offset();
+    const theirChange = other.scale() - scale;
+
+    for (let i = 0; i < theirs.length(); i++) {
+      this._incrementIndexBy(
+        ours,
+        (theirOffset + i) >> theirChange,
+        theirs.at(i)
+      );
+    }
+  }
+
+  /**
+   * _diffBuckets translates index values from another histogram and
+   * subtracts the values in the corresponding buckets of this histogram.
+   */
+  private _diffBuckets(
+    ours: Buckets,
+    other: ExponentialHistogramAccumulation,
+    theirs: Buckets,
+    scale: number
+  ) {
+    const theirOffset = theirs.offset();
+    const theirChange = other.scale() - scale;
+
+    for (let i = 0; i < theirs.length(); i++) {
+      const ourIndex = (theirOffset + i) >> theirChange;
+      let bucketIndex = ourIndex - ours.indexBase;
+      if (bucketIndex < 0) {
+        bucketIndex += ours.backing.length();
+      }
+      ours.decrementBucket(bucketIndex, theirs.at(i));
+    }
+
+    ours.trim();
+  }
+}
+
+/**
+ * Aggregator for ExponentialHistogramAccumulations
+ */
+export class ExponentialHistogramAggregator
+  implements Aggregator<ExponentialHistogramAccumulation>
+{
+  public kind: AggregatorKind.EXPONENTIAL_HISTOGRAM =
+    AggregatorKind.EXPONENTIAL_HISTOGRAM;
+
+  /**
+   * @param _maxSize Maximum number of buckets for each of the positive
+   * and negative ranges, exclusive of the zero-bucket.
+   * @param _recordMinMax If set to true, min and max will be recorded.
+   * Otherwise, min and max will not be recorded.
+   */
+  constructor(
+    readonly _maxSize: number,
+    private readonly _recordMinMax: boolean
+  ) {}
+
+  createAccumulation(startTime: HrTime) {
+    return new ExponentialHistogramAccumulation(
+      startTime,
+      this._maxSize,
+      this._recordMinMax
+    );
+  }
+
+  /**
+   * Return the result of the merge of two exponential histogram accumulations.
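+   * Note: neither argument is mutated; `delta` is cloned and `previous` is
+   * merged into the clone.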
+   */
+  merge(
+    previous: ExponentialHistogramAccumulation,
+    delta: ExponentialHistogramAccumulation
+  ): ExponentialHistogramAccumulation {
+    const result = delta.clone();
+    result.merge(previous);
+
+    return result;
+  }
+
+  /**
+   * Returns a new DELTA aggregation by comparing two cumulative measurements.
+   */
+  diff(
+    previous: ExponentialHistogramAccumulation,
+    current: ExponentialHistogramAccumulation
+  ): ExponentialHistogramAccumulation {
+    const result = current.clone();
+    result.diff(previous);
+
+    return result;
+  }
+
+  toMetricData(
+    descriptor: InstrumentDescriptor,
+    aggregationTemporality: AggregationTemporality,
+    accumulationByAttributes: AccumulationRecord<ExponentialHistogramAccumulation>[],
+    endTime: HrTime
+  ): Maybe<ExponentialHistogramMetricData> {
+    return {
+      descriptor,
+      aggregationTemporality,
+      dataPointType: DataPointType.EXPONENTIAL_HISTOGRAM,
+      dataPoints: accumulationByAttributes.map(([attributes, accumulation]) => {
+        const pointValue = accumulation.toPointValue();
+
+        // determine if instrument allows negative values.
+        const allowsNegativeValues =
+          descriptor.type === InstrumentType.UP_DOWN_COUNTER ||
+          descriptor.type === InstrumentType.OBSERVABLE_GAUGE ||
+          descriptor.type === InstrumentType.OBSERVABLE_UP_DOWN_COUNTER;
+
+        return {
+          attributes,
+          startTime: accumulation.startTime,
+          endTime,
+          value: {
+            min: pointValue.hasMinMax ? pointValue.min : undefined,
+            max: pointValue.hasMinMax ? pointValue.max : undefined,
+            sum: !allowsNegativeValues ? pointValue.sum : undefined,
+            positive: {
+              offset: pointValue.positive.offset,
+              bucketCounts: pointValue.positive.bucketCounts,
+            },
+            negative: {
+              offset: pointValue.negative.offset,
+              bucketCounts: pointValue.negative.bucketCounts,
+            },
+            count: pointValue.count,
+            scale: pointValue.scale,
+            zeroCount: pointValue.zeroCount,
+          },
+        };
+      }),
+    };
+  }
+}
diff --git a/packages/sdk-metrics/src/aggregator/exponential-histogram/Buckets.ts b/packages/sdk-metrics/src/aggregator/exponential-histogram/Buckets.ts
new file mode 100644
index 00000000000..9c108a86b18
--- /dev/null
+++ b/packages/sdk-metrics/src/aggregator/exponential-histogram/Buckets.ts
@@ -0,0 +1,299 @@
+/*
+ * Copyright The OpenTelemetry Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+export class Buckets {
+  /**
+   * The term index refers to the number of the exponential histogram bucket
+   * used to determine its boundaries. The lower boundary of a bucket is
+   * determined by base ** index and the upper boundary of a bucket is
+   * determined by base ** (index + 1). index values are signed to account
+   * for values less than or equal to 1.
+   *
+   * indexBase is the index of the 0th position in the
+   * backing array, i.e., backing[0] is the count
+   * in the bucket with index `indexBase`.
+   *
+   * indexStart is the smallest index value represented
+   * in the backing array.
+   *
+   * indexEnd is the largest index value represented in
+   * the backing array.
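+   *
+   * The backing array is treated as circular: the bucket with index
+   * indexBase is stored at physical position 0, and logical positions
+   * wrap around the end of the array.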
+   */
+  constructor(
+    public backing = new BucketsBacking(),
+    public indexBase = 0,
+    public indexStart = 0,
+    public indexEnd = 0
+  ) {}
+
+  /**
+   * Offset is the bucket index of the smallest entry in the counts array
+   * @returns {number}
+   */
+  offset(): number {
+    return this.indexStart;
+  }
+
+  /**
+   * An array of counts, where count[i] carries the count
+   * of the bucket at index (offset+i). count[i] is the count of
+   * values greater than base^(offset+i) and less than or equal to
+   * base^(offset+i+1).
+   * @returns {number[]} The logical counts based on the backing array
+   */
+  counts(): number[] {
+    return Array.from({ length: this.length() }, (_, i) => this.at(i));
+  }
+
+  /**
+   * length returns the number of buckets in the logical view into the
+   * backing array.
+   * @returns {number}
+   */
+  length(): number {
+    if (this.backing.length() === 0) {
+      return 0;
+    }
+
+    if (this.indexEnd === this.indexStart && this.at(0) === 0) {
+      return 0;
+    }
+
+    return this.indexEnd - this.indexStart + 1;
+  }
+
+  /**
+   * At returns the count of the bucket at a position in the logical
+   * array of counts.
+   * @param position
+   * @returns {number}
+   */
+  at(position: number): number {
+    const bias = this.indexBase - this.indexStart;
+    if (position < bias) {
+      position += this.backing.length();
+    }
+
+    position -= bias;
+    return this.backing.countAt(position);
+  }
+
+  /**
+   * incrementBucket increments the count of the bucket at `bucketIndex`
+   * in the backing array by `increment`
+   * @param bucketIndex
+   * @param increment
+   */
+  incrementBucket(bucketIndex: number, increment: number) {
+    this.backing.increment(bucketIndex, increment);
+  }
+
+  /**
+   * decrementBucket decrements the count of the bucket at `bucketIndex`
+   * in the backing array by `decrement`. If `decrement` is greater than
+   * the current count, the count is set to 0.
+   * @param bucketIndex
+   * @param decrement
+   */
+  decrementBucket(bucketIndex: number, decrement: number) {
+    this.backing.decrement(bucketIndex, decrement);
+  }
+
+  /**
+   * trim removes leading and / or trailing zero buckets (which can occur
+   * after diffing two histograms) and rotates the backing array so that the
+   * smallest non-zero index is in the 0th position of the backing array
+   */
+  trim() {
+    for (let i = 0; i < this.length(); i++) {
+      if (this.at(i) !== 0) {
+        this.indexStart += i;
+        break;
+      } else if (i === this.length() - 1) {
+        // the entire array is zeroed out
+        this.indexStart = this.indexEnd = this.indexBase = 0;
+        return;
+      }
+    }
+
+    for (let i = this.length() - 1; i >= 0; i--) {
+      if (this.at(i) !== 0) {
+        this.indexEnd -= this.length() - i - 1;
+        break;
+      }
+    }
+
+    this._rotate();
+  }
+
+  /**
+   * downscale first rotates, then collapses 2**`by`-to-1 buckets.
+   * @param by
+   */
+  downscale(by: number) {
+    this._rotate();
+
+    const size = 1 + this.indexEnd - this.indexStart;
+    const each = 1 << by;
+    let inpos = 0;
+    let outpos = 0;
+
+    for (let pos = this.indexStart; pos <= this.indexEnd; ) {
+      let mod = pos % each;
+      if (mod < 0) {
+        mod += each;
+      }
+      for (let i = mod; i < each && inpos < size; i++) {
+        this._relocateBucket(outpos, inpos);
+        inpos++;
+        pos++;
+      }
+      outpos++;
+    }
+
+    this.indexStart >>= by;
+    this.indexEnd >>= by;
+    this.indexBase = this.indexStart;
+  }
+
+  /**
+   * Clone returns a deep copy of Buckets
+   * @returns {Buckets}
+   */
+  clone(): Buckets {
+    return new Buckets(
+      this.backing.clone(),
+      this.indexBase,
+      this.indexStart,
+      this.indexEnd
+    );
+  }
+
+  /**
+   * _rotate shifts the backing array contents so that indexStart ==
+   * indexBase to simplify the downscale logic.
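+   * The shift is done in place using the reversal trick for array rotation.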
+ */ + private _rotate() { + const bias = this.indexBase - this.indexStart; + + if (bias === 0) { + return; + } else if (bias > 0) { + this.backing.reverse(0, this.backing.length()); + this.backing.reverse(0, bias); + this.backing.reverse(bias, this.backing.length()); + } else { + // negative bias, this can happen when diffing two histograms + this.backing.reverse(0, this.backing.length()); + this.backing.reverse(0, this.backing.length() + bias); + } + this.indexBase = this.indexStart; + } + + /** + * _relocateBucket adds the count in counts[src] to counts[dest] and + * resets count[src] to zero. + */ + private _relocateBucket(dest: number, src: number) { + if (dest === src) { + return; + } + this.incrementBucket(dest, this.backing.emptyBucket(src)); + } +} + +/** + * BucketsBacking holds the raw buckets and some utility methods to + * manage them. + */ +class BucketsBacking { + constructor(private _counts = [0]) {} + + /** + * length returns the physical size of the backing array, which + * is >= buckets.length() + */ + length(): number { + return this._counts.length; + } + + /** + * countAt returns the count in a specific bucket + */ + countAt(pos: number): number { + return this._counts[pos]; + } + + /** + * growTo grows a backing array and copies old entries + * into their correct new positions. + */ + growTo(newSize: number, oldPositiveLimit: number, newPositiveLimit: number) { + const tmp = new Array(newSize).fill(0); + tmp.splice( + newPositiveLimit, + this._counts.length - oldPositiveLimit, + ...this._counts.slice(oldPositiveLimit) + ); + tmp.splice(0, oldPositiveLimit, ...this._counts.slice(0, oldPositiveLimit)); + this._counts = tmp; + } + + /** + * reverse the items in the backing array in the range [from, limit). + */ + reverse(from: number, limit: number) { + const num = Math.floor((from + limit) / 2) - from; + for (let i = 0; i < num; i++) { + const tmp = this._counts[from + i]; + this._counts[from + i] = this._counts[limit - i - 1]; + this._counts[limit - i - 1] = tmp; + } + } + + /** + * emptyBucket empties the count from a bucket, for + * moving into another. + */ + emptyBucket(src: number): number { + const tmp = this._counts[src]; + this._counts[src] = 0; + return tmp; + } + + /** + * increments a bucket by `increment` + */ + increment(bucketIndex: number, increment: number) { + this._counts[bucketIndex] += increment; + } + + /** + * decrements a bucket by `decrement` + */ + decrement(bucketIndex: number, decrement: number) { + if (this._counts[bucketIndex] >= decrement) { + this._counts[bucketIndex] -= decrement; + } else { + // this should not happen, but we're being defensive against + // negative counts. + this._counts[bucketIndex] = 0; + } + } + + /** + * clone returns a deep copy of BucketsBacking + */ + clone(): BucketsBacking { + return new BucketsBacking([...this._counts]); + } +} diff --git a/packages/sdk-metrics/src/aggregator/exponential-histogram/util.ts b/packages/sdk-metrics/src/aggregator/exponential-histogram/util.ts index 356bbab2601..d0f7edb6cae 100644 --- a/packages/sdk-metrics/src/aggregator/exponential-histogram/util.ts +++ b/packages/sdk-metrics/src/aggregator/exponential-histogram/util.ts @@ -38,3 +38,17 @@ export function ldexp(frac: number, exp: number): number { } return frac * Math.pow(2, exp); } + +export function powTwoRoundedUp(v: number): number { + // The following expression computes the least power-of-two + // that is >= v. 
+  // There are a number of tricky ways to do this, see
+  // https://stackoverflow.com/questions/466204/rounding-up-to-next-power-of-2
+  v--;
+  v |= v >> 1;
+  v |= v >> 2;
+  v |= v >> 4;
+  v |= v >> 8;
+  v |= v >> 16;
+  v++;
+  return v;
+}
diff --git a/packages/sdk-metrics/src/aggregator/types.ts b/packages/sdk-metrics/src/aggregator/types.ts
index 19008da2503..16888256b2b 100644
--- a/packages/sdk-metrics/src/aggregator/types.ts
+++ b/packages/sdk-metrics/src/aggregator/types.ts
@@ -26,6 +26,7 @@ export enum AggregatorKind {
   SUM,
   LAST_VALUE,
   HISTOGRAM,
+  EXPONENTIAL_HISTOGRAM,
 }

 /** DataPoint value type for SumAggregation. */
@@ -62,6 +63,24 @@ export interface Histogram {
   max?: number;
 }

+/** DataPoint value type for ExponentialHistogramAggregation. */
+export interface ExponentialHistogram {
+  count: number;
+  sum?: number;
+  scale: number;
+  zeroCount: number;
+  positive: {
+    offset: number;
+    bucketCounts: number[];
+  };
+  negative: {
+    offset: number;
+    bucketCounts: number[];
+  };
+  min?: number;
+  max?: number;
+}
+
 /**
  * An Aggregator accumulation state.
  */
diff --git a/packages/sdk-metrics/src/export/MetricData.ts b/packages/sdk-metrics/src/export/MetricData.ts
index e7adfa0357a..42781e69f5d 100644
--- a/packages/sdk-metrics/src/export/MetricData.ts
+++ b/packages/sdk-metrics/src/export/MetricData.ts
@@ -19,7 +19,7 @@ import { InstrumentationScope } from '@opentelemetry/core';
 import { Resource } from '@opentelemetry/resources';
 import { InstrumentDescriptor } from '../InstrumentDescriptor';
 import { AggregationTemporality } from './AggregationTemporality';
-import { Histogram } from '../aggregator/types';
+import { Histogram, ExponentialHistogram } from '../aggregator/types';

 /**
  * Basic metric data fields.
@@ -56,10 +56,22 @@ export interface HistogramMetricData extends BaseMetricData {
   readonly dataPoints: DataPoint<Histogram>[];
 }

+/**
+ * Represents metric data aggregated by an ExponentialHistogramAggregation.
+ */
+export interface ExponentialHistogramMetricData extends BaseMetricData {
+  readonly dataPointType: DataPointType.EXPONENTIAL_HISTOGRAM;
+  readonly dataPoints: DataPoint<ExponentialHistogram>[];
+}
+
 /**
  * Represents an aggregated metric data.
  */
-export type MetricData = SumMetricData | GaugeMetricData | HistogramMetricData;
+export type MetricData =
+  | SumMetricData
+  | GaugeMetricData
+  | HistogramMetricData
+  | ExponentialHistogramMetricData;

 export interface ScopeMetrics {
   scope: InstrumentationScope;
diff --git a/packages/sdk-metrics/src/index.ts b/packages/sdk-metrics/src/index.ts
index d77f6a4b100..45559a54b8d 100644
--- a/packages/sdk-metrics/src/index.ts
+++ b/packages/sdk-metrics/src/index.ts
@@ -14,7 +14,12 @@
  * limitations under the License.
  */

-export { Sum, LastValue, Histogram } from './aggregator/types';
+export {
+  Sum,
+  LastValue,
+  Histogram,
+  ExponentialHistogram,
+} from './aggregator/types';

 export {
   AggregationSelector,
diff --git a/packages/sdk-metrics/test/aggregator/ExponentialHistogram.test.ts b/packages/sdk-metrics/test/aggregator/ExponentialHistogram.test.ts
new file mode 100644
index 00000000000..63093e648f2
--- /dev/null
+++ b/packages/sdk-metrics/test/aggregator/ExponentialHistogram.test.ts
@@ -0,0 +1,829 @@
+/*
+ * Copyright The OpenTelemetry Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import { HrTime, ValueType } from '@opentelemetry/api';
+import {
+  AggregationTemporality,
+  DataPointType,
+  InstrumentType,
+  MetricData,
+} from '../../src';
+import {
+  ExponentialHistogramAccumulation,
+  ExponentialHistogramAggregator,
+} from '../../src/aggregator/ExponentialHistogram';
+import { Buckets } from '../../src/aggregator/exponential-histogram/Buckets';
+import { Mapping } from '../../src/aggregator/exponential-histogram/mapping/types';
+import { ExponentMapping } from '../../src/aggregator/exponential-histogram/mapping/ExponentMapping';
+import { LogarithmMapping } from '../../src/aggregator/exponential-histogram/mapping/LogarithmMapping';
+import * as assert from 'assert';
+import {
+  assertInEpsilon,
+  assertInDelta,
+} from './exponential-histogram/helpers';
+import { defaultInstrumentDescriptor } from '../util';
+
+describe('ExponentialHistogramAccumulation', () => {
+  describe('record', () => {
+    /**
+     * Tests insertion of [2, 4, 1]. The index of 2 (i.e., 0) becomes
+     * `indexBase`, the 4 goes to its right and the 1 goes in the last
+     * position of the backing array. With 3 binary orders of magnitude
+     * and MaxSize=4, this must finish with scale=0; with minimum value 1
+     * this must finish with offset=-1 (all scales).
+     */
+    it('handles alternating growth: scenario 1', () => {
+      const accumulation = new ExponentialHistogramAccumulation([0, 0], 4);
+      accumulation.record(2);
+      accumulation.record(4);
+      accumulation.record(1);
+
+      assert.strictEqual(accumulation.positive().offset(), -1);
+      assert.strictEqual(accumulation.scale(), 0);
+      assert.deepStrictEqual(getCounts(accumulation.positive()), [1, 1, 1]);
+    });
+
+    /**
+     * Tests insertion of [2, 2, 2, 1, 8, 0.5]. The test proceeds as
+     * above but then downscales once further to scale=-1, thus index -1
+     * holds range [0.25, 1.0), index 0 holds range [1.0, 4), index 1
+     * holds range [4, 16).
+     */
+    it('handles alternating growth: scenario 2', () => {
+      const accumulation = new ExponentialHistogramAccumulation([0, 0], 4);
+      accumulation.record(2);
+      accumulation.record(2);
+      accumulation.record(2);
+      accumulation.record(1);
+      accumulation.record(8);
+      accumulation.record(0.5);
+
+      assert.strictEqual(accumulation.positive().offset(), -1);
+      assert.strictEqual(accumulation.scale(), -1);
+      assert.deepStrictEqual(getCounts(accumulation.positive()), [2, 3, 1]);
+    });
+
+    it('handles permutations of [1/2, 1, 2] with maxSize: 2', () => {
+      [
+        [1, 0.5, 2],
+        [1, 2, 0.5],
+        [2, 0.5, 1],
+        [2, 1, 0.5],
+        [0.5, 1, 2],
+        [0.5, 2, 1],
+      ].forEach(row => {
+        const accumulation = new ExponentialHistogramAccumulation([0, 0], 2);
+        row.forEach(value => {
+          accumulation.record(value);
+        });
+
+        assert.strictEqual(accumulation.scale(), -1);
+        assert.strictEqual(accumulation.positive().offset(), -1);
+        assert.strictEqual(accumulation.positive().length(), 2);
+        assert.strictEqual(accumulation.positive().at(0), 2);
+        assert.strictEqual(accumulation.positive().at(1), 1);
+      });
+    });
+
+    it('handles permutations of [1, 2, 4] with maxSize: 2', () => {
+      [
+        [1, 2, 4],
+        [1, 4, 2],
+        [2, 4, 1],
+        [2, 1, 4],
+        [4, 1, 2],
+        [4, 2, 1],
+      ].forEach(row => {
+        const accumulation = new ExponentialHistogramAccumulation([0, 0], 2);
+        row.forEach(value => {
+          accumulation.record(value);
+        });
+
+        assert.strictEqual(accumulation.scale(), -1);
+        assert.strictEqual(accumulation.positive().offset(), -1);
+        assert.strictEqual(accumulation.positive().length(), 2);
+        assert.strictEqual(accumulation.positive().at(0), 1);
+        assert.strictEqual(accumulation.positive().at(1), 2);
+      });
+    });
+
+    // Tests that every permutation of {1, 1/2, 1/4} with maxSize=2
+    // results in the same scale=-1 histogram.
+    it('handles permutations of [1, 1/2, 1/4] with maxSize: 2', () => {
+      [
+        [1, 0.5, 0.25],
+        [1, 0.25, 0.5],
+        [0.5, 0.25, 1],
+        [0.5, 1, 0.25],
+        [0.25, 1, 0.5],
+        [0.25, 0.5, 1],
+      ].forEach(row => {
+        const accumulation = new ExponentialHistogramAccumulation([0, 0], 2);
+        row.forEach(value => {
+          accumulation.record(value);
+        });
+
+        assert.strictEqual(accumulation.scale(), -1);
+        assert.strictEqual(accumulation.positive().offset(), -2);
+        assert.strictEqual(accumulation.positive().length(), 2);
+        assert.strictEqual(accumulation.positive().at(0), 1);
+        assert.strictEqual(accumulation.positive().at(1), 2);
+      });
+    });
+
+    // Tests a variety of ascending sequences, calculated using known
+    // index ranges. For example, with maxSize=3, using scale=0 and
+    // offset -5, add a sequence of numbers. Because the numbers have
+    // known range, we know the expected scale.
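+    // centerValue() (defined at the bottom of this file) picks the midpoint
+    // of a bucket so each recorded value maps to a predictable index.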
+    it('handles ascending sequences', () => {
+      for (const maxSize of [3, 4, 6, 9]) {
+        for (let offset = -5; offset <= 5; offset++) {
+          for (const initScale of [0, 4]) {
+            for (let step = maxSize; step < 4 * maxSize; step++) {
+              const accumulation = new ExponentialHistogramAccumulation(
+                [0, 0],
+                maxSize
+              );
+              let mapper = getMapping(initScale);
+
+              const minValue = centerValue(mapper, offset);
+              const maxValue = centerValue(mapper, offset + step);
+              let sum = 0.0;
+
+              for (let i = 0; i < maxSize; i++) {
+                const value = centerValue(mapper, offset + i);
+                accumulation.record(value);
+                sum += value;
+              }
+
+              assert.strictEqual(accumulation.scale(), initScale);
+              assert.strictEqual(accumulation.positive().offset(), offset);
+
+              accumulation.record(maxValue);
+              sum += maxValue;
+
+              // The zeroth bucket is not empty
+              assert.notStrictEqual(accumulation.positive().at(0), 0);
+
+              // The maximum-index is at or above the midpoint,
+              // otherwise we downscaled too much.
+
+              let maxFill = 0;
+              let totalCount = 0;
+
+              for (let i = 0; i < accumulation.positive().length(); i++) {
+                totalCount += accumulation.positive().at(i);
+                if (accumulation.positive().at(i) !== 0) {
+                  maxFill = i;
+                }
+              }
+              assert.ok(maxFill >= maxSize / 2);
+
+              // count is correct
+              assert.ok(maxSize + 1 >= totalCount);
+              assert.ok(maxSize + 1 >= accumulation.count());
+              // sum is correct
+              assert.ok(sum >= accumulation.sum());
+
+              // the offset is correct at the computed scale
+              mapper = getMapping(accumulation.scale());
+              let index = mapper.mapToIndex(minValue);
+              assert.strictEqual(accumulation.positive().offset(), index);
+
+              // the maximum range is correct at the computed scale
+              index = mapper.mapToIndex(maxValue);
+              assert.strictEqual(
+                accumulation.positive().offset() +
+                  accumulation.positive().length() -
+                  1,
+                index
+              );
+            }
+          }
+        }
+      }
+    });
+  });
+
+  describe('merge', () => {
+    it('handles simple (even) case', () => {
+      const acc0 = new ExponentialHistogramAccumulation([0, 0], 4);
+      const acc1 = new ExponentialHistogramAccumulation([0, 0], 4);
+      const acc2 = new ExponentialHistogramAccumulation([0, 0], 4);
+
+      for (let i = 0; i < 4; i++) {
+        const v1 = 2 << i;
+        const v2 = 1 / (1 << i);
+
+        acc0.record(v1);
+        acc1.record(v2);
+        acc2.record(v1);
+        acc2.record(v2);
+      }
+
+      assert.strictEqual(acc0.scale(), 0);
+      assert.strictEqual(acc1.scale(), 0);
+      assert.strictEqual(acc2.scale(), -1);
+
+      assert.strictEqual(acc0.positive().offset(), 0);
+      assert.strictEqual(acc1.positive().offset(), -4);
+      assert.strictEqual(acc2.positive().offset(), -2);
+
+      assert.deepStrictEqual(getCounts(acc0.positive()), [1, 1, 1, 1]);
+      assert.deepStrictEqual(getCounts(acc1.positive()), [1, 1, 1, 1]);
+      assert.deepStrictEqual(getCounts(acc2.positive()), [2, 2, 2, 2]);
+
+      acc0.merge(acc1);
+
+      assert.strictEqual(acc0.scale(), -1);
+      assert.strictEqual(acc2.scale(), -1);
+
+      assertHistogramsEqual(acc0, acc2);
+    });
+
+    it('handles simple (odd) case', () => {
+      const acc0 = new ExponentialHistogramAccumulation([0, 0], 4);
+      const acc1 = new ExponentialHistogramAccumulation([0, 0], 4);
+      const acc2 = new ExponentialHistogramAccumulation([0, 0], 4);
+
+      for (let i = 0; i < 4; i++) {
+        const v1 = 2 << i;
+        const v2 = 2 / (1 << i);
+
+        acc0.record(v1);
+        acc1.record(v2);
+        acc2.record(v1);
+        acc2.record(v2);
+      }
+
+      assert.strictEqual(acc0.count(), 4);
+      assert.strictEqual(acc1.count(), 4);
+      assert.strictEqual(acc2.count(), 8);
+
+      assert.strictEqual(acc0.scale(), 0);
+      assert.strictEqual(acc1.scale(), 0);
+      assert.strictEqual(acc2.scale(), -1);
+ + assert.strictEqual(acc0.positive().offset(), 0); + assert.strictEqual(acc1.positive().offset(), -3); + assert.strictEqual(acc2.positive().offset(), -2); + + assert.deepStrictEqual(getCounts(acc0.positive()), [1, 1, 1, 1]); + assert.deepStrictEqual(getCounts(acc1.positive()), [1, 1, 1, 1]); + assert.deepStrictEqual(getCounts(acc2.positive()), [1, 2, 3, 2]); + + acc0.merge(acc1); + + assert.strictEqual(acc0.scale(), -1); + assert.strictEqual(acc2.scale(), -1); + + assertHistogramsEqual(acc0, acc2); + }); + + it('handles exhaustive test case', () => { + const testMergeExhaustive = ( + a: number[], + b: number[], + size: number, + incr: number + ) => { + const aHist = new ExponentialHistogramAccumulation([0, 0], size); + const bHist = new ExponentialHistogramAccumulation([0, 0], size); + const cHist = new ExponentialHistogramAccumulation([0, 0], size); + + a.forEach(av => { + aHist.updateByIncrement(av, incr); + cHist.updateByIncrement(av, incr); + }); + b.forEach(bv => { + bHist.updateByIncrement(bv, incr); + cHist.updateByIncrement(bv, incr); + }); + + aHist.merge(bHist); + + assertHistogramsEqual(aHist, cHist); + }; + + const factor = 1024.0; + const count = 16; + const means = [0, factor]; + const stddevs = [1, factor]; + const sizes = [2, 6, 8, 9, 16]; + const increments = [1, 0x100, 0x10000, 0x100000000]; + + for (const mean of means) { + for (const stddev of stddevs) { + const values = Array.from( + { length: count }, + () => mean + Math.random() * stddev + ); + for (let part = 1; part < count; part++) { + for (const size of sizes) { + for (const incr of increments) { + testMergeExhaustive( + values.slice(0, part), + values.slice(part, count), + size, + incr + ); + } + } + } + } + } + }); + }); + describe('diff', () => { + it('handles simple case', () => { + const acc0 = new ExponentialHistogramAccumulation([0, 0], 4); + const acc1 = new ExponentialHistogramAccumulation([0, 0], 4); + + for (let i = 0; i < 4; i++) { + const v1 = 2 << i; + + acc0.record(v1); + acc1.record(v1); + acc1.record(v1); + } + + assert.strictEqual(acc0.scale(), 0); + assert.strictEqual(acc1.scale(), 0); + + assert.strictEqual(acc0.positive().offset(), 0); + assert.strictEqual(acc1.positive().offset(), 0); + + assert.deepStrictEqual(getCounts(acc0.positive()), [1, 1, 1, 1]); + assert.deepStrictEqual(getCounts(acc1.positive()), [2, 2, 2, 2]); + + acc1.diff(acc0); + + assertHistogramsEqual(acc0, acc1); + }); + + it('trims trailing 0 buckets after diff', () => { + const acc0 = new ExponentialHistogramAccumulation([0, 0], 4); + const acc1 = new ExponentialHistogramAccumulation([0, 0], 4); + const acc2 = new ExponentialHistogramAccumulation([0, 0], 4); + + for (let i = 0; i < 4; i++) { + const v1 = 2 << i; + + if (i % 2 === 0) { + acc0.record(v1); + } + + if (i % 2 === 1) { + acc1.record(v1); + } + + acc2.record(v1); + } + + assert.strictEqual(acc0.scale(), 0); + assert.strictEqual(acc1.scale(), 0); + assert.strictEqual(acc2.scale(), 0); + + assert.strictEqual(acc0.positive().offset(), 0); + assert.strictEqual(acc1.positive().offset(), 1); + assert.strictEqual(acc2.positive().offset(), 0); + + assert.deepStrictEqual(getCounts(acc0.positive()), [1, 0, 1]); + assert.deepStrictEqual(getCounts(acc1.positive()), [1, 0, 1]); + assert.deepStrictEqual(getCounts(acc2.positive()), [1, 1, 1, 1]); + + acc2.diff(acc1); + + assertHistogramsEqual(acc0, acc2); + }); + + it('trims leading 0 buckets after diff', () => { + const acc0 = new ExponentialHistogramAccumulation([0, 0], 4); + const acc1 = new 
ExponentialHistogramAccumulation([0, 0], 4); + const acc2 = new ExponentialHistogramAccumulation([0, 0], 4); + + for (let i = 0; i < 4; i++) { + const v1 = 2 << i; + + if (i % 2 === 1) { + acc0.record(v1); + } + + if (i % 2 === 0) { + acc1.record(v1); + } + + acc2.record(v1); + } + + assert.strictEqual(acc0.scale(), 0); + assert.strictEqual(acc1.scale(), 0); + assert.strictEqual(acc2.scale(), 0); + + assert.strictEqual(acc0.positive().offset(), 1); + assert.strictEqual(acc1.positive().offset(), 0); + assert.strictEqual(acc2.positive().offset(), 0); + + assert.deepStrictEqual(getCounts(acc0.positive()), [1, 0, 1]); + assert.deepStrictEqual(getCounts(acc1.positive()), [1, 0, 1]); + assert.deepStrictEqual(getCounts(acc2.positive()), [1, 1, 1, 1]); + + acc2.diff(acc1); + assertHistogramsEqual(acc0, acc2); + }); + + it('handles all zero bucket case', () => { + const acc0 = new ExponentialHistogramAccumulation([0, 0], 4); + const acc1 = new ExponentialHistogramAccumulation([0, 0], 4); + const acc2 = new ExponentialHistogramAccumulation([0, 0], 4); + + for (let i = 0; i < 4; i++) { + const v1 = 2 << i; + + acc1.record(v1); + acc2.record(v1); + } + + assert.strictEqual(acc0.scale(), 0); + assert.strictEqual(acc1.scale(), 0); + assert.strictEqual(acc2.scale(), 0); + + assert.strictEqual(acc0.positive().offset(), 0); + assert.strictEqual(acc1.positive().offset(), 0); + assert.strictEqual(acc2.positive().offset(), 0); + + assert.deepStrictEqual(getCounts(acc0.positive()), []); + assert.deepStrictEqual(getCounts(acc1.positive()), [1, 1, 1, 1]); + assert.deepStrictEqual(getCounts(acc2.positive()), [1, 1, 1, 1]); + + acc2.diff(acc1); + assertHistogramsEqual(acc0, acc2); + }); + }); + describe('clone()', () => { + it('makes a deep copy', () => { + const acc0 = new ExponentialHistogramAccumulation([0, 0], 4); + const acc1 = new ExponentialHistogramAccumulation([0, 0], 4); + + for (let i = 0; i < 4; i++) { + const v = 2 << i; + acc0.record(v); + acc1.record(v); + } + + assertHistogramsEqual(acc0, acc1); + + const acc2 = acc0.clone(); + + assertHistogramsEqual(acc0, acc2); + assert.strictEqual(acc0.scale(), acc2.scale()); + assert.deepStrictEqual( + getCounts(acc0.positive()), + getCounts(acc2.positive()) + ); + + acc2.record(2 << 5); + + // no longer equal + assert.notStrictEqual(acc0.scale(), acc2.scale()); + assert.notDeepStrictEqual( + getCounts(acc0.positive()), + getCounts(acc2.positive()) + ); + + // ensure acc0 wasn't mutated + assertHistogramsEqual(acc0, acc1); + }); + }); + + describe('toPointValue()', () => { + it('returns representation of histogram internals', () => { + const acc = new ExponentialHistogramAccumulation([0, 0], 4); + + for (let i = 0; i < 4; i++) { + acc.record(2 << i); + } + + const pv = acc.toPointValue(); + + assert.strictEqual(pv.scale, acc.scale()); + assert.strictEqual(pv.min, acc.min()); + assert.strictEqual(pv.max, acc.max()); + assert.strictEqual(pv.sum, acc.sum()); + assert.strictEqual(pv.count, acc.count()); + assert.strictEqual(pv.zeroCount, acc.zeroCount()); + assert.strictEqual(pv.positive.offset, acc.positive().offset()); + assert.deepStrictEqual(pv.positive.bucketCounts, acc.positive().counts()); + assert.strictEqual(pv.negative.offset, acc.negative().offset()); + assert.deepStrictEqual(pv.negative.bucketCounts, acc.negative().counts()); + }); + }); + + describe('min max size', () => { + it('auto-corrects to min max', () => { + const acc: any = new ExponentialHistogramAccumulation([0, 0], 0); + assert.strictEqual( + acc['_maxSize'], + 
ExponentialHistogramAccumulation.MIN_MAX_SIZE + ); + }); + }); +}); + +describe('ExponentialHistogramAggregation', () => { + describe('merge', () => { + it('merges and does not mutate args', () => { + const agg = new ExponentialHistogramAggregator(4, true); + const acc0 = agg.createAccumulation([0, 0]); + const acc1 = agg.createAccumulation([0, 0]); + const acc2 = agg.createAccumulation([0, 0]); + + acc0.record(2 << 0); + acc0.record(2 << 1); + acc0.record(2 << 3); + + acc1.record(2 << 0); + acc1.record(2 << 2); + acc1.record(2 << 3); + + acc2.record(2 << 0); + acc2.record(2 << 0); + acc2.record(2 << 1); + acc2.record(2 << 2); + acc2.record(2 << 3); + acc2.record(2 << 3); + + // snapshots before merging + const acc0Snapshot = acc0.toPointValue(); + const acc1Snapshot = acc1.toPointValue(); + + const result = agg.merge(acc0, acc1); + + // merge is as expected + assertHistogramsEqual(result, acc2); + + // merged histos are not mutated + assert.deepStrictEqual(acc0.toPointValue(), acc0Snapshot); + assert.deepStrictEqual(acc1.toPointValue(), acc1Snapshot); + }); + }); + + describe('diff', () => { + it('diffs and does not mutate args', () => { + const agg = new ExponentialHistogramAggregator(4, true); + const acc0 = agg.createAccumulation([0, 0]); + const acc1 = agg.createAccumulation([0, 0]); + const acc2 = agg.createAccumulation([0, 0]); + + acc0.record(2 << 0); + acc0.record(2 << 1); + acc0.record(2 << 3); + + acc1.record(2 << 0); + acc1.record(2 << 0); + acc1.record(2 << 1); + acc1.record(2 << 2); + acc1.record(2 << 3); + acc1.record(2 << 3); + + acc2.record(2 << 0); + acc2.record(2 << 2); + acc2.record(2 << 3); + + // snapshots before diff + const acc0Snapshot = acc0.toPointValue(); + const acc1Snapshot = acc1.toPointValue(); + + const result = agg.diff(acc0, acc1); + + // diff as expected + assertHistogramsEqual(result, acc2); + + // diffed histos are not mutated + assert.deepStrictEqual(acc0.toPointValue(), acc0Snapshot); + assert.deepStrictEqual(acc1.toPointValue(), acc1Snapshot); + }); + }); + + describe('toMetricData', () => { + it('should transform to expected data with recordMinMax = true', () => { + const startTime: HrTime = [0, 0]; + const endTime: HrTime = [1, 1]; + + const agg = new ExponentialHistogramAggregator(4, true); + const acc = agg.createAccumulation(startTime); + + acc.record(2); + acc.record(-2); + acc.record(4); + acc.record(-4); + + const result = agg.toMetricData( + defaultInstrumentDescriptor, + AggregationTemporality.CUMULATIVE, + [[{}, acc]], + endTime + ); + + const expected: MetricData = { + descriptor: defaultInstrumentDescriptor, + aggregationTemporality: AggregationTemporality.CUMULATIVE, + dataPointType: DataPointType.EXPONENTIAL_HISTOGRAM, + dataPoints: [ + { + attributes: {}, + startTime, + endTime, + value: { + min: -4, + max: 4, + sum: 0, + positive: { + offset: 1, + bucketCounts: [1, 0, 1], + }, + negative: { + offset: 1, + bucketCounts: [1, 0, 1], + }, + count: 4, + scale: 1, + zeroCount: 0, + }, + }, + ], + }; + + assert.deepStrictEqual(result, expected); + }); + + it('should transform to expected data with recordMinMax = false', () => { + const startTime: HrTime = [0, 0]; + const endTime: HrTime = [1, 1]; + + const agg = new ExponentialHistogramAggregator(4, false); + const acc = agg.createAccumulation(startTime); + + acc.record(2); + acc.record(-2); + acc.record(4); + acc.record(-4); + + const result = agg.toMetricData( + defaultInstrumentDescriptor, + AggregationTemporality.CUMULATIVE, + [[{}, acc]], + endTime + ); + + const expected: MetricData 
= {
+        descriptor: defaultInstrumentDescriptor,
+        aggregationTemporality: AggregationTemporality.CUMULATIVE,
+        dataPointType: DataPointType.EXPONENTIAL_HISTOGRAM,
+        dataPoints: [
+          {
+            attributes: {},
+            startTime,
+            endTime,
+            value: {
+              min: undefined,
+              max: undefined,
+              sum: 0,
+              positive: {
+                offset: 1,
+                bucketCounts: [1, 0, 1],
+              },
+              negative: {
+                offset: 1,
+                bucketCounts: [1, 0, 1],
+              },
+              count: 4,
+              scale: 1,
+              zeroCount: 0,
+            },
+          },
+        ],
+      };
+
+      assert.deepStrictEqual(result, expected);
+    });
+
+    function testSum(instrumentType: InstrumentType, expectSum: boolean) {
+      const agg = new ExponentialHistogramAggregator(4, true);
+
+      const startTime: HrTime = [0, 0];
+      const endTime: HrTime = [1, 1];
+
+      const acc = agg.createAccumulation(startTime);
+      acc.record(0);
+      acc.record(1);
+      acc.record(4);
+
+      const aggregatedData = agg.toMetricData(
+        {
+          name: 'default_metric',
+          description: 'a simple instrument',
+          type: instrumentType,
+          unit: '1',
+          valueType: ValueType.DOUBLE,
+        },
+        AggregationTemporality.CUMULATIVE,
+        [[{}, acc]],
+        endTime
+      );
+
+      assert.notStrictEqual(aggregatedData, undefined);
+      assert.strictEqual(
+        aggregatedData?.dataPoints[0].value.sum,
+        expectSum ? 5 : undefined
+      );
+    }
+
+    describe('should have undefined sum when used with', () => {
+      it('UpDownCounter', () => testSum(InstrumentType.UP_DOWN_COUNTER, false));
+      it('ObservableUpDownCounter', () =>
+        testSum(InstrumentType.OBSERVABLE_UP_DOWN_COUNTER, false));
+      it('ObservableGauge', () =>
+        testSum(InstrumentType.OBSERVABLE_GAUGE, false));
+    });
+
+    describe('should include sum with', () => {
+      it('Counter', () => testSum(InstrumentType.COUNTER, true));
+      it('Histogram', () => testSum(InstrumentType.HISTOGRAM, true));
+      it('ObservableCounter', () =>
+        testSum(InstrumentType.OBSERVABLE_COUNTER, true));
+    });
+  });
+});
+
+function getCounts(buckets: Buckets): Array<number> {
+  const counts = new Array(buckets.length());
+  for (let i = 0; i < buckets.length(); i++) {
+    counts[i] = buckets.at(i);
+  }
+  return counts;
+}
+
+function centerValue(mapper: Mapping, x: number): number {
+  const lower = mapper.lowerBoundary(x);
+  const upper = mapper.lowerBoundary(x + 1);
+  return (lower + upper) / 2;
+}
+
+function getMapping(scale: number): Mapping {
+  if (scale <= 0) {
+    return ExponentMapping.get(scale);
+  } else {
+    return LogarithmMapping.get(scale);
+  }
+}
+
+function assertHistogramsEqual(
+  actual: ExponentialHistogramAccumulation,
+  expected: ExponentialHistogramAccumulation
+) {
+  const actualSum = actual.sum();
+  const expectedSum = expected.sum();
+
+  if (actualSum === 0 || expectedSum === 0) {
+    assertInDelta(actualSum, expectedSum, 1e-6);
+  } else {
+    assertInEpsilon(actualSum, expectedSum, 1e-6);
+  }
+
+  assert.strictEqual(actual.count(), expected.count());
+  assert.strictEqual(actual.zeroCount(), expected.zeroCount());
+  assert.strictEqual(actual.scale(), expected.scale());
+
+  assert.strictEqual(
+    bucketsToString(actual.positive()),
+    bucketsToString(expected.positive())
+  );
+
+  assert.strictEqual(
+    bucketsToString(actual.negative()),
+    bucketsToString(expected.negative())
+  );
+}
+
+function bucketsToString(buckets: Buckets): string {
+  let str = `[@${buckets.offset()}`;
+  for (let i = 0; i < buckets.length(); i++) {
+    str += buckets.at(i).toString();
+  }
+  str += ']\n';
+  return str;
+}
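Reviewer note: below is a minimal sketch of how the new aggregator is exercised, using only APIs added in this diff. The import path is the in-repo source file (the aggregator classes are internal; only the `ExponentialHistogram` type is exported from `sdk-metrics`), so treat it as illustrative rather than a supported entry point:

```ts
import { ExponentialHistogramAggregator } from './packages/sdk-metrics/src/aggregator/ExponentialHistogram';

// Up to 4 buckets for each of the positive and negative ranges,
// with min/max recording enabled.
const agg = new ExponentialHistogramAggregator(4, true);
const acc = agg.createAccumulation([0, 0]);

// Values spanning more than 4 binary orders of magnitude force the
// accumulation to downscale (halve its bucket resolution) to stay
// within maxSize.
[0.5, 1, 2, 4, 8].forEach(v => acc.record(v));

const point = acc.toPointValue();
console.log(point.scale, point.positive.offset, point.positive.bucketCounts);
```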