From f67f0310f9d5446bea9ebacb8b02c0fc6c7c9fd3 Mon Sep 17 00:00:00 2001 From: Bartlomiej Obecny Date: Thu, 1 Oct 2020 17:27:33 +0200 Subject: [PATCH 01/10] chore: updating submodule for opentelemetry-proto --- packages/opentelemetry-exporter-collector-grpc/protos | 2 +- packages/opentelemetry-exporter-collector-proto/protos | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/opentelemetry-exporter-collector-grpc/protos b/packages/opentelemetry-exporter-collector-grpc/protos index e43e1abc40..313a868be2 160000 --- a/packages/opentelemetry-exporter-collector-grpc/protos +++ b/packages/opentelemetry-exporter-collector-grpc/protos @@ -1 +1 @@ -Subproject commit e43e1abc40428a6ee98e3bfd79bec1dfa2ed18cd +Subproject commit 313a868be259dce6c6516dd417d3ad5fd3321acf diff --git a/packages/opentelemetry-exporter-collector-proto/protos b/packages/opentelemetry-exporter-collector-proto/protos index e43e1abc40..313a868be2 160000 --- a/packages/opentelemetry-exporter-collector-proto/protos +++ b/packages/opentelemetry-exporter-collector-proto/protos @@ -1 +1 @@ -Subproject commit e43e1abc40428a6ee98e3bfd79bec1dfa2ed18cd +Subproject commit 313a868be259dce6c6516dd417d3ad5fd3321acf From 2325595edb51c7a68566ff8ff8f6adddf6174930 Mon Sep 17 00:00:00 2001 From: Bartlomiej Obecny Date: Thu, 1 Oct 2020 20:03:40 +0200 Subject: [PATCH 02/10] chore: necessary changes after upgrade to proto ver. 0.5.0, aligning metrics to latest spec changes --- .../docker/docker-compose.yaml | 2 +- examples/collector-exporter-node/package.json | 3 +- lerna.json | 2 + .../opentelemetry-api/src/metrics/Metric.ts | 5 + .../test/CollectorMetricExporter.test.ts | 35 +- .../test/helper.ts | 292 +++++-------- .../test/CollectorMetricExporter.test.ts | 39 +- .../test/helper.ts | 295 +++++-------- .../README.md | 2 +- .../src/transformMetrics.ts | 197 +++------ .../src/types.ts | 203 ++++++--- .../browser/CollectorMetricExporter.test.ts | 58 +-- .../common/CollectorMetricExporter.test.ts | 6 +- .../test/common/transformMetrics.test.ts | 123 ++---- .../test/helper.ts | 410 +++++++----------- .../test/node/CollectorMetricExporter.test.ts | 33 +- .../src/PrometheusSerializer.ts | 35 -- .../test/PrometheusExporter.test.ts | 20 +- .../test/PrometheusSerializer.test.ts | 89 ++-- .../src/export/Batcher.ts | 7 +- .../src/export/ConsoleMetricExporter.ts | 14 +- .../{MinMaxLastSumCount.ts => LastValue.ts} | 39 +- .../src/export/aggregators/index.ts | 2 +- .../opentelemetry-metrics/src/export/types.ts | 27 +- .../opentelemetry-metrics/test/Meter.test.ts | 137 ++---- 25 files changed, 830 insertions(+), 1245 deletions(-) rename packages/opentelemetry-metrics/src/export/aggregators/{MinMaxLastSumCount.ts => LastValue.ts} (50%) diff --git a/examples/collector-exporter-node/docker/docker-compose.yaml b/examples/collector-exporter-node/docker/docker-compose.yaml index 0dfe1a23f7..c04c16a124 100644 --- a/examples/collector-exporter-node/docker/docker-compose.yaml +++ b/examples/collector-exporter-node/docker/docker-compose.yaml @@ -2,8 +2,8 @@ version: "3" services: # Collector collector: +# image: otel/opentelemetry-collector:0.12.0 image: otel/opentelemetry-collector:latest -# image: otel/opentelemetry-collector:0.6.0 command: ["--config=/conf/collector-config.yaml", "--log-level=DEBUG"] volumes: - ./collector-config.yaml:/conf/collector-config.yaml diff --git a/examples/collector-exporter-node/package.json b/examples/collector-exporter-node/package.json index 23bfcce4a5..96601ff0b4 100644 --- 
a/examples/collector-exporter-node/package.json +++ b/examples/collector-exporter-node/package.json @@ -34,7 +34,8 @@ "@opentelemetry/exporter-collector-grpc": "^0.11.0", "@opentelemetry/exporter-collector-proto": "^0.11.0", "@opentelemetry/metrics": "^0.11.0", - "@opentelemetry/tracing": "^0.11.0" + "@opentelemetry/tracing": "^0.11.0", + "grpc": "^1.24.2" }, "homepage": "https://github.com/open-telemetry/opentelemetry-js#readme" } diff --git a/lerna.json b/lerna.json index 4e651f4a2c..e78656cafa 100644 --- a/lerna.json +++ b/lerna.json @@ -2,6 +2,8 @@ "lerna": "3.13.4", "npmClient": "npm", "packages": [ + "examples/metrics", + "examples/collector-exporter-node", "benchmark/*", "backwards-compatability/*", "metapackages/*", diff --git a/packages/opentelemetry-api/src/metrics/Metric.ts b/packages/opentelemetry-api/src/metrics/Metric.ts index 0022c2a5b6..083055fbed 100644 --- a/packages/opentelemetry-api/src/metrics/Metric.ts +++ b/packages/opentelemetry-api/src/metrics/Metric.ts @@ -59,6 +59,11 @@ export interface MetricOptions { * User provided logger. */ logger?: Logger; + + /** + * Boundaries optional for histogram + */ + boundaries?: number[]; } export interface BatchMetricOptions extends MetricOptions { diff --git a/packages/opentelemetry-exporter-collector-grpc/test/CollectorMetricExporter.test.ts b/packages/opentelemetry-exporter-collector-grpc/test/CollectorMetricExporter.test.ts index 901093f6b6..1fec33a373 100644 --- a/packages/opentelemetry-exporter-collector-grpc/test/CollectorMetricExporter.test.ts +++ b/packages/opentelemetry-exporter-collector-grpc/test/CollectorMetricExporter.test.ts @@ -27,12 +27,10 @@ import { CollectorMetricExporter } from '../src'; import { mockCounter, mockObserver, - mockHistogram, ensureExportedCounterIsCorrect, ensureExportedObserverIsCorrect, ensureMetadataIsCorrect, ensureResourceIsCorrect, - ensureExportedHistogramIsCorrect, ensureExportedValueRecorderIsCorrect, mockValueRecorder, } from './helper'; @@ -117,7 +115,7 @@ const testCollectorMetricExporter = (params: TestParams) => server.forceShutdown(); }); - beforeEach(done => { + beforeEach(async () => { const credentials = params.useTLS ? 
grpc.credentials.createSsl( fs.readFileSync('./test/certs/ca.crt'), @@ -136,10 +134,9 @@ const testCollectorMetricExporter = (params: TestParams) => value: 1592602232694000000, }); metrics = []; - metrics.push(mockCounter()); - metrics.push(mockObserver()); - metrics.push(mockHistogram()); - metrics.push(mockValueRecorder()); + metrics.push(await mockCounter()); + metrics.push(await mockObserver()); + metrics.push(await mockValueRecorder()); metrics[0].aggregator.update(1); @@ -148,8 +145,6 @@ const testCollectorMetricExporter = (params: TestParams) => metrics[2].aggregator.update(7); metrics[2].aggregator.update(14); - metrics[3].aggregator.update(5); - done(); }); afterEach(() => { @@ -189,15 +184,21 @@ const testCollectorMetricExporter = (params: TestParams) => const counter = exportedData[0].instrumentationLibraryMetrics[0].metrics[0]; const observer = - exportedData[1].instrumentationLibraryMetrics[0].metrics[0]; - const histogram = - exportedData[2].instrumentationLibraryMetrics[0].metrics[0]; + exportedData[0].instrumentationLibraryMetrics[0].metrics[1]; const recorder = - exportedData[3].instrumentationLibraryMetrics[0].metrics[0]; - ensureExportedCounterIsCorrect(counter); - ensureExportedObserverIsCorrect(observer); - ensureExportedHistogramIsCorrect(histogram); - ensureExportedValueRecorderIsCorrect(recorder); + exportedData[0].instrumentationLibraryMetrics[0].metrics[2]; + ensureExportedCounterIsCorrect( + counter, + counter.intSum?.dataPoints[0].timeUnixNano + ); + ensureExportedObserverIsCorrect( + observer, + observer.doubleGauge?.dataPoints[0].timeUnixNano + ); + ensureExportedValueRecorderIsCorrect( + recorder, + recorder.intHistogram?.dataPoints[0].timeUnixNano + ); assert.ok( typeof resource !== 'undefined', "resource doesn't exist" diff --git a/packages/opentelemetry-exporter-collector-grpc/test/helper.ts b/packages/opentelemetry-exporter-collector-grpc/test/helper.ts index efdf6c811e..a7b3be8bee 100644 --- a/packages/opentelemetry-exporter-collector-grpc/test/helper.ts +++ b/packages/opentelemetry-exporter-collector-grpc/test/helper.ts @@ -19,15 +19,20 @@ import { ReadableSpan } from '@opentelemetry/tracing'; import { Resource } from '@opentelemetry/resources'; import { collectorTypes } from '@opentelemetry/exporter-collector'; import * as assert from 'assert'; -import { - MetricRecord, - MetricKind, - SumAggregator, - MinMaxLastSumCountAggregator, - HistogramAggregator, -} from '@opentelemetry/metrics'; +import { MetricRecord, MeterProvider } from '@opentelemetry/metrics'; import * as grpc from 'grpc'; +const meterProvider = new MeterProvider({ + interval: 30000, + resource: new Resource({ + service: 'ui', + version: 1, + cost: 112.12, + }), +}); + +const meter = meterProvider.getMeter('default', '0.0.1'); + const traceIdArr = [ 31, 16, @@ -49,104 +54,61 @@ const traceIdArr = [ const spanIdArr = [94, 16, 114, 97, 246, 79, 165, 62]; const parentIdArr = [120, 168, 145, 80, 152, 134, 67, 136]; -export function mockCounter(): MetricRecord { - return { - descriptor: { - name: 'test-counter', +export async function mockCounter(): Promise { + const name = 'int-counter'; + const metric = + meter['_metrics'].get(name) || + meter.createCounter(name, { description: 'sample counter description', - unit: '1', - metricKind: MetricKind.COUNTER, valueType: ValueType.INT, - }, - labels: {}, - aggregator: new SumAggregator(), - resource: new Resource({ - service: 'ui', - version: 1, - cost: 112.12, - }), - instrumentationLibrary: { name: 'default', version: '0.0.1' }, - }; + }); + 
metric.clear(); + metric.bind({}); + + return (await metric.getMetricRecord())[0]; } -export function mockDoubleCounter(): MetricRecord { - return { - descriptor: { - name: 'test-counter', +export async function mockDoubleCounter(): Promise { + const name = 'double-counter'; + const metric = + meter['_metrics'].get(name) || + meter.createCounter(name, { description: 'sample counter description', - unit: '1', - metricKind: MetricKind.COUNTER, valueType: ValueType.DOUBLE, - }, - labels: {}, - aggregator: new SumAggregator(), - resource: new Resource({ - service: 'ui', - version: 1, - cost: 112.12, - }), - instrumentationLibrary: { name: 'default', version: '0.0.1' }, - }; + }); + metric.clear(); + metric.bind({}); + + return (await metric.getMetricRecord())[0]; } -export function mockObserver(): MetricRecord { - return { - descriptor: { - name: 'test-observer', +export async function mockObserver(): Promise { + const name = 'double-observer'; + const metric = + meter['_metrics'].get(name) || + meter.createValueObserver(name, { description: 'sample observer description', - unit: '2', - metricKind: MetricKind.VALUE_OBSERVER, valueType: ValueType.DOUBLE, - }, - labels: {}, - aggregator: new MinMaxLastSumCountAggregator(), - resource: new Resource({ - service: 'ui', - version: 1, - cost: 112.12, - }), - instrumentationLibrary: { name: 'default', version: '0.0.1' }, - }; + }); + metric.clear(); + metric.bind({}); + + return (await metric.getMetricRecord())[0]; } -export function mockValueRecorder(): MetricRecord { - return { - descriptor: { - name: 'test-recorder', +export async function mockValueRecorder(): Promise { + const name = 'int-recorder'; + const metric = + meter['_metrics'].get(name) || + meter.createValueRecorder(name, { description: 'sample recorder description', - unit: '3', - metricKind: MetricKind.VALUE_RECORDER, valueType: ValueType.INT, - }, - labels: {}, - aggregator: new MinMaxLastSumCountAggregator(), - resource: new Resource({ - service: 'ui', - version: 1, - cost: 112.12, - }), - instrumentationLibrary: { name: 'default', version: '0.0.1' }, - }; -} + boundaries: [0, 100], + }); + metric.clear(); + metric.bind({}); -export function mockHistogram(): MetricRecord { - return { - descriptor: { - name: 'test-hist', - description: 'sample observer description', - unit: '2', - metricKind: MetricKind.VALUE_OBSERVER, - valueType: ValueType.DOUBLE, - }, - labels: {}, - aggregator: new HistogramAggregator([10, 20]), - resource: new Resource({ - service: 'ui', - version: 1, - cost: 112.12, - }), - instrumentationLibrary: { name: 'default', version: '0.0.1' }, - }; + return (await metric.getMetricRecord())[0]; } export const mockedReadableSpan: ReadableSpan = { @@ -335,7 +297,7 @@ export function ensureExportedSpanIsCorrect( 'parentIdArr is wrong' ); assert.strictEqual(span.name, 'documentFetch', 'name is wrong'); - assert.strictEqual(span.kind, 'INTERNAL', 'kind is wrong'); + assert.strictEqual(span.kind, 'SPAN_KIND_INTERNAL', 'kind is wrong'); assert.strictEqual( span.startTimeUnixNano, '1574120165429803008', @@ -355,118 +317,84 @@ export function ensureExportedSpanIsCorrect( assert.strictEqual(span.droppedLinksCount, 0, 'droppedLinksCount is wrong'); assert.deepStrictEqual( span.status, - { code: 'Ok', message: '' }, + { code: 'STATUS_CODE_OK', message: '' }, 'status is wrong' ); } export function ensureExportedCounterIsCorrect( - metric: collectorTypes.opentelemetryProto.metrics.v1.Metric + metric: collectorTypes.opentelemetryProto.metrics.v1.Metric, + time?: number ) { - 
assert.deepStrictEqual(metric.metricDescriptor, { - name: 'test-counter', + assert.deepStrictEqual(metric, { + name: 'int-counter', description: 'sample counter description', unit: '1', - type: 'MONOTONIC_INT64', - temporality: 'CUMULATIVE', + data: 'intSum', + intSum: { + dataPoints: [ + { + labels: [], + exemplars: [], + value: '1', + startTimeUnixNano: '1592602232694000128', + timeUnixNano: String(time), + }, + ], + isMonotonic: true, + aggregationTemporality: 'AGGREGATION_TEMPORALITY_CUMULATIVE', + }, }); - assert.deepStrictEqual(metric.doubleDataPoints, []); - assert.deepStrictEqual(metric.summaryDataPoints, []); - assert.deepStrictEqual(metric.histogramDataPoints, []); - assert.ok(metric.int64DataPoints); - assert.deepStrictEqual(metric.int64DataPoints[0].labels, []); - assert.deepStrictEqual(metric.int64DataPoints[0].value, '1'); - assert.deepStrictEqual( - metric.int64DataPoints[0].startTimeUnixNano, - '1592602232694000128' - ); } export function ensureExportedObserverIsCorrect( - metric: collectorTypes.opentelemetryProto.metrics.v1.Metric -) { - assert.deepStrictEqual(metric.metricDescriptor, { - name: 'test-observer', - description: 'sample observer description', - unit: '2', - type: 'SUMMARY', - temporality: 'DELTA', - }); - - assert.deepStrictEqual(metric.int64DataPoints, []); - assert.deepStrictEqual(metric.doubleDataPoints, []); - assert.deepStrictEqual(metric.histogramDataPoints, []); - assert.ok(metric.summaryDataPoints); - assert.deepStrictEqual(metric.summaryDataPoints[0].labels, []); - assert.deepStrictEqual(metric.summaryDataPoints[0].sum, 9); - assert.deepStrictEqual(metric.summaryDataPoints[0].count, '2'); - assert.deepStrictEqual( - metric.summaryDataPoints[0].startTimeUnixNano, - '1592602232694000128' - ); - assert.deepStrictEqual(metric.summaryDataPoints[0].percentileValues, [ - { percentile: 0, value: 3 }, - { percentile: 100, value: 6 }, - ]); -} - -export function ensureExportedHistogramIsCorrect( - metric: collectorTypes.opentelemetryProto.metrics.v1.Metric + metric: collectorTypes.opentelemetryProto.metrics.v1.Metric, + time?: number ) { - assert.deepStrictEqual(metric.metricDescriptor, { - name: 'test-hist', + assert.deepStrictEqual(metric, { + name: 'double-observer', description: 'sample observer description', - unit: '2', - type: 'HISTOGRAM', - temporality: 'DELTA', + unit: '1', + data: 'doubleGauge', + doubleGauge: { + dataPoints: [ + { + labels: [], + exemplars: [], + value: 6, + startTimeUnixNano: '1592602232694000128', + timeUnixNano: String(time), + }, + ], + }, }); - assert.deepStrictEqual(metric.int64DataPoints, []); - assert.deepStrictEqual(metric.summaryDataPoints, []); - assert.deepStrictEqual(metric.doubleDataPoints, []); - assert.ok(metric.histogramDataPoints); - assert.deepStrictEqual(metric.histogramDataPoints[0].labels, []); - assert.deepStrictEqual(metric.histogramDataPoints[0].count, '2'); - assert.deepStrictEqual(metric.histogramDataPoints[0].sum, 21); - assert.deepStrictEqual(metric.histogramDataPoints[0].buckets, [ - { count: '1', exemplar: null }, - { count: '1', exemplar: null }, - { count: '0', exemplar: null }, - ]); - assert.deepStrictEqual(metric.histogramDataPoints[0].explicitBounds, [ - 10, - 20, - ]); - assert.deepStrictEqual( - metric.histogramDataPoints[0].startTimeUnixNano, - '1592602232694000128' - ); } export function ensureExportedValueRecorderIsCorrect( - metric: collectorTypes.opentelemetryProto.metrics.v1.Metric + metric: collectorTypes.opentelemetryProto.metrics.v1.Metric, + time?: number ) { - 
assert.deepStrictEqual(metric.metricDescriptor, { - name: 'test-recorder', + assert.deepStrictEqual(metric, { + name: 'int-recorder', description: 'sample recorder description', - unit: '3', - type: 'SUMMARY', - temporality: 'DELTA', + unit: '1', + data: 'intHistogram', + intHistogram: { + dataPoints: [ + { + labels: [], + exemplars: [], + sum: '21', + count: '2', + startTimeUnixNano: '1592602232694000128', + timeUnixNano: String(time), + bucketCounts: ['2', '0'], + explicitBounds: [Infinity], + }, + ], + aggregationTemporality: 'AGGREGATION_TEMPORALITY_DELTA', + }, }); - assert.deepStrictEqual(metric.histogramDataPoints, []); - assert.deepStrictEqual(metric.int64DataPoints, []); - assert.deepStrictEqual(metric.doubleDataPoints, []); - assert.ok(metric.summaryDataPoints); - assert.deepStrictEqual(metric.summaryDataPoints[0].labels, []); - assert.deepStrictEqual( - metric.summaryDataPoints[0].startTimeUnixNano, - '1592602232694000128' - ); - assert.deepStrictEqual(metric.summaryDataPoints[0].percentileValues, [ - { percentile: 0, value: 5 }, - { percentile: 100, value: 5 }, - ]); - assert.deepStrictEqual(metric.summaryDataPoints[0].count, '1'); - assert.deepStrictEqual(metric.summaryDataPoints[0].sum, 5); } export function ensureResourceIsCorrect( diff --git a/packages/opentelemetry-exporter-collector-proto/test/CollectorMetricExporter.test.ts b/packages/opentelemetry-exporter-collector-proto/test/CollectorMetricExporter.test.ts index 83f3f7a101..cac6e44b16 100644 --- a/packages/opentelemetry-exporter-collector-proto/test/CollectorMetricExporter.test.ts +++ b/packages/opentelemetry-exporter-collector-proto/test/CollectorMetricExporter.test.ts @@ -15,7 +15,6 @@ */ import { collectorTypes } from '@opentelemetry/exporter-collector'; - import * as core from '@opentelemetry/core'; import * as http from 'http'; import * as assert from 'assert'; @@ -26,12 +25,10 @@ import { getExportRequestProto } from '../src/util'; import { mockCounter, mockObserver, - mockHistogram, ensureExportMetricsServiceRequestIsSet, mockValueRecorder, ensureExportedCounterIsCorrect, ensureExportedObserverIsCorrect, - ensureExportedHistogramIsCorrect, ensureExportedValueRecorderIsCorrect, } from './helper'; import { MetricRecord } from '@opentelemetry/metrics'; @@ -60,7 +57,7 @@ describe('CollectorMetricExporter - node with proto over http', () => { let spyWrite: sinon.SinonSpy; let metrics: MetricRecord[]; describe('export', () => { - beforeEach(() => { + beforeEach(async () => { spyRequest = sinon.stub(http, 'request').returns(fakeRequest as any); spyWrite = sinon.stub(fakeRequest, 'write'); collectorExporterConfig = { @@ -79,16 +76,14 @@ describe('CollectorMetricExporter - node with proto over http', () => { value: 1592602232694000000, }); metrics = []; - metrics.push(mockCounter()); - metrics.push(mockObserver()); - metrics.push(mockHistogram()); - metrics.push(mockValueRecorder()); + metrics.push(await mockCounter()); + metrics.push(await mockObserver()); + metrics.push(await mockValueRecorder()); metrics[0].aggregator.update(1); metrics[1].aggregator.update(3); metrics[1].aggregator.update(6); metrics[2].aggregator.update(7); metrics[2].aggregator.update(14); - metrics[3].aggregator.update(5); }); afterEach(() => { spyRequest.restore(); @@ -132,22 +127,28 @@ describe('CollectorMetricExporter - node with proto over http', () => { const metric1 = json.resourceMetrics[0].instrumentationLibraryMetrics[0].metrics[0]; const metric2 = - json.resourceMetrics[1].instrumentationLibraryMetrics[0].metrics[0]; + 
json.resourceMetrics[0].instrumentationLibraryMetrics[0].metrics[1]; const metric3 = - json.resourceMetrics[2].instrumentationLibraryMetrics[0].metrics[0]; - const metric4 = - json.resourceMetrics[3].instrumentationLibraryMetrics[0].metrics[0]; + json.resourceMetrics[0].instrumentationLibraryMetrics[0].metrics[2]; + assert.ok(typeof metric1 !== 'undefined', "counter doesn't exist"); - ensureExportedCounterIsCorrect(metric1); + ensureExportedCounterIsCorrect( + metric1, + metric1.intSum?.dataPoints[0].timeUnixNano + ); assert.ok(typeof metric2 !== 'undefined', "observer doesn't exist"); - ensureExportedObserverIsCorrect(metric2); - assert.ok(typeof metric3 !== 'undefined', "histogram doesn't exist"); - ensureExportedHistogramIsCorrect(metric3); + ensureExportedObserverIsCorrect( + metric2, + metric2.doubleGauge?.dataPoints[0].timeUnixNano + ); assert.ok( - typeof metric4 !== 'undefined', + typeof metric3 !== 'undefined', "value recorder doesn't exist" ); - ensureExportedValueRecorderIsCorrect(metric4); + ensureExportedValueRecorderIsCorrect( + metric3, + metric3.intHistogram?.dataPoints[0].timeUnixNano + ); ensureExportMetricsServiceRequestIsSet(json); diff --git a/packages/opentelemetry-exporter-collector-proto/test/helper.ts b/packages/opentelemetry-exporter-collector-proto/test/helper.ts index dd66fea994..25f613b51d 100644 --- a/packages/opentelemetry-exporter-collector-proto/test/helper.ts +++ b/packages/opentelemetry-exporter-collector-proto/test/helper.ts @@ -19,112 +19,74 @@ import { ReadableSpan } from '@opentelemetry/tracing'; import { Resource } from '@opentelemetry/resources'; import { collectorTypes } from '@opentelemetry/exporter-collector'; import * as assert from 'assert'; -import { - MetricRecord, - MetricKind, - SumAggregator, - MinMaxLastSumCountAggregator, - HistogramAggregator, -} from '@opentelemetry/metrics'; +import { MeterProvider, MetricRecord } from '@opentelemetry/metrics'; -export function mockCounter(): MetricRecord { - return { - descriptor: { - name: 'test-counter', +const meterProvider = new MeterProvider({ + interval: 30000, + resource: new Resource({ + service: 'ui', + version: 1, + cost: 112.12, + }), +}); + +const meter = meterProvider.getMeter('default', '0.0.1'); + +export async function mockCounter(): Promise { + const name = 'int-counter'; + const metric = + meter['_metrics'].get(name) || + meter.createCounter(name, { description: 'sample counter description', - unit: '1', - metricKind: MetricKind.COUNTER, valueType: ValueType.INT, - }, - labels: {}, - aggregator: new SumAggregator(), - resource: new Resource({ - service: 'ui', - version: 1, - cost: 112.12, - }), - instrumentationLibrary: { name: 'default', version: '0.0.1' }, - }; + }); + metric.clear(); + metric.bind({}); + + return (await metric.getMetricRecord())[0]; } -export function mockDoubleCounter(): MetricRecord { - return { - descriptor: { - name: 'test-counter', +export async function mockDoubleCounter(): Promise { + const name = 'double-counter'; + const metric = + meter['_metrics'].get(name) || + meter.createCounter(name, { description: 'sample counter description', - unit: '1', - metricKind: MetricKind.COUNTER, valueType: ValueType.DOUBLE, - }, - labels: {}, - aggregator: new SumAggregator(), - resource: new Resource({ - service: 'ui', - version: 1, - cost: 112.12, - }), - instrumentationLibrary: { name: 'default', version: '0.0.1' }, - }; + }); + metric.clear(); + metric.bind({}); + + return (await metric.getMetricRecord())[0]; } -export function mockObserver(): MetricRecord { - 
return { - descriptor: { - name: 'test-observer', +export async function mockObserver(): Promise { + const name = 'double-observer'; + const metric = + meter['_metrics'].get(name) || + meter.createValueObserver(name, { description: 'sample observer description', - unit: '2', - metricKind: MetricKind.VALUE_OBSERVER, valueType: ValueType.DOUBLE, - }, - labels: {}, - aggregator: new MinMaxLastSumCountAggregator(), - resource: new Resource({ - service: 'ui', - version: 1, - cost: 112.12, - }), - instrumentationLibrary: { name: 'default', version: '0.0.1' }, - }; + }); + metric.clear(); + metric.bind({}); + + return (await metric.getMetricRecord())[0]; } -export function mockValueRecorder(): MetricRecord { - return { - descriptor: { - name: 'test-recorder', +export async function mockValueRecorder(): Promise { + const name = 'int-recorder'; + const metric = + meter['_metrics'].get(name) || + meter.createValueRecorder(name, { description: 'sample recorder description', - unit: '3', - metricKind: MetricKind.VALUE_RECORDER, valueType: ValueType.INT, - }, - labels: {}, - aggregator: new MinMaxLastSumCountAggregator(), - resource: new Resource({ - service: 'ui', - version: 1, - cost: 112.12, - }), - instrumentationLibrary: { name: 'default', version: '0.0.1' }, - }; -} + boundaries: [0, 100], + }); + metric.clear(); + metric.bind({}); -export function mockHistogram(): MetricRecord { - return { - descriptor: { - name: 'test-hist', - description: 'sample observer description', - unit: '2', - metricKind: MetricKind.VALUE_OBSERVER, - valueType: ValueType.DOUBLE, - }, - labels: {}, - aggregator: new HistogramAggregator([10, 20]), - resource: new Resource({ - service: 'ui', - version: 1, - cost: 112.12, - }), - instrumentationLibrary: { name: 'default', version: '0.0.1' }, - }; + return (await metric.getMetricRecord())[0]; } const traceIdBase64 = 'HxAI3I4nDoXECg18OTmyeA=='; @@ -297,7 +259,7 @@ export function ensureProtoSpanIsCorrect( 'parentIdArr is wrong' ); assert.strictEqual(span.name, 'documentFetch', 'name is wrong'); - assert.strictEqual(span.kind, 'INTERNAL', 'kind is wrong'); + assert.strictEqual(span.kind, 'SPAN_KIND_INTERNAL', 'kind is wrong'); assert.strictEqual( span.startTimeUnixNano, '1574120165429803008', @@ -315,116 +277,77 @@ export function ensureProtoSpanIsCorrect( ); assert.strictEqual(span.droppedEventsCount, 0, 'droppedEventsCount is wrong'); assert.strictEqual(span.droppedLinksCount, 0, 'droppedLinksCount is wrong'); - assert.deepStrictEqual(span.status, { code: 'Ok' }, 'status is wrong'); + assert.deepStrictEqual( + span.status, + { code: 'STATUS_CODE_OK' }, + 'status is wrong' + ); } export function ensureExportedCounterIsCorrect( - metric: collectorTypes.opentelemetryProto.metrics.v1.Metric + metric: collectorTypes.opentelemetryProto.metrics.v1.Metric, + time?: number ) { - assert.deepStrictEqual(metric.metricDescriptor, { - name: 'test-counter', + assert.deepStrictEqual(metric, { + name: 'int-counter', description: 'sample counter description', unit: '1', - type: 'MONOTONIC_INT64', - temporality: 'CUMULATIVE', + intSum: { + dataPoints: [ + { + value: '1', + startTimeUnixNano: '1592602232694000128', + timeUnixNano: String(time), + }, + ], + isMonotonic: true, + aggregationTemporality: 'AGGREGATION_TEMPORALITY_CUMULATIVE', + }, }); - assert.deepStrictEqual(metric.doubleDataPoints, undefined); - assert.deepStrictEqual(metric.summaryDataPoints, undefined); - assert.deepStrictEqual(metric.histogramDataPoints, undefined); - assert.ok(metric.int64DataPoints); - 
assert.deepStrictEqual(metric.int64DataPoints[0].labels, undefined); - assert.deepStrictEqual(metric.int64DataPoints[0].value, '1'); - assert.deepStrictEqual( - metric.int64DataPoints[0].startTimeUnixNano, - '1592602232694000128' - ); } export function ensureExportedObserverIsCorrect( - metric: collectorTypes.opentelemetryProto.metrics.v1.Metric + metric: collectorTypes.opentelemetryProto.metrics.v1.Metric, + time?: number ) { - assert.deepStrictEqual(metric.metricDescriptor, { - name: 'test-observer', + assert.deepStrictEqual(metric, { + name: 'double-observer', description: 'sample observer description', - unit: '2', - type: 'SUMMARY', - temporality: 'DELTA', - }); - - assert.deepStrictEqual(metric.int64DataPoints, undefined); - assert.deepStrictEqual(metric.doubleDataPoints, undefined); - assert.deepStrictEqual(metric.histogramDataPoints, undefined); - assert.ok(metric.summaryDataPoints); - assert.deepStrictEqual(metric.summaryDataPoints[0].labels, undefined); - assert.deepStrictEqual(metric.summaryDataPoints[0].sum, 9); - assert.deepStrictEqual(metric.summaryDataPoints[0].count, '2'); - assert.deepStrictEqual( - metric.summaryDataPoints[0].startTimeUnixNano, - '1592602232694000128' - ); - assert.deepStrictEqual(metric.summaryDataPoints[0].percentileValues, [ - { percentile: 0, value: 3 }, - { percentile: 100, value: 6 }, - ]); -} - -export function ensureExportedHistogramIsCorrect( - metric: collectorTypes.opentelemetryProto.metrics.v1.Metric -) { - assert.deepStrictEqual(metric.metricDescriptor, { - name: 'test-hist', - description: 'sample observer description', - unit: '2', - type: 'HISTOGRAM', - temporality: 'DELTA', + unit: '1', + doubleGauge: { + dataPoints: [ + { + value: 6, + startTimeUnixNano: '1592602232694000128', + timeUnixNano: String(time), + }, + ], + }, }); - assert.deepStrictEqual(metric.int64DataPoints, undefined); - assert.deepStrictEqual(metric.summaryDataPoints, undefined); - assert.deepStrictEqual(metric.doubleDataPoints, undefined); - assert.ok(metric.histogramDataPoints); - assert.deepStrictEqual(metric.histogramDataPoints[0].labels, undefined); - assert.deepStrictEqual(metric.histogramDataPoints[0].count, '2'); - assert.deepStrictEqual(metric.histogramDataPoints[0].sum, 21); - assert.deepStrictEqual(metric.histogramDataPoints[0].buckets, [ - { count: '1' }, - { count: '1' }, - { count: '0' }, - ]); - assert.deepStrictEqual(metric.histogramDataPoints[0].explicitBounds, [ - 10, - 20, - ]); - assert.deepStrictEqual( - metric.histogramDataPoints[0].startTimeUnixNano, - '1592602232694000128' - ); } export function ensureExportedValueRecorderIsCorrect( - metric: collectorTypes.opentelemetryProto.metrics.v1.Metric + metric: collectorTypes.opentelemetryProto.metrics.v1.Metric, + time?: number ) { - assert.deepStrictEqual(metric.metricDescriptor, { - name: 'test-recorder', + assert.deepStrictEqual(metric, { + name: 'int-recorder', description: 'sample recorder description', - unit: '3', - type: 'SUMMARY', - temporality: 'DELTA', + unit: '1', + intHistogram: { + dataPoints: [ + { + sum: '21', + count: '2', + startTimeUnixNano: '1592602232694000128', + timeUnixNano: time, + bucketCounts: ['2', '0'], + explicitBounds: ['Infinity'], + }, + ], + aggregationTemporality: 'AGGREGATION_TEMPORALITY_DELTA', + }, }); - assert.deepStrictEqual(metric.histogramDataPoints, undefined); - assert.deepStrictEqual(metric.int64DataPoints, undefined); - assert.deepStrictEqual(metric.doubleDataPoints, undefined); - assert.ok(metric.summaryDataPoints); - 
assert.deepStrictEqual(metric.summaryDataPoints[0].labels, undefined); - assert.deepStrictEqual( - metric.summaryDataPoints[0].startTimeUnixNano, - '1592602232694000128' - ); - assert.deepStrictEqual(metric.summaryDataPoints[0].percentileValues, [ - { percentile: 0, value: 5 }, - { percentile: 100, value: 5 }, - ]); - assert.deepStrictEqual(metric.summaryDataPoints[0].count, '1'); - assert.deepStrictEqual(metric.summaryDataPoints[0].sum, 5); } export function ensureExportTraceServiceRequestIsSet( @@ -466,7 +389,7 @@ export function ensureExportMetricsServiceRequestIsSet( const resourceMetrics = json.resourceMetrics; assert.strictEqual( resourceMetrics.length, - 4, + 1, 'resourceMetrics has incorrect length' ); @@ -489,8 +412,6 @@ export function ensureExportMetricsServiceRequestIsSet( 'instrumentationLibrary is missing' ); - const metric1 = resourceMetrics[0].instrumentationLibraryMetrics[0].metrics; - const metric2 = resourceMetrics[1].instrumentationLibraryMetrics[0].metrics; - assert.strictEqual(metric1.length, 1, 'Metrics are missing'); - assert.strictEqual(metric2.length, 1, 'Metrics are missing'); + const metrics = resourceMetrics[0].instrumentationLibraryMetrics[0].metrics; + assert.strictEqual(metrics.length, 3, 'Metrics are missing'); } diff --git a/packages/opentelemetry-exporter-collector/README.md b/packages/opentelemetry-exporter-collector/README.md index 076c817ce1..d0a6ffb90f 100644 --- a/packages/opentelemetry-exporter-collector/README.md +++ b/packages/opentelemetry-exporter-collector/README.md @@ -6,7 +6,7 @@ [![devDependencies][devDependencies-image]][devDependencies-url] [![Apache License][license-image]][license-image] -This module provides exporter for web and node to be used with [opentelemetry-collector][opentelemetry-collector-url] - last tested with version **0.6.0**. +This module provides exporter for web and node to be used with [opentelemetry-collector][opentelemetry-collector-url] - last tested with version **0.12.0**. 
## Installation diff --git a/packages/opentelemetry-exporter-collector/src/transformMetrics.ts b/packages/opentelemetry-exporter-collector/src/transformMetrics.ts index c35609860f..522188e24e 100644 --- a/packages/opentelemetry-exporter-collector/src/transformMetrics.ts +++ b/packages/opentelemetry-exporter-collector/src/transformMetrics.ts @@ -17,10 +17,8 @@ import { MetricRecord, MetricKind, - HistogramAggregator, - MinMaxLastSumCountAggregator, Histogram, - Distribution, + AggregatorKind, } from '@opentelemetry/metrics'; import { opentelemetryProto, CollectorExporterConfigBase } from './types'; import * as api from '@opentelemetry/api'; @@ -42,99 +40,47 @@ export function toCollectorLabels( }); } -/** - * Given a MetricDescriptor, return its type in a compatible format with the collector - * @param descriptor - */ -export function toCollectorType( - metric: MetricRecord -): opentelemetryProto.metrics.v1.MetricDescriptorType { - if ( - metric.descriptor.metricKind === MetricKind.COUNTER || - metric.descriptor.metricKind === MetricKind.SUM_OBSERVER - ) { - if (metric.descriptor.valueType === api.ValueType.INT) { - return opentelemetryProto.metrics.v1.MetricDescriptorType.MONOTONIC_INT64; - } - return opentelemetryProto.metrics.v1.MetricDescriptorType.MONOTONIC_DOUBLE; - } - if (metric.aggregator instanceof HistogramAggregator) { - return opentelemetryProto.metrics.v1.MetricDescriptorType.HISTOGRAM; - } - if (metric.aggregator instanceof MinMaxLastSumCountAggregator) { - return opentelemetryProto.metrics.v1.MetricDescriptorType.SUMMARY; - } - if (metric.descriptor.valueType == api.ValueType.INT) { - return opentelemetryProto.metrics.v1.MetricDescriptorType.INT64; - } - if (metric.descriptor.valueType === api.ValueType.DOUBLE) { - return opentelemetryProto.metrics.v1.MetricDescriptorType.DOUBLE; - } - - return opentelemetryProto.metrics.v1.MetricDescriptorType.INVALID_TYPE; -} - /** * Given a MetricDescriptor, return its temporality in a compatible format with the collector * @param descriptor */ -export function toCollectorTemporality( +export function toAggregationTemporality( metric: MetricRecord -): opentelemetryProto.metrics.v1.MetricDescriptorTemporality { +): opentelemetryProto.metrics.v1.AggregationTemporality { if ( metric.descriptor.metricKind === MetricKind.COUNTER || metric.descriptor.metricKind === MetricKind.SUM_OBSERVER ) { - return opentelemetryProto.metrics.v1.MetricDescriptorTemporality.CUMULATIVE; + return opentelemetryProto.metrics.v1.AggregationTemporality + .AGGREGATION_TEMPORALITY_CUMULATIVE; } if ( metric.descriptor.metricKind === MetricKind.UP_DOWN_COUNTER || metric.descriptor.metricKind === MetricKind.UP_DOWN_SUM_OBSERVER ) { - return opentelemetryProto.metrics.v1.MetricDescriptorTemporality.DELTA; + return opentelemetryProto.metrics.v1.AggregationTemporality + .AGGREGATION_TEMPORALITY_DELTA; } if ( metric.descriptor.metricKind === MetricKind.VALUE_OBSERVER || metric.descriptor.metricKind === MetricKind.VALUE_RECORDER ) { - // TODO: Change once LastValueAggregator is implemented. 
- // If the aggregator is LastValue or Exact, then it will be instantaneous - return opentelemetryProto.metrics.v1.MetricDescriptorTemporality.DELTA; + return opentelemetryProto.metrics.v1.AggregationTemporality + .AGGREGATION_TEMPORALITY_DELTA; } - return opentelemetryProto.metrics.v1.MetricDescriptorTemporality - .INVALID_TEMPORALITY; -} - -/** - * Given a MetricRecord, return the Collector compatible type of MetricDescriptor - * @param metric - */ -export function toCollectorMetricDescriptor( - metric: MetricRecord -): opentelemetryProto.metrics.v1.MetricDescriptor { - return { - name: metric.descriptor.name, - description: metric.descriptor.description, - unit: metric.descriptor.unit, - type: toCollectorType(metric), - temporality: toCollectorTemporality(metric), - }; + return opentelemetryProto.metrics.v1.AggregationTemporality + .AGGREGATION_TEMPORALITY_UNSPECIFIED; } /** - * Returns an Int64Point or DoublePoint to the collector + * Returns an DataPoint which can have integers or doublle values * @param metric * @param startTime */ -export function toSingularPoint( +export function toDataPoint( metric: MetricRecord, startTime: number -): { - labels: opentelemetryProto.common.v1.StringKeyValue[]; - startTimeUnixNano: number; - timeUnixNano: number; - value: number; -} { +): opentelemetryProto.metrics.v1.DataPoint { return { labels: toCollectorLabels(metric.labels), value: metric.aggregator.toPoint().value as number, @@ -164,40 +110,11 @@ export function toHistogramPoint( count: value.count, startTimeUnixNano: startTime, timeUnixNano: core.hrTimeToNanoseconds(timestamp), - buckets: value.buckets.counts.map(count => { - return { count }; - }), + bucketCounts: value.buckets.counts, explicitBounds: value.buckets.boundaries, }; } -/** - * Returns a SummaryPoint to the collector - * @param metric - * @param startTime - */ -export function toSummaryPoint( - metric: MetricRecord, - startTime: number -): opentelemetryProto.metrics.v1.SummaryDataPoint { - const { value, timestamp } = metric.aggregator.toPoint() as { - value: Distribution; - timestamp: HrTime; - }; - - return { - labels: toCollectorLabels(metric.labels), - sum: value.sum, - count: value.count, - startTimeUnixNano: startTime, - timeUnixNano: core.hrTimeToNanoseconds(timestamp), - percentileValues: [ - { percentile: 0, value: value.min }, - { percentile: 100, value: value.max }, - ], - }; -} - /** * Converts a metric to be compatible with the collector * @param metric @@ -207,41 +124,59 @@ export function toCollectorMetric( metric: MetricRecord, startTime: number ): opentelemetryProto.metrics.v1.Metric { - if ( - toCollectorType(metric) === - opentelemetryProto.metrics.v1.MetricDescriptorType.HISTOGRAM - ) { - return { - metricDescriptor: toCollectorMetricDescriptor(metric), - histogramDataPoints: [toHistogramPoint(metric, startTime)], - }; - } - if ( - toCollectorType(metric) === - opentelemetryProto.metrics.v1.MetricDescriptorType.SUMMARY - ) { - return { - metricDescriptor: toCollectorMetricDescriptor(metric), - summaryDataPoints: [toSummaryPoint(metric, startTime)], - }; - } - if (metric.descriptor.valueType == api.ValueType.INT) { - return { - metricDescriptor: toCollectorMetricDescriptor(metric), - int64DataPoints: [toSingularPoint(metric, startTime)], - }; - } - if (metric.descriptor.valueType === api.ValueType.DOUBLE) { - return { - metricDescriptor: toCollectorMetricDescriptor(metric), - doubleDataPoints: [toSingularPoint(metric, startTime)], - }; + const metricCollector: opentelemetryProto.metrics.v1.Metric = { + name: 
metric.descriptor.name, + description: metric.descriptor.description, + unit: metric.descriptor.unit, + }; + + switch (metric.aggregator.kind) { + case AggregatorKind.SUM: + { + const result = { + dataPoints: [toDataPoint(metric, startTime)], + isMonotonic: + metric.descriptor.metricKind === MetricKind.COUNTER || + metric.descriptor.metricKind === MetricKind.SUM_OBSERVER, + aggregationTemporality: toAggregationTemporality(metric), + }; + if (metric.descriptor.valueType === api.ValueType.INT) { + metricCollector.intSum = result; + } else { + metricCollector.doubleSum = result; + } + } + break; + + case AggregatorKind.LAST_VALUE: + { + const result = { + dataPoints: [toDataPoint(metric, startTime)], + }; + if (metric.descriptor.valueType === api.ValueType.INT) { + metricCollector.intGauge = result; + } else { + metricCollector.doubleGauge = result; + } + } + break; + + case AggregatorKind.HISTOGRAM: + { + const result = { + dataPoints: [toHistogramPoint(metric, startTime)], + aggregationTemporality: toAggregationTemporality(metric), + }; + if (metric.descriptor.valueType === api.ValueType.INT) { + metricCollector.intHistogram = result; + } else { + metricCollector.doubleHistogram = result; + } + } + break; } - return { - metricDescriptor: toCollectorMetricDescriptor(metric), - int64DataPoints: [], - }; + return metricCollector; } /** diff --git a/packages/opentelemetry-exporter-collector/src/types.ts b/packages/opentelemetry-exporter-collector/src/types.ts index 23ff4da1a5..9e612cc62d 100644 --- a/packages/opentelemetry-exporter-collector/src/types.ts +++ b/packages/opentelemetry-exporter-collector/src/types.ts @@ -48,25 +48,53 @@ export namespace opentelemetryProto { export namespace metrics.v1 { export interface Metric { - metricDescriptor: opentelemetryProto.metrics.v1.MetricDescriptor; - int64DataPoints?: opentelemetryProto.metrics.v1.Int64DataPoint[]; - doubleDataPoints?: opentelemetryProto.metrics.v1.DoubleDataPoint[]; - histogramDataPoints?: opentelemetryProto.metrics.v1.HistogramDataPoint[]; - summaryDataPoints?: opentelemetryProto.metrics.v1.SummaryDataPoint[]; + name: string; + description: string; + unit: string; + // data: + intGauge?: opentelemetryProto.metrics.v1.Gauge; + doubleGauge?: opentelemetryProto.metrics.v1.Gauge; + intSum?: opentelemetryProto.metrics.v1.Sum; + doubleSum?: opentelemetryProto.metrics.v1.Sum; + intHistogram?: opentelemetryProto.metrics.v1.Histogram; + doubleHistogram?: opentelemetryProto.metrics.v1.Histogram; + + // metricDescriptor: opentelemetryProto.metrics.v1.MetricDescriptor; + // int64DataPoints?: opentelemetryProto.metrics.v1.Int64DataPoint[]; + // doubleDataPoints?: opentelemetryProto.metrics.v1.DoubleDataPoint[]; + // histogramDataPoints?: opentelemetryProto.metrics.v1.HistogramDataPoint[]; + // summaryDataPoints?: opentelemetryProto.metrics.v1.SummaryDataPoint[]; + } + + export interface Gauge { + dataPoints: opentelemetryProto.metrics.v1.DataPoint[]; + } + + export interface Sum { + dataPoints: opentelemetryProto.metrics.v1.DataPoint[]; + aggregationTemporality: opentelemetryProto.metrics.v1.AggregationTemporality; + isMonotonic: boolean; + } + + export interface Histogram { + dataPoints: opentelemetryProto.metrics.v1.HistogramDataPoint[]; + aggregationTemporality: opentelemetryProto.metrics.v1.AggregationTemporality; } - export interface Int64DataPoint { + export interface DataPoint { labels: opentelemetryProto.common.v1.StringKeyValue[]; startTimeUnixNano: number; timeUnixNano: number; value: number; + exemplars?: 
opentelemetryProto.metrics.v1.Exemplar[]; } - export interface DoubleDataPoint { - labels: opentelemetryProto.common.v1.StringKeyValue[]; - startTimeUnixNano: number; + export interface Exemplar { + filteredLabels: opentelemetryProto.common.v1.StringKeyValue[]; timeUnixNano: number; value: number; + spanId: Uint8Array; + traceId: Uint8Array; } export interface HistogramDataPoint { @@ -75,41 +103,9 @@ export namespace opentelemetryProto { timeUnixNano: number; count: number; sum: number; - buckets?: opentelemetryProto.metrics.v1.HistogramDataPointBucket[]; + bucketCounts?: number[]; explicitBounds?: number[]; - } - - export interface HistogramDataPointBucket { - count: number; - exemplar?: opentelemetryProto.metrics.v1.HistogramExemplar; - } - - export interface HistogramExemplar { - value: number; - timeUnixNano: number; - attachments: opentelemetryProto.common.v1.StringKeyValue[]; - } - - export interface SummaryDataPoint { - labels: opentelemetryProto.common.v1.StringKeyValue[]; - startTimeUnixNano: number; - timeUnixNano: number; - count?: number; - sum?: number; - percentileValues: opentelemetryProto.metrics.v1.SummaryDataPointValueAtPercentile[]; - } - - export interface SummaryDataPointValueAtPercentile { - percentile: number; - value: number; - } - - export interface MetricDescriptor { - name: string; - description: string; - unit: string; - type: opentelemetryProto.metrics.v1.MetricDescriptorType; - temporality: opentelemetryProto.metrics.v1.MetricDescriptorTemporality; + exemplars?: opentelemetryProto.metrics.v1.Exemplar[][]; } export interface InstrumentationLibraryMetrics { @@ -122,21 +118,89 @@ export namespace opentelemetryProto { instrumentationLibraryMetrics: opentelemetryProto.metrics.v1.InstrumentationLibraryMetrics[]; } - export enum MetricDescriptorType { - INVALID_TYPE, - INT64, - MONOTONIC_INT64, - DOUBLE, - MONOTONIC_DOUBLE, - HISTOGRAM, - SUMMARY, - } - - export enum MetricDescriptorTemporality { - INVALID_TEMPORALITY, - INSTANTANEOUS, - DELTA, - CUMULATIVE, + // export enum MetricDescriptorType { + // INVALID_TYPE, + // INT64, + // MONOTONIC_INT64, + // DOUBLE, + // MONOTONIC_DOUBLE, + // HISTOGRAM, + // SUMMARY, + // } + + // export enum MetricDescriptorTemporality { + // INVALID_TEMPORALITY, + // INSTANTANEOUS, + // DELTA, + // CUMULATIVE, + // } + + export enum AggregationTemporality { + // UNSPECIFIED is the default AggregationTemporality, it MUST not be used. + AGGREGATION_TEMPORALITY_UNSPECIFIED = 0, + + // DELTA is an AggregationTemporality for a metric aggregator which reports + // changes since last report time. Successive metrics contain aggregation of + // values from continuous and non-overlapping intervals. + // + // The values for a DELTA metric are based only on the time interval + // associated with one measurement cycle. There is no dependency on + // previous measurements like is the case for CUMULATIVE metrics. + // + // For example, consider a system measuring the number of requests that + // it receives and reports the sum of these requests every second as a + // DELTA metric: + // + // 1. The system starts receiving at time=t_0. + // 2. A request is received, the system measures 1 request. + // 3. A request is received, the system measures 1 request. + // 4. A request is received, the system measures 1 request. + // 5. The 1 second collection cycle ends. A metric is exported for the + // number of requests received over the interval of time t_0 to + // t_0+1 with a value of 3. + // 6. A request is received, the system measures 1 request. 
+ // 7. A request is received, the system measures 1 request. + // 8. The 1 second collection cycle ends. A metric is exported for the + // number of requests received over the interval of time t_0+1 to + // t_0+2 with a value of 2. + AGGREGATION_TEMPORALITY_DELTA = 1, + + // CUMULATIVE is an AggregationTemporality for a metric aggregator which + // reports changes since a fixed start time. This means that current values + // of a CUMULATIVE metric depend on all previous measurements since the + // start time. Because of this, the sender is required to retain this state + // in some form. If this state is lost or invalidated, the CUMULATIVE metric + // values MUST be reset and a new fixed start time following the last + // reported measurement time sent MUST be used. + // + // For example, consider a system measuring the number of requests that + // it receives and reports the sum of these requests every second as a + // CUMULATIVE metric: + // + // 1. The system starts receiving at time=t_0. + // 2. A request is received, the system measures 1 request. + // 3. A request is received, the system measures 1 request. + // 4. A request is received, the system measures 1 request. + // 5. The 1 second collection cycle ends. A metric is exported for the + // number of requests received over the interval of time t_0 to + // t_0+1 with a value of 3. + // 6. A request is received, the system measures 1 request. + // 7. A request is received, the system measures 1 request. + // 8. The 1 second collection cycle ends. A metric is exported for the + // number of requests received over the interval of time t_0 to + // t_0+2 with a value of 5. + // 9. The system experiences a fault and loses state. + // 10. The system recovers and resumes receiving at time=t_1. + // 11. A request is received, the system measures 1 request. + // 12. The 1 second collection cycle ends. A metric is exported for the + // number of requests received over the interval of time t_1 to + // t_0+1 with a value of 1. + // + // Note: Even though, when reporting changes since last report time, using + // CUMULATIVE is valid, it is not recommended. This may cause problems for + // systems that do not use start_time to determine when the aggregation + // value was reset (e.g. Prometheus). 
+ AGGREGATION_TEMPORALITY_CUMULATIVE = 2, } } @@ -166,11 +230,11 @@ export namespace opentelemetryProto { export enum SpanKind { SPAN_KIND_UNSPECIFIED, - INTERNAL, - SERVER, - CLIENT, - PRODUCER, - CONSUMER, + SPAN_KIND_INTERNAL, + SPAN_KIND_SERVER, + SPAN_KIND_CLIENT, + SPAN_KIND_PRODUCER, + SPAN_KIND_CONSUMER, } export type TraceState = string | undefined; @@ -302,9 +366,12 @@ export interface CollectorExporterConfigBase { * Mapping between api SpanKind and proto SpanKind */ export const COLLECTOR_SPAN_KIND_MAPPING = { - [SpanKind.INTERNAL]: opentelemetryProto.trace.v1.Span.SpanKind.INTERNAL, - [SpanKind.SERVER]: opentelemetryProto.trace.v1.Span.SpanKind.SERVER, - [SpanKind.CLIENT]: opentelemetryProto.trace.v1.Span.SpanKind.CLIENT, - [SpanKind.PRODUCER]: opentelemetryProto.trace.v1.Span.SpanKind.PRODUCER, - [SpanKind.CONSUMER]: opentelemetryProto.trace.v1.Span.SpanKind.CONSUMER, + [SpanKind.INTERNAL]: + opentelemetryProto.trace.v1.Span.SpanKind.SPAN_KIND_INTERNAL, + [SpanKind.SERVER]: opentelemetryProto.trace.v1.Span.SpanKind.SPAN_KIND_SERVER, + [SpanKind.CLIENT]: opentelemetryProto.trace.v1.Span.SpanKind.SPAN_KIND_CLIENT, + [SpanKind.PRODUCER]: + opentelemetryProto.trace.v1.Span.SpanKind.SPAN_KIND_PRODUCER, + [SpanKind.CONSUMER]: + opentelemetryProto.trace.v1.Span.SpanKind.SPAN_KIND_CONSUMER, }; diff --git a/packages/opentelemetry-exporter-collector/test/browser/CollectorMetricExporter.test.ts b/packages/opentelemetry-exporter-collector/test/browser/CollectorMetricExporter.test.ts index 50b6279498..76c9d98225 100644 --- a/packages/opentelemetry-exporter-collector/test/browser/CollectorMetricExporter.test.ts +++ b/packages/opentelemetry-exporter-collector/test/browser/CollectorMetricExporter.test.ts @@ -29,12 +29,11 @@ import { ensureWebResourceIsCorrect, ensureExportMetricsServiceRequestIsSet, ensureHeadersContain, - mockHistogram, mockValueRecorder, ensureValueRecorderIsCorrect, - ensureHistogramIsCorrect, } from '../helper'; import { hrTimeToNanoseconds } from '@opentelemetry/core'; + const sendBeacon = navigator.sendBeacon; describe('CollectorMetricExporter - web', () => { @@ -44,22 +43,20 @@ describe('CollectorMetricExporter - web', () => { let spyBeacon: any; let metrics: MetricRecord[]; - beforeEach(() => { + beforeEach(async () => { spyOpen = sinon.stub(XMLHttpRequest.prototype, 'open'); spySend = sinon.stub(XMLHttpRequest.prototype, 'send'); spyBeacon = sinon.stub(navigator, 'sendBeacon'); metrics = []; - metrics.push(mockCounter()); - metrics.push(mockObserver()); - metrics.push(mockHistogram()); - metrics.push(mockValueRecorder()); + metrics.push(await mockCounter()); + metrics.push(await mockObserver()); + metrics.push(await mockValueRecorder()); metrics[0].aggregator.update(1); metrics[1].aggregator.update(3); metrics[1].aggregator.update(6); metrics[2].aggregator.update(7); metrics[2].aggregator.update(14); - metrics[3].aggregator.update(5); }); afterEach(() => { @@ -95,11 +92,10 @@ describe('CollectorMetricExporter - web', () => { const metric1 = json.resourceMetrics[0].instrumentationLibraryMetrics[0].metrics[0]; const metric2 = - json.resourceMetrics[1].instrumentationLibraryMetrics[0].metrics[0]; + json.resourceMetrics[0].instrumentationLibraryMetrics[0].metrics[1]; const metric3 = - json.resourceMetrics[2].instrumentationLibraryMetrics[0].metrics[0]; - const metric4 = - json.resourceMetrics[3].instrumentationLibraryMetrics[0].metrics[0]; + json.resourceMetrics[0].instrumentationLibraryMetrics[0].metrics[2]; + assert.ok(typeof metric1 !== 'undefined', "metric doesn't 
exist"); if (metric1) { ensureCounterIsCorrect( @@ -124,20 +120,10 @@ describe('CollectorMetricExporter - web', () => { "third metric doesn't exist" ); if (metric3) { - ensureHistogramIsCorrect( - metric3, - hrTimeToNanoseconds(metrics[2].aggregator.toPoint().timestamp) - ); - } - - assert.ok( - typeof metric4 !== 'undefined', - "fourth metric doesn't exist" - ); - if (metric4) { ensureValueRecorderIsCorrect( - metric4, - hrTimeToNanoseconds(metrics[3].aggregator.toPoint().timestamp) + metric3, + hrTimeToNanoseconds(metrics[2].aggregator.toPoint().timestamp), + true ); } @@ -227,11 +213,9 @@ describe('CollectorMetricExporter - web', () => { const metric1 = json.resourceMetrics[0].instrumentationLibraryMetrics[0].metrics[0]; const metric2 = - json.resourceMetrics[1].instrumentationLibraryMetrics[0].metrics[0]; + json.resourceMetrics[0].instrumentationLibraryMetrics[0].metrics[1]; const metric3 = - json.resourceMetrics[2].instrumentationLibraryMetrics[0].metrics[0]; - const metric4 = - json.resourceMetrics[3].instrumentationLibraryMetrics[0].metrics[0]; + json.resourceMetrics[0].instrumentationLibraryMetrics[0].metrics[2]; assert.ok(typeof metric1 !== 'undefined', "metric doesn't exist"); if (metric1) { ensureCounterIsCorrect( @@ -255,20 +239,10 @@ describe('CollectorMetricExporter - web', () => { "third metric doesn't exist" ); if (metric3) { - ensureHistogramIsCorrect( - metric3, - hrTimeToNanoseconds(metrics[2].aggregator.toPoint().timestamp) - ); - } - - assert.ok( - typeof metric4 !== 'undefined', - "fourth metric doesn't exist" - ); - if (metric4) { ensureValueRecorderIsCorrect( - metric4, - hrTimeToNanoseconds(metrics[3].aggregator.toPoint().timestamp) + metric3, + hrTimeToNanoseconds(metrics[2].aggregator.toPoint().timestamp), + true ); } diff --git a/packages/opentelemetry-exporter-collector/test/common/CollectorMetricExporter.test.ts b/packages/opentelemetry-exporter-collector/test/common/CollectorMetricExporter.test.ts index edf52252e4..08d27dc826 100644 --- a/packages/opentelemetry-exporter-collector/test/common/CollectorMetricExporter.test.ts +++ b/packages/opentelemetry-exporter-collector/test/common/CollectorMetricExporter.test.ts @@ -52,7 +52,7 @@ describe('CollectorMetricExporter - common', () => { describe('constructor', () => { let onInitSpy: any; - beforeEach(() => { + beforeEach(async () => { onInitSpy = sinon.stub(CollectorMetricExporter.prototype, 'onInit'); collectorExporterConfig = { hostname: 'foo', @@ -63,8 +63,8 @@ describe('CollectorMetricExporter - common', () => { }; collectorExporter = new CollectorMetricExporter(collectorExporterConfig); metrics = []; - metrics.push(mockCounter()); - metrics.push(mockObserver()); + metrics.push(await mockCounter()); + metrics.push(await mockObserver()); }); afterEach(() => { diff --git a/packages/opentelemetry-exporter-collector/test/common/transformMetrics.test.ts b/packages/opentelemetry-exporter-collector/test/common/transformMetrics.test.ts index 5c725a9007..3b951b2149 100644 --- a/packages/opentelemetry-exporter-collector/test/common/transformMetrics.test.ts +++ b/packages/opentelemetry-exporter-collector/test/common/transformMetrics.test.ts @@ -19,42 +19,26 @@ import { mockCounter, mockDoubleCounter, mockObserver, - mockedResources, - mockedInstrumentationLibraries, - multiResourceMetrics, - multiInstrumentationLibraryMetrics, ensureCounterIsCorrect, ensureDoubleCounterIsCorrect, ensureObserverIsCorrect, - mockHistogram, - ensureHistogramIsCorrect, ensureValueRecorderIsCorrect, mockValueRecorder, + mockedResources, + 
mockedInstrumentationLibraries, + multiResourceMetricsGet, + multiInstrumentationLibraryMetricsGet, } from '../helper'; import { MetricRecord, SumAggregator } from '@opentelemetry/metrics'; import { hrTimeToNanoseconds } from '@opentelemetry/core'; import { Resource } from '@opentelemetry/resources'; describe('transformMetrics', () => { - describe('toCollectorMetric', () => { - const counter: MetricRecord = mockCounter(); - const doubleCounter: MetricRecord = mockDoubleCounter(); - const observer: MetricRecord = mockObserver(); - const histogram: MetricRecord = mockHistogram(); - const recorder: MetricRecord = mockValueRecorder(); - const invalidMetric: MetricRecord = { - descriptor: { - name: 'name', - description: 'description', - unit: 'unit', - metricKind: 8, // Not a valid metricKind - valueType: 2, // Not double or int - }, - labels: {}, - aggregator: new SumAggregator(), - resource: new Resource({}), - instrumentationLibrary: { name: 'x', version: 'y' }, - }; + describe('toCollectorMetric', async () => { + const counter: MetricRecord = await mockCounter(); + const doubleCounter: MetricRecord = await mockDoubleCounter(); + const observer: MetricRecord = await mockObserver(); + const recorder: MetricRecord = await mockValueRecorder(); beforeEach(() => { // Counter counter.aggregator.update(1); @@ -66,12 +50,9 @@ describe('transformMetrics', () => { observer.aggregator.update(3); observer.aggregator.update(6); - // Histogram - histogram.aggregator.update(7); - histogram.aggregator.update(14); - // ValueRecorder - recorder.aggregator.update(5); + recorder.aggregator.update(7); + recorder.aggregator.update(14); }); it('should convert metric', () => { @@ -83,10 +64,6 @@ describe('transformMetrics', () => { transform.toCollectorMetric(observer, 1592602232694000000), hrTimeToNanoseconds(observer.aggregator.toPoint().timestamp) ); - ensureHistogramIsCorrect( - transform.toCollectorMetric(histogram, 1592602232694000000), - hrTimeToNanoseconds(histogram.aggregator.toPoint().timestamp) - ); ensureValueRecorderIsCorrect( transform.toCollectorMetric(recorder, 1592602232694000000), @@ -97,12 +74,6 @@ describe('transformMetrics', () => { transform.toCollectorMetric(doubleCounter, 1592602232694000000), hrTimeToNanoseconds(doubleCounter.aggregator.toPoint().timestamp) ); - - const emptyMetric = transform.toCollectorMetric( - invalidMetric, - 1592602232694000000 - ); - assert.deepStrictEqual(emptyMetric.int64DataPoints, []); }); it('should convert metric labels value to string', () => { @@ -122,50 +93,52 @@ describe('transformMetrics', () => { }, 1592602232694000000 ); - const collectorMetric = - metric.int64DataPoints && metric.int64DataPoints[0]; + const collectorMetric = metric.intSum?.dataPoints[0]; assert.strictEqual(collectorMetric?.labels[0].value, '1'); }); }); - describe('toCollectorMetricDescriptor', () => { - describe('groupMetricsByResourceAndLibrary', () => { - it('should group by resource', () => { - const [resource1, resource2] = mockedResources; - const [library] = mockedInstrumentationLibraries; - const [metric1, metric2, metric3] = multiResourceMetrics; - const expected = new Map([ - [resource1, new Map([[library, [metric1, metric3]]])], - [resource2, new Map([[library, [metric2]]])], - ]); + describe('groupMetricsByResourceAndLibrary', () => { + it('should group by resource', async () => { + const [resource1, resource2] = mockedResources; + const [library] = mockedInstrumentationLibraries; + const [metric1, metric2, metric3] = await multiResourceMetricsGet(); - const result = 
transform.groupMetricsByResourceAndLibrary( - multiResourceMetrics - ); + const expected = new Map([ + [resource1, new Map([[library, [metric1, metric3]]])], + [resource2, new Map([[library, [metric2]]])], + ]); - assert.deepStrictEqual(result, expected); - }); + const result = transform.groupMetricsByResourceAndLibrary( + await multiResourceMetricsGet() + ); - it('should group by instrumentation library', () => { - const [resource] = mockedResources; - const [lib1, lib2] = mockedInstrumentationLibraries; - const [metric1, metric2, metric3] = multiInstrumentationLibraryMetrics; - const expected = new Map([ - [ - resource, - new Map([ - [lib1, [metric1, metric3]], - [lib2, [metric2]], - ]), - ], - ]); + assert.deepStrictEqual(result, expected); + }); - const result = transform.groupMetricsByResourceAndLibrary( - multiInstrumentationLibraryMetrics - ); + it('should group by instrumentation library', async () => { + const [resource] = mockedResources; + const [lib1, lib2] = mockedInstrumentationLibraries; + const [ + metric1, + metric2, + metric3, + ] = await multiInstrumentationLibraryMetricsGet(); + const expected = new Map([ + [ + resource, + new Map([ + [lib1, [metric1, metric3]], + [lib2, [metric2]], + ]), + ], + ]); + + const result = transform.groupMetricsByResourceAndLibrary( + await multiInstrumentationLibraryMetricsGet() + ); - assert.deepStrictEqual(result, expected); - }); + assert.deepStrictEqual(result, expected); }); }); }); diff --git a/packages/opentelemetry-exporter-collector/test/helper.ts b/packages/opentelemetry-exporter-collector/test/helper.ts index b74c09ac20..20df72696d 100644 --- a/packages/opentelemetry-exporter-collector/test/helper.ts +++ b/packages/opentelemetry-exporter-collector/test/helper.ts @@ -17,17 +17,22 @@ import { TraceFlags, ValueType } from '@opentelemetry/api'; import { ReadableSpan } from '@opentelemetry/tracing'; import { Resource } from '@opentelemetry/resources'; +import { MetricRecord, MeterProvider } from '@opentelemetry/metrics'; +import { InstrumentationLibrary } from '@opentelemetry/core'; import * as assert from 'assert'; import { opentelemetryProto } from '../src/types'; import * as collectorTypes from '../src/types'; -import { - MetricRecord, - MetricKind, - SumAggregator, - MinMaxLastSumCountAggregator, - HistogramAggregator, -} from '@opentelemetry/metrics'; -import { InstrumentationLibrary } from '@opentelemetry/core'; + +const meterProvider = new MeterProvider({ + interval: 30000, + resource: new Resource({ + service: 'ui', + version: 1, + cost: 112.12, + }), +}); + +const meter = meterProvider.getMeter('default', '0.0.1'); if (typeof Buffer === 'undefined') { (window as any).Buffer = { @@ -37,104 +42,61 @@ if (typeof Buffer === 'undefined') { }; } -export function mockCounter(): MetricRecord { - return { - descriptor: { - name: 'test-counter', +export async function mockCounter(): Promise { + const name = 'int-counter'; + const metric = + meter['_metrics'].get(name) || + meter.createCounter(name, { description: 'sample counter description', - unit: '1', - metricKind: MetricKind.COUNTER, valueType: ValueType.INT, - }, - labels: {}, - aggregator: new SumAggregator(), - resource: new Resource({ - service: 'ui', - version: 1, - cost: 112.12, - }), - instrumentationLibrary: { name: 'default', version: '0.0.1' }, - }; + }); + metric.clear(); + metric.bind({}); + + return (await metric.getMetricRecord())[0]; } -export function mockDoubleCounter(): MetricRecord { - return { - descriptor: { - name: 'test-counter', +export async function 
mockDoubleCounter(): Promise { + const name = 'double-counter'; + const metric = + meter['_metrics'].get(name) || + meter.createCounter(name, { description: 'sample counter description', - unit: '1', - metricKind: MetricKind.COUNTER, valueType: ValueType.DOUBLE, - }, - labels: {}, - aggregator: new SumAggregator(), - resource: new Resource({ - service: 'ui', - version: 1, - cost: 112.12, - }), - instrumentationLibrary: { name: 'default', version: '0.0.1' }, - }; + }); + metric.clear(); + metric.bind({}); + + return (await metric.getMetricRecord())[0]; } -export function mockObserver(): MetricRecord { - return { - descriptor: { - name: 'test-observer', +export async function mockObserver(): Promise { + const name = 'double-observer'; + const metric = + meter['_metrics'].get(name) || + meter.createValueObserver(name, { description: 'sample observer description', - unit: '2', - metricKind: MetricKind.VALUE_OBSERVER, valueType: ValueType.DOUBLE, - }, - labels: {}, - aggregator: new MinMaxLastSumCountAggregator(), - resource: new Resource({ - service: 'ui', - version: 1, - cost: 112.12, - }), - instrumentationLibrary: { name: 'default', version: '0.0.1' }, - }; + }); + metric.clear(); + metric.bind({}); + + return (await metric.getMetricRecord())[0]; } -export function mockValueRecorder(): MetricRecord { - return { - descriptor: { - name: 'test-recorder', +export async function mockValueRecorder(): Promise { + const name = 'int-recorder'; + const metric = + meter['_metrics'].get(name) || + meter.createValueRecorder(name, { description: 'sample recorder description', - unit: '3', - metricKind: MetricKind.VALUE_RECORDER, valueType: ValueType.INT, - }, - labels: {}, - aggregator: new MinMaxLastSumCountAggregator(), - resource: new Resource({ - service: 'ui', - version: 1, - cost: 112.12, - }), - instrumentationLibrary: { name: 'default', version: '0.0.1' }, - }; -} + boundaries: [0, 100], + }); + metric.clear(); + metric.bind({}); -export function mockHistogram(): MetricRecord { - return { - descriptor: { - name: 'test-hist', - description: 'sample observer description', - unit: '2', - metricKind: MetricKind.VALUE_OBSERVER, - valueType: ValueType.DOUBLE, - }, - labels: {}, - aggregator: new HistogramAggregator([10, 20]), - resource: new Resource({ - service: 'ui', - version: 1, - cost: 112.12, - }), - instrumentationLibrary: { name: 'default', version: '0.0.1' }, - }; + return (await metric.getMetricRecord())[0]; } const traceIdBase64 = 'HxAI3I4nDoXECg18OTmyeA=='; @@ -289,41 +251,49 @@ export const multiResourceTrace: ReadableSpan[] = [ }, ]; -export const multiResourceMetrics: MetricRecord[] = [ - { - ...mockCounter(), - resource: mockedResources[0], - instrumentationLibrary: mockedInstrumentationLibraries[0], - }, - { - ...mockObserver(), - resource: mockedResources[1], - instrumentationLibrary: mockedInstrumentationLibraries[0], - }, - { - ...mockCounter(), - resource: mockedResources[0], - instrumentationLibrary: mockedInstrumentationLibraries[0], - }, -]; +export const multiResourceMetricsGet = async function (): Promise< + MetricRecord[] +> { + return [ + { + ...(await mockCounter()), + resource: mockedResources[0], + instrumentationLibrary: mockedInstrumentationLibraries[0], + }, + { + ...(await mockObserver()), + resource: mockedResources[1], + instrumentationLibrary: mockedInstrumentationLibraries[0], + }, + { + ...(await mockCounter()), + resource: mockedResources[0], + instrumentationLibrary: mockedInstrumentationLibraries[0], + }, + ]; +}; -export const 
multiInstrumentationLibraryMetrics: MetricRecord[] = [ - { - ...mockCounter(), - resource: mockedResources[0], - instrumentationLibrary: mockedInstrumentationLibraries[0], - }, - { - ...mockObserver(), - resource: mockedResources[0], - instrumentationLibrary: mockedInstrumentationLibraries[1], - }, - { - ...mockCounter(), - resource: mockedResources[0], - instrumentationLibrary: mockedInstrumentationLibraries[0], - }, -]; +export const multiInstrumentationLibraryMetricsGet = async function (): Promise< + MetricRecord[] +> { + return [ + { + ...(await mockCounter()), + resource: mockedResources[0], + instrumentationLibrary: mockedInstrumentationLibraries[0], + }, + { + ...(await mockObserver()), + resource: mockedResources[0], + instrumentationLibrary: mockedInstrumentationLibraries[1], + }, + { + ...(await mockCounter()), + resource: mockedResources[0], + instrumentationLibrary: mockedInstrumentationLibraries[0], + }, + ]; +}; export const multiInstrumentationLibraryTrace: ReadableSpan[] = [ { @@ -462,7 +432,7 @@ export function ensureSpanIsCorrect( assert.strictEqual(span.name, 'documentFetch', 'name is wrong'); assert.strictEqual( span.kind, - opentelemetryProto.trace.v1.Span.SpanKind.INTERNAL, + opentelemetryProto.trace.v1.Span.SpanKind.SPAN_KIND_INTERNAL, 'kind is wrong' ); assert.strictEqual( @@ -524,25 +494,23 @@ export function ensureCounterIsCorrect( time: number ) { assert.deepStrictEqual(metric, { - metricDescriptor: { - name: 'test-counter', - description: 'sample counter description', - unit: '1', - type: - collectorTypes.opentelemetryProto.metrics.v1.MetricDescriptorType - .MONOTONIC_INT64, - temporality: - collectorTypes.opentelemetryProto.metrics.v1.MetricDescriptorTemporality - .CUMULATIVE, + name: 'int-counter', + description: 'sample counter description', + unit: '1', + intSum: { + dataPoints: [ + { + labels: [], + value: 1, + startTimeUnixNano: 1592602232694000000, + timeUnixNano: time, + }, + ], + isMonotonic: true, + aggregationTemporality: + collectorTypes.opentelemetryProto.metrics.v1.AggregationTemporality + .AGGREGATION_TEMPORALITY_CUMULATIVE, }, - int64DataPoints: [ - { - labels: [], - value: 1, - startTimeUnixNano: 1592602232694000000, - timeUnixNano: time, - }, - ], }); } @@ -551,25 +519,23 @@ export function ensureDoubleCounterIsCorrect( time: number ) { assert.deepStrictEqual(metric, { - metricDescriptor: { - name: 'test-counter', - description: 'sample counter description', - unit: '1', - type: - collectorTypes.opentelemetryProto.metrics.v1.MetricDescriptorType - .MONOTONIC_DOUBLE, - temporality: - collectorTypes.opentelemetryProto.metrics.v1.MetricDescriptorTemporality - .CUMULATIVE, + name: 'double-counter', + description: 'sample counter description', + unit: '1', + doubleSum: { + dataPoints: [ + { + labels: [], + value: 8, + startTimeUnixNano: 1592602232694000000, + timeUnixNano: time, + }, + ], + isMonotonic: true, + aggregationTemporality: + collectorTypes.opentelemetryProto.metrics.v1.AggregationTemporality + .AGGREGATION_TEMPORALITY_CUMULATIVE, }, - doubleDataPoints: [ - { - labels: [], - value: 8, - startTimeUnixNano: 1592602232694000000, - timeUnixNano: time, - }, - ], }); } @@ -578,95 +544,47 @@ export function ensureObserverIsCorrect( time: number ) { assert.deepStrictEqual(metric, { - metricDescriptor: { - name: 'test-observer', - description: 'sample observer description', - unit: '2', - type: - collectorTypes.opentelemetryProto.metrics.v1.MetricDescriptorType - .SUMMARY, - temporality: - 
collectorTypes.opentelemetryProto.metrics.v1.MetricDescriptorTemporality - .DELTA, + name: 'double-observer', + description: 'sample observer description', + unit: '1', + doubleGauge: { + dataPoints: [ + { + labels: [], + value: 6, + startTimeUnixNano: 1592602232694000000, + timeUnixNano: time, + }, + ], }, - summaryDataPoints: [ - { - startTimeUnixNano: 1592602232694000000, - timeUnixNano: time, - count: 2, - sum: 9, - labels: [], - percentileValues: [ - { - percentile: 0, - value: 3, - }, - { percentile: 100, value: 6 }, - ], - }, - ], }); } export function ensureValueRecorderIsCorrect( metric: collectorTypes.opentelemetryProto.metrics.v1.Metric, - time: number + time: number, + infinityIsNull?: boolean ) { assert.deepStrictEqual(metric, { - metricDescriptor: { - name: 'test-recorder', - description: 'sample recorder description', - unit: '3', - type: - collectorTypes.opentelemetryProto.metrics.v1.MetricDescriptorType - .SUMMARY, - temporality: - collectorTypes.opentelemetryProto.metrics.v1.MetricDescriptorTemporality - .DELTA, - }, - summaryDataPoints: [ - { - count: 1, - sum: 5, - labels: [], - percentileValues: [ - { percentile: 0, value: 5 }, - { percentile: 100, value: 5 }, - ], - startTimeUnixNano: 1592602232694000000, - timeUnixNano: time, - }, - ], - }); -} - -export function ensureHistogramIsCorrect( - metric: collectorTypes.opentelemetryProto.metrics.v1.Metric, - time: number -) { - assert.deepStrictEqual(metric, { - metricDescriptor: { - name: 'test-hist', - description: 'sample observer description', - unit: '2', - type: - collectorTypes.opentelemetryProto.metrics.v1.MetricDescriptorType - .HISTOGRAM, - temporality: - collectorTypes.opentelemetryProto.metrics.v1.MetricDescriptorTemporality - .DELTA, + name: 'int-recorder', + description: 'sample recorder description', + unit: '1', + intHistogram: { + dataPoints: [ + { + labels: [], + sum: 21, + count: 2, + startTimeUnixNano: 1592602232694000000, + timeUnixNano: time, + bucketCounts: [2, 0], + explicitBounds: [infinityIsNull ? 
null : Infinity], + }, + ], + aggregationTemporality: + collectorTypes.opentelemetryProto.metrics.v1.AggregationTemporality + .AGGREGATION_TEMPORALITY_DELTA, }, - histogramDataPoints: [ - { - labels: [], - buckets: [{ count: 1 }, { count: 1 }, { count: 0 }], - count: 2, - sum: 21, - explicitBounds: [10, 20], - startTimeUnixNano: 1592602232694000000, - timeUnixNano: time, - }, - ], }); } @@ -709,7 +627,7 @@ export function ensureExportMetricsServiceRequestIsSet( const resourceMetrics = json.resourceMetrics; assert.strictEqual( resourceMetrics.length, - 4, + 1, 'resourceMetrics has incorrect length' ); @@ -732,10 +650,8 @@ export function ensureExportMetricsServiceRequestIsSet( 'instrumentationLibrary is missing' ); - const metric1 = resourceMetrics[0].instrumentationLibraryMetrics[0].metrics; - const metric2 = resourceMetrics[1].instrumentationLibraryMetrics[0].metrics; - assert.strictEqual(metric1.length, 1, 'Metrics are missing'); - assert.strictEqual(metric2.length, 1, 'Metrics are missing'); + const metrics = resourceMetrics[0].instrumentationLibraryMetrics[0].metrics; + assert.strictEqual(metrics.length, 3, 'Metrics are missing'); } export function ensureHeadersContain( diff --git a/packages/opentelemetry-exporter-collector/test/node/CollectorMetricExporter.test.ts b/packages/opentelemetry-exporter-collector/test/node/CollectorMetricExporter.test.ts index 372fe44133..d7a72c3254 100644 --- a/packages/opentelemetry-exporter-collector/test/node/CollectorMetricExporter.test.ts +++ b/packages/opentelemetry-exporter-collector/test/node/CollectorMetricExporter.test.ts @@ -26,12 +26,10 @@ import * as collectorTypes from '../../src/types'; import { mockCounter, mockObserver, - mockHistogram, ensureExportMetricsServiceRequestIsSet, ensureCounterIsCorrect, mockValueRecorder, ensureValueRecorderIsCorrect, - ensureHistogramIsCorrect, ensureObserverIsCorrect, } from '../helper'; import { MetricRecord } from '@opentelemetry/metrics'; @@ -75,7 +73,7 @@ describe('CollectorMetricExporter - node with json over http', () => { }); describe('export', () => { - beforeEach(() => { + beforeEach(async () => { spyRequest = sinon.stub(http, 'request').returns(fakeRequest as any); spyWrite = sinon.stub(fakeRequest, 'write'); collectorExporterConfig = { @@ -94,17 +92,16 @@ describe('CollectorMetricExporter - node with json over http', () => { value: 1592602232694000000, }); metrics = []; - metrics.push(mockCounter()); - metrics.push(mockObserver()); - metrics.push(mockHistogram()); - metrics.push(mockValueRecorder()); + metrics.push(await mockCounter()); + metrics.push(await mockObserver()); + metrics.push(await mockValueRecorder()); metrics[0].aggregator.update(1); metrics[1].aggregator.update(3); metrics[1].aggregator.update(6); metrics[2].aggregator.update(7); metrics[2].aggregator.update(14); - metrics[3].aggregator.update(5); }); + afterEach(() => { spyRequest.restore(); spyWrite.restore(); @@ -146,11 +143,10 @@ describe('CollectorMetricExporter - node with json over http', () => { const metric1 = json.resourceMetrics[0].instrumentationLibraryMetrics[0].metrics[0]; const metric2 = - json.resourceMetrics[1].instrumentationLibraryMetrics[0].metrics[0]; + json.resourceMetrics[0].instrumentationLibraryMetrics[0].metrics[1]; const metric3 = - json.resourceMetrics[2].instrumentationLibraryMetrics[0].metrics[0]; - const metric4 = - json.resourceMetrics[3].instrumentationLibraryMetrics[0].metrics[0]; + json.resourceMetrics[0].instrumentationLibraryMetrics[0].metrics[2]; + assert.ok(typeof metric1 !== 'undefined', 
"counter doesn't exist"); ensureCounterIsCorrect( metric1, @@ -162,17 +158,10 @@ describe('CollectorMetricExporter - node with json over http', () => { core.hrTimeToNanoseconds(metrics[1].aggregator.toPoint().timestamp) ); assert.ok(typeof metric3 !== 'undefined', "histogram doesn't exist"); - ensureHistogramIsCorrect( - metric3, - core.hrTimeToNanoseconds(metrics[2].aggregator.toPoint().timestamp) - ); - assert.ok( - typeof metric4 !== 'undefined', - "value recorder doesn't exist" - ); ensureValueRecorderIsCorrect( - metric4, - core.hrTimeToNanoseconds(metrics[3].aggregator.toPoint().timestamp) + metric3, + core.hrTimeToNanoseconds(metrics[2].aggregator.toPoint().timestamp), + true ); ensureExportMetricsServiceRequestIsSet(json); diff --git a/packages/opentelemetry-exporter-prometheus/src/PrometheusSerializer.ts b/packages/opentelemetry-exporter-prometheus/src/PrometheusSerializer.ts index 7e70eb986f..8e345d2ed3 100644 --- a/packages/opentelemetry-exporter-prometheus/src/PrometheusSerializer.ts +++ b/packages/opentelemetry-exporter-prometheus/src/PrometheusSerializer.ts @@ -16,7 +16,6 @@ import { MetricRecord, AggregatorKind, - Distribution, MetricKind, } from '@opentelemetry/metrics'; import { PrometheusCheckpoint } from './types'; @@ -94,8 +93,6 @@ function toPrometheusType( return 'gauge'; case AggregatorKind.LAST_VALUE: return 'gauge'; - case AggregatorKind.DISTRIBUTION: - return 'summary'; case AggregatorKind.HISTOGRAM: return 'histogram'; default: @@ -194,38 +191,6 @@ export class PrometheusSerializer { ); break; } - case AggregatorKind.DISTRIBUTION: { - const { value, timestamp: hrtime } = record.aggregator.toPoint(); - const timestamp = hrTimeToMilliseconds(hrtime); - for (const key of ['count', 'sum'] as (keyof Distribution)[]) { - results += stringify( - name + '_' + key, - record.labels, - value[key], - this._appendTimestamp ? timestamp : undefined, - undefined - ); - } - results += stringify( - name, - record.labels, - value.min, - this._appendTimestamp ? timestamp : undefined, - { - quantile: '0', - } - ); - results += stringify( - name, - record.labels, - value.max, - this._appendTimestamp ? 
timestamp : undefined, - { - quantile: '1', - } - ); - break; - } case AggregatorKind.HISTOGRAM: { const { value, timestamp: hrtime } = record.aggregator.toPoint(); const timestamp = hrTimeToMilliseconds(hrtime); diff --git a/packages/opentelemetry-exporter-prometheus/test/PrometheusExporter.test.ts b/packages/opentelemetry-exporter-prometheus/test/PrometheusExporter.test.ts index 8936da63ab..f16fb47256 100644 --- a/packages/opentelemetry-exporter-prometheus/test/PrometheusExporter.test.ts +++ b/packages/opentelemetry-exporter-prometheus/test/PrometheusExporter.test.ts @@ -20,7 +20,8 @@ import { SumAggregator, Meter, MeterProvider, - MinMaxLastSumCountAggregator, + LastValueAggregator, + HistogramAggregator, } from '@opentelemetry/metrics'; import * as assert from 'assert'; import * as http from 'http'; @@ -29,7 +30,8 @@ import { mockAggregator, mockedHrTimeMs } from './util'; describe('PrometheusExporter', () => { mockAggregator(SumAggregator); - mockAggregator(MinMaxLastSumCountAggregator); + mockAggregator(LastValueAggregator); + mockAggregator(HistogramAggregator); describe('constructor', () => { it('should construct an exporter', done => { @@ -263,14 +265,10 @@ describe('PrometheusExporter', () => { assert.deepStrictEqual(lines, [ '# HELP metric_observer a test description', - '# TYPE metric_observer summary', - `metric_observer_count{pid="123",core="1"} 1 ${mockedHrTimeMs}`, - `metric_observer_sum{pid="123",core="1"} 0.999 ${mockedHrTimeMs}`, - `metric_observer{pid="123",core="1",quantile="0"} 0.999 ${mockedHrTimeMs}`, - `metric_observer{pid="123",core="1",quantile="1"} 0.999 ${mockedHrTimeMs}`, + '# TYPE metric_observer gauge', + `metric_observer{pid="123",core="1"} 0.999 ${mockedHrTimeMs}`, '', ]); - done(); }); }) @@ -537,11 +535,11 @@ describe('PrometheusExporter', () => { assert.deepStrictEqual(lines, [ '# HELP value_recorder a test description', - '# TYPE value_recorder summary', + '# TYPE value_recorder histogram', `value_recorder_count{key1="labelValue1"} 1 ${mockedHrTimeMs}`, `value_recorder_sum{key1="labelValue1"} 20 ${mockedHrTimeMs}`, - `value_recorder{key1="labelValue1",quantile="0"} 20 ${mockedHrTimeMs}`, - `value_recorder{key1="labelValue1",quantile="1"} 20 ${mockedHrTimeMs}`, + `value_recorder_bucket{key1="labelValue1",le="Infinity"} 1 ${mockedHrTimeMs}`, + `value_recorder_bucket{key1="labelValue1",le="+Inf"} 0 ${mockedHrTimeMs}`, '', ]); diff --git a/packages/opentelemetry-exporter-prometheus/test/PrometheusSerializer.test.ts b/packages/opentelemetry-exporter-prometheus/test/PrometheusSerializer.test.ts index b706e635d9..ff4eeca62d 100644 --- a/packages/opentelemetry-exporter-prometheus/test/PrometheusSerializer.test.ts +++ b/packages/opentelemetry-exporter-prometheus/test/PrometheusSerializer.test.ts @@ -15,12 +15,13 @@ */ import { SumAggregator, - MinMaxLastSumCountAggregator, HistogramAggregator, + LastValueAggregator, MeterProvider, CounterMetric, ValueRecorderMetric, UpDownCounterMetric, + ValueObserverMetric, } from '@opentelemetry/metrics'; import * as assert from 'assert'; import { Labels } from '@opentelemetry/api'; @@ -88,19 +89,24 @@ describe('PrometheusSerializer', () => { }); }); - describe('with MinMaxLastSumCountAggregator', () => { - mockAggregator(MinMaxLastSumCountAggregator); + describe('with LastValueAggregator', () => { + mockAggregator(LastValueAggregator); - it('should serialize metric record with sum aggregator', async () => { + it('should serialize metric record with LastValue aggregator', async () => { const serializer = new 
PrometheusSerializer(); const meter = new MeterProvider({ - batcher: new ExactBatcher(MinMaxLastSumCountAggregator), + batcher: new ExactBatcher(LastValueAggregator), }).getMeter('test'); - const counter = meter.createCounter('test') as CounterMetric; - counter.bind(labels).add(1); - - const records = await counter.getMetricRecord(); + const observer = meter.createValueObserver( + 'test', + {}, + observerResult => { + observerResult.observe(1, labels); + } + ) as ValueObserverMetric; + await meter.collect(); + const records = await observer.getMetricRecord(); const record = records[0]; const result = serializer.serializeRecord( @@ -109,10 +115,7 @@ describe('PrometheusSerializer', () => { ); assert.strictEqual( result, - `test_count{foo1="bar1",foo2="bar2"} 1 ${mockedHrTimeMs}\n` + - `test_sum{foo1="bar1",foo2="bar2"} 1 ${mockedHrTimeMs}\n` + - `test{foo1="bar1",foo2="bar2",quantile="0"} 1 ${mockedHrTimeMs}\n` + - `test{foo1="bar1",foo2="bar2",quantile="1"} 1 ${mockedHrTimeMs}\n` + `test{foo1="bar1",foo2="bar2"} 1 ${mockedHrTimeMs}\n` ); }); @@ -120,25 +123,24 @@ describe('PrometheusSerializer', () => { const serializer = new PrometheusSerializer(undefined, false); const meter = new MeterProvider({ - batcher: new ExactBatcher(MinMaxLastSumCountAggregator), + batcher: new ExactBatcher(LastValueAggregator), }).getMeter('test'); - const counter = meter.createCounter('test') as CounterMetric; - counter.bind(labels).add(1); - - const records = await counter.getMetricRecord(); + const observer = meter.createValueObserver( + 'test', + {}, + observerResult => { + observerResult.observe(1, labels); + } + ) as ValueObserverMetric; + await meter.collect(); + const records = await observer.getMetricRecord(); const record = records[0]; const result = serializer.serializeRecord( record.descriptor.name, record ); - assert.strictEqual( - result, - 'test_count{foo1="bar1",foo2="bar2"} 1\n' + - 'test_sum{foo1="bar1",foo2="bar2"} 1\n' + - 'test{foo1="bar1",foo2="bar2",quantile="0"} 1\n' + - 'test{foo1="bar1",foo2="bar2",quantile="1"} 1\n' - ); + assert.strictEqual(result, 'test{foo1="bar1",foo2="bar2"} 1\n'); }); }); @@ -262,23 +264,27 @@ describe('PrometheusSerializer', () => { }); }); - describe('with MinMaxLastSumCountAggregator', () => { - mockAggregator(MinMaxLastSumCountAggregator); + describe('with LastValueAggregator', () => { + mockAggregator(LastValueAggregator); - it('serialize metric record with MinMaxLastSumCountAggregator aggregator', async () => { + it('serialize metric record with LastValue aggregator', async () => { const serializer = new PrometheusSerializer(); const meter = new MeterProvider({ - batcher: new ExactBatcher(MinMaxLastSumCountAggregator), + batcher: new ExactBatcher(LastValueAggregator), }).getMeter('test'); const batcher = new PrometheusLabelsBatcher(); - const counter = meter.createCounter('test', { - description: 'foobar', - }) as CounterMetric; - counter.bind({ val: '1' }).add(1); - counter.bind({ val: '2' }).add(1); - - const records = await counter.getMetricRecord(); + const observer = meter.createValueObserver( + 'test', + { + description: 'foobar', + }, + observerResult => { + observerResult.observe(1, labels); + } + ) as ValueObserverMetric; + await meter.collect(); + const records = await observer.getMetricRecord(); records.forEach(it => batcher.process(it)); const checkPointSet = batcher.checkPointSet(); @@ -286,15 +292,8 @@ describe('PrometheusSerializer', () => { assert.strictEqual( result, '# HELP test foobar\n' + - '# TYPE test summary\n' + - 
`test_count{val="1"} 1 ${mockedHrTimeMs}\n` + - `test_sum{val="1"} 1 ${mockedHrTimeMs}\n` + - `test{val="1",quantile="0"} 1 ${mockedHrTimeMs}\n` + - `test{val="1",quantile="1"} 1 ${mockedHrTimeMs}\n` + - `test_count{val="2"} 1 ${mockedHrTimeMs}\n` + - `test_sum{val="2"} 1 ${mockedHrTimeMs}\n` + - `test{val="2",quantile="0"} 1 ${mockedHrTimeMs}\n` + - `test{val="2",quantile="1"} 1 ${mockedHrTimeMs}\n` + '# TYPE test gauge\n' + + `test{foo1="bar1",foo2="bar2"} 1 ${mockedHrTimeMs}\n` ); }); }); diff --git a/packages/opentelemetry-metrics/src/export/Batcher.ts b/packages/opentelemetry-metrics/src/export/Batcher.ts index 18e871aa63..d5baccbcf0 100644 --- a/packages/opentelemetry-metrics/src/export/Batcher.ts +++ b/packages/opentelemetry-metrics/src/export/Batcher.ts @@ -56,10 +56,13 @@ export class UngroupedBatcher extends Batcher { case MetricKind.UP_DOWN_SUM_OBSERVER: return new aggregators.SumAggregator(); case MetricKind.VALUE_RECORDER: + return new aggregators.HistogramAggregator( + metricDescriptor.boundaries || [Infinity] + ); case MetricKind.VALUE_OBSERVER: - return new aggregators.MinMaxLastSumCountAggregator(); + return new aggregators.LastValueAggregator(); default: - return new aggregators.MinMaxLastSumCountAggregator(); + return new aggregators.LastValueAggregator(); } } diff --git a/packages/opentelemetry-metrics/src/export/ConsoleMetricExporter.ts b/packages/opentelemetry-metrics/src/export/ConsoleMetricExporter.ts index 988a527da1..0b4977e98d 100644 --- a/packages/opentelemetry-metrics/src/export/ConsoleMetricExporter.ts +++ b/packages/opentelemetry-metrics/src/export/ConsoleMetricExporter.ts @@ -14,7 +14,7 @@ * limitations under the License. */ -import { MetricExporter, MetricRecord, Distribution, Histogram } from './types'; +import { MetricExporter, MetricRecord, Histogram } from './types'; import { ExportResult } from '@opentelemetry/core'; /** @@ -38,17 +38,7 @@ export class ConsoleMetricExporter implements MetricExporter { `count: ${histogram.count}, sum: ${histogram.sum}, buckets: ${histogram.buckets}` ); } else { - const distribution = point.value as Distribution; - console.log( - 'min: ' + - distribution.min + - ', max: ' + - distribution.max + - ', count: ' + - distribution.count + - ', sum: ' + - distribution.sum - ); + console.log(point.value); } } return resultCallback(ExportResult.SUCCESS); diff --git a/packages/opentelemetry-metrics/src/export/aggregators/MinMaxLastSumCount.ts b/packages/opentelemetry-metrics/src/export/aggregators/LastValue.ts similarity index 50% rename from packages/opentelemetry-metrics/src/export/aggregators/MinMaxLastSumCount.ts rename to packages/opentelemetry-metrics/src/export/aggregators/LastValue.ts index ee030f7247..b2462f497a 100644 --- a/packages/opentelemetry-metrics/src/export/aggregators/MinMaxLastSumCount.ts +++ b/packages/opentelemetry-metrics/src/export/aggregators/LastValue.ts @@ -14,42 +14,29 @@ * limitations under the License. */ -import { Point, AggregatorKind, DistributionAggregatorType } from '../types'; +import { + AggregatorKind, + LastValue, + LastValueAggregatorType, + Point, +} from '../types'; import { HrTime } from '@opentelemetry/api'; import { hrTime } from '@opentelemetry/core'; -import { Distribution } from '../types'; -/** - * Basic aggregator keeping all raw values (events, sum, max, last and min). 
- */ -export class MinMaxLastSumCountAggregator - implements DistributionAggregatorType { - public kind: AggregatorKind.DISTRIBUTION = AggregatorKind.DISTRIBUTION; - private _distribution: Distribution; +/** Basic aggregator for LastValue which keeps the last recorded value. */ +export class LastValueAggregator implements LastValueAggregatorType { + private _current: number = 0; private _lastUpdateTime: HrTime = [0, 0]; - - constructor() { - this._distribution = { - min: Infinity, - max: -Infinity, - last: 0, - sum: 0, - count: 0, - }; - } + kind: AggregatorKind.LAST_VALUE = AggregatorKind.LAST_VALUE; update(value: number): void { - this._distribution.count++; - this._distribution.sum += value; - this._distribution.last = value; - this._distribution.min = Math.min(this._distribution.min, value); - this._distribution.max = Math.max(this._distribution.max, value); + this._current = value; this._lastUpdateTime = hrTime(); } - toPoint(): Point { + toPoint(): Point { return { - value: this._distribution, + value: this._current, timestamp: this._lastUpdateTime, }; } diff --git a/packages/opentelemetry-metrics/src/export/aggregators/index.ts b/packages/opentelemetry-metrics/src/export/aggregators/index.ts index 9ff9f904ea..4f531c287d 100644 --- a/packages/opentelemetry-metrics/src/export/aggregators/index.ts +++ b/packages/opentelemetry-metrics/src/export/aggregators/index.ts @@ -15,5 +15,5 @@ */ export * from './Histogram'; -export * from './MinMaxLastSumCount'; +export * from './LastValue'; export * from './Sum'; diff --git a/packages/opentelemetry-metrics/src/export/types.ts b/packages/opentelemetry-metrics/src/export/types.ts index 0820e5c8af..c5d3c8e620 100644 --- a/packages/opentelemetry-metrics/src/export/types.ts +++ b/packages/opentelemetry-metrics/src/export/types.ts @@ -29,11 +29,12 @@ export enum MetricKind { BATCH_OBSERVER, } +export const MetricKindValues = Object.values(MetricKind); + /** The kind of aggregator. */ export enum AggregatorKind { SUM, LAST_VALUE, - DISTRIBUTION, HISTOGRAM, } @@ -43,15 +44,6 @@ export type Sum = number; /** LastValue returns last value. */ export type LastValue = number; -/** Distribution returns an aggregated distribution. */ -export interface Distribution { - min: number; - max: number; - last: number; - count: number; - sum: number; -} - export interface Histogram { /** * Buckets are implemented using two different arrays: @@ -77,7 +69,7 @@ export interface Histogram { count: number; } -export type PointValueType = Sum | LastValue | Distribution | Histogram; +export type PointValueType = Sum | LastValue | Histogram; export interface MetricRecord { readonly descriptor: MetricDescriptor; @@ -93,6 +85,7 @@ export interface MetricDescriptor { readonly unit: string; readonly metricKind: MetricKind; readonly valueType: ValueType; + readonly boundaries?: number[]; } /** @@ -142,17 +135,6 @@ export interface LastValueAggregatorType extends BaseAggregator { toPoint(): Point; } -/** - * DistributionAggregatorType aggregate values into a {@link Distribution} - * point type. - */ -export interface DistributionAggregatorType extends BaseAggregator { - kind: AggregatorKind.DISTRIBUTION; - - /** Returns snapshot of the current point (value with timestamp). */ - toPoint(): Point; -} - /** * HistogramAggregatorType aggregate values into a {@link Histogram} point * type. 
@@ -167,7 +149,6 @@ export interface HistogramAggregatorType extends BaseAggregator { export type Aggregator = | SumAggregatorType | LastValueAggregatorType - | DistributionAggregatorType | HistogramAggregatorType; /** diff --git a/packages/opentelemetry-metrics/test/Meter.test.ts b/packages/opentelemetry-metrics/test/Meter.test.ts index 81ce0a31aa..0940a3057c 100644 --- a/packages/opentelemetry-metrics/test/Meter.test.ts +++ b/packages/opentelemetry-metrics/test/Meter.test.ts @@ -29,8 +29,9 @@ import { Aggregator, MetricDescriptor, UpDownCounterMetric, - Distribution, - MinMaxLastSumCountAggregator, + LastValueAggregator, + LastValue, + Histogram, } from '../src'; import * as api from '@opentelemetry/api'; import { NoopLogger, hrTime, hrTimeToNanoseconds } from '@opentelemetry/core'; @@ -623,12 +624,13 @@ describe('Meter', () => { await meter.collect(); const [record1] = meter.getBatcher().checkPointSet(); assert.deepStrictEqual( - record1.aggregator.toPoint().value as Distribution, + record1.aggregator.toPoint().value as Histogram, { + buckets: { + boundaries: [Infinity], + counts: [0, 0], + }, count: 0, - last: 0, - max: -Infinity, - min: Infinity, sum: 0, } ); @@ -643,12 +645,13 @@ describe('Meter', () => { await meter.collect(); const [record1] = meter.getBatcher().checkPointSet(); assert.deepStrictEqual( - record1.aggregator.toPoint().value as Distribution, + record1.aggregator.toPoint().value as Histogram, { + buckets: { + boundaries: [Infinity], + counts: [2, 0], + }, count: 2, - last: 50, - max: 50, - min: -10, sum: 40, } ); @@ -669,12 +672,13 @@ describe('Meter', () => { await meter.collect(); const [record1] = meter.getBatcher().checkPointSet(); assert.deepStrictEqual( - record1.aggregator.toPoint().value as Distribution, + record1.aggregator.toPoint().value as Histogram, { + buckets: { + boundaries: [Infinity], + counts: [2, 0], + }, count: 2, - last: 100, - max: 100, - min: 10, sum: 110, } ); @@ -693,14 +697,14 @@ describe('Meter', () => { boundValueRecorder.record(val); await meter.collect(); const [record1] = meter.getBatcher().checkPointSet(); - assert.deepStrictEqual( - record1.aggregator.toPoint().value as Distribution, + record1.aggregator.toPoint().value as Histogram, { + buckets: { + boundaries: [Infinity], + counts: [0, 0], + }, count: 0, - last: 0, - max: -Infinity, - min: Infinity, sum: 0, } ); @@ -768,12 +772,14 @@ describe('Meter', () => { it('should set callback and observe value ', async () => { let counter = 0; + function getValue() { if (++counter % 2 == 0) { return -1; } return 3; } + const sumObserver = meter.createSumObserver( 'name', { @@ -987,6 +993,7 @@ describe('Meter', () => { it('should set callback and observe value ', async () => { let counter = 0; + function getValue() { counter++; if (counter % 2 === 0) { @@ -994,6 +1001,7 @@ describe('Meter', () => { } return 3; } + const upDownSumObserver = meter.createUpDownSumObserver( 'name', { @@ -1192,34 +1200,10 @@ describe('Meter', () => { assert.strictEqual(hashLabels(metric3.labels), '|#app:app2,core:1'); assert.strictEqual(hashLabels(metric4.labels), '|#app:app2,core:2'); - ensureMetric(metric1, 'cpu_temp_per_app', { - count: 1, - last: 67, - max: 67, - min: 67, - sum: 67, - }); - ensureMetric(metric2, 'cpu_temp_per_app', { - count: 1, - last: 69, - max: 69, - min: 69, - sum: 69, - }); - ensureMetric(metric3, 'cpu_temp_per_app', { - count: 1, - last: 67, - max: 67, - min: 67, - sum: 67, - }); - ensureMetric(metric4, 'cpu_temp_per_app', { - count: 1, - last: 69, - max: 69, - min: 69, - sum: 69, - }); + 
ensureMetric(metric1, 'cpu_temp_per_app', 67); + ensureMetric(metric2, 'cpu_temp_per_app', 69); + ensureMetric(metric3, 'cpu_temp_per_app', 67); + ensureMetric(metric4, 'cpu_temp_per_app', 69); const metric5 = records[4]; const metric6 = records[5]; @@ -1230,34 +1214,10 @@ describe('Meter', () => { assert.strictEqual(hashLabels(metric7.labels), '|#app:app2,core:1'); assert.strictEqual(hashLabels(metric8.labels), '|#app:app2,core:2'); - ensureMetric(metric5, 'cpu_usage_per_app', { - count: 1, - last: 2.1, - max: 2.1, - min: 2.1, - sum: 2.1, - }); - ensureMetric(metric6, 'cpu_usage_per_app', { - count: 1, - last: 3.1, - max: 3.1, - min: 3.1, - sum: 3.1, - }); - ensureMetric(metric7, 'cpu_usage_per_app', { - count: 1, - last: 1.2, - max: 1.2, - min: 1.2, - sum: 1.2, - }); - ensureMetric(metric8, 'cpu_usage_per_app', { - count: 1, - last: 4.5, - max: 4.5, - min: 4.5, - sum: 4.5, - }); + ensureMetric(metric5, 'cpu_usage_per_app', 2.1); + ensureMetric(metric6, 'cpu_usage_per_app', 3.1); + ensureMetric(metric7, 'cpu_usage_per_app', 1.2); + ensureMetric(metric8, 'cpu_usage_per_app', 4.5); }); it('should not observe values when timeout', done => { @@ -1287,15 +1247,9 @@ describe('Meter', () => { const value = cpuUsageMetric .bind({ foo: 'bar' }) .getAggregator() - .toPoint().value as Distribution; - - assert.deepStrictEqual(value, { - count: 0, - last: 0, - max: -Infinity, - min: Infinity, - sum: 0, - }); + .toPoint().value; + + assert.deepStrictEqual(value, 0); assert.strictEqual(cpuUsageMetricRecords.length, 0); done(); }); @@ -1393,22 +1347,17 @@ class CustomBatcher extends Batcher { process(record: MetricRecord): void { throw new Error('process method not implemented.'); } + aggregatorFor(metricKind: MetricDescriptor): Aggregator { throw new Error('aggregatorFor method not implemented.'); } } -function ensureMetric( - metric: MetricRecord, - name?: string, - value?: Distribution -) { - assert.ok(metric.aggregator instanceof MinMaxLastSumCountAggregator); - const distribution = metric.aggregator.toPoint().value as Distribution; +function ensureMetric(metric: MetricRecord, name?: string, value?: LastValue) { + assert.ok(metric.aggregator instanceof LastValueAggregator); + const lastValue = metric.aggregator.toPoint().value; if (value) { - assert.deepStrictEqual(distribution, value); - } else { - assert.ok(distribution.last >= 0 && distribution.last <= 1); + assert.deepStrictEqual(lastValue, value); } const descriptor = metric.descriptor; assert.strictEqual(descriptor.name, name || 'name'); From 1fc672f4a99c9b3a99d229cd5de94a327f4b32b0 Mon Sep 17 00:00:00 2001 From: Bartlomiej Obecny Date: Fri, 9 Oct 2020 19:28:45 +0200 Subject: [PATCH 03/10] chore: removing examples from lerna bootstrap --- lerna.json | 2 -- 1 file changed, 2 deletions(-) diff --git a/lerna.json b/lerna.json index e78656cafa..4e651f4a2c 100644 --- a/lerna.json +++ b/lerna.json @@ -2,8 +2,6 @@ "lerna": "3.13.4", "npmClient": "npm", "packages": [ - "examples/metrics", - "examples/collector-exporter-node", "benchmark/*", "backwards-compatability/*", "metapackages/*", From 440bf396901902a2e478873e10becaf3d6090553 Mon Sep 17 00:00:00 2001 From: Bartlomiej Obecny Date: Fri, 9 Oct 2020 19:31:42 +0200 Subject: [PATCH 04/10] chore: cleaning ups unused interfaces --- .../src/types.ts | 23 ------------------- 1 file changed, 23 deletions(-) diff --git a/packages/opentelemetry-exporter-collector/src/types.ts b/packages/opentelemetry-exporter-collector/src/types.ts index c3c04a52b0..ba8c474caf 100644 --- 
a/packages/opentelemetry-exporter-collector/src/types.ts +++ b/packages/opentelemetry-exporter-collector/src/types.ts @@ -55,12 +55,6 @@ export namespace opentelemetryProto { doubleSum?: opentelemetryProto.metrics.v1.Sum; intHistogram?: opentelemetryProto.metrics.v1.Histogram; doubleHistogram?: opentelemetryProto.metrics.v1.Histogram; - - // metricDescriptor: opentelemetryProto.metrics.v1.MetricDescriptor; - // int64DataPoints?: opentelemetryProto.metrics.v1.Int64DataPoint[]; - // doubleDataPoints?: opentelemetryProto.metrics.v1.DoubleDataPoint[]; - // histogramDataPoints?: opentelemetryProto.metrics.v1.HistogramDataPoint[]; - // summaryDataPoints?: opentelemetryProto.metrics.v1.SummaryDataPoint[]; } export interface Gauge { @@ -115,23 +109,6 @@ export namespace opentelemetryProto { instrumentationLibraryMetrics: opentelemetryProto.metrics.v1.InstrumentationLibraryMetrics[]; } - // export enum MetricDescriptorType { - // INVALID_TYPE, - // INT64, - // MONOTONIC_INT64, - // DOUBLE, - // MONOTONIC_DOUBLE, - // HISTOGRAM, - // SUMMARY, - // } - - // export enum MetricDescriptorTemporality { - // INVALID_TEMPORALITY, - // INSTANTANEOUS, - // DELTA, - // CUMULATIVE, - // } - export enum AggregationTemporality { // UNSPECIFIED is the default AggregationTemporality, it MUST not be used. AGGREGATION_TEMPORALITY_UNSPECIFIED = 0, From 4752a0ea10462117596f0c2195c03e5f573b545e Mon Sep 17 00:00:00 2001 From: Bartlomiej Obecny Date: Fri, 9 Oct 2020 19:37:46 +0200 Subject: [PATCH 05/10] chore: fixing unit test --- .../test/PrometheusExporter.test.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/opentelemetry-exporter-prometheus/test/PrometheusExporter.test.ts b/packages/opentelemetry-exporter-prometheus/test/PrometheusExporter.test.ts index f16fb47256..81af56ad8c 100644 --- a/packages/opentelemetry-exporter-prometheus/test/PrometheusExporter.test.ts +++ b/packages/opentelemetry-exporter-prometheus/test/PrometheusExporter.test.ts @@ -539,7 +539,7 @@ describe('PrometheusExporter', () => { `value_recorder_count{key1="labelValue1"} 1 ${mockedHrTimeMs}`, `value_recorder_sum{key1="labelValue1"} 20 ${mockedHrTimeMs}`, `value_recorder_bucket{key1="labelValue1",le="Infinity"} 1 ${mockedHrTimeMs}`, - `value_recorder_bucket{key1="labelValue1",le="+Inf"} 0 ${mockedHrTimeMs}`, + `value_recorder_bucket{key1="labelValue1",le="+Inf"} 1 ${mockedHrTimeMs}`, '', ]); From d41cd879a34dda41bd09ae8e8db76600275a4fca Mon Sep 17 00:00:00 2001 From: Bartlomiej Obecny Date: Mon, 12 Oct 2020 22:32:49 +0200 Subject: [PATCH 06/10] chore: updating aggregation temporality rules --- .../opentelemetry-exporter-collector-grpc/test/helper.ts | 2 +- .../opentelemetry-exporter-collector-proto/test/helper.ts | 2 +- .../src/transformMetrics.ts | 8 ++++---- packages/opentelemetry-exporter-collector/test/helper.ts | 4 ++-- .../test/node/CollectorMetricExporter.test.ts | 1 - 5 files changed, 8 insertions(+), 9 deletions(-) diff --git a/packages/opentelemetry-exporter-collector-grpc/test/helper.ts b/packages/opentelemetry-exporter-collector-grpc/test/helper.ts index a7b3be8bee..c33fa241ae 100644 --- a/packages/opentelemetry-exporter-collector-grpc/test/helper.ts +++ b/packages/opentelemetry-exporter-collector-grpc/test/helper.ts @@ -342,7 +342,7 @@ export function ensureExportedCounterIsCorrect( }, ], isMonotonic: true, - aggregationTemporality: 'AGGREGATION_TEMPORALITY_CUMULATIVE', + aggregationTemporality: 'AGGREGATION_TEMPORALITY_DELTA', }, }); } diff --git 
a/packages/opentelemetry-exporter-collector-proto/test/helper.ts b/packages/opentelemetry-exporter-collector-proto/test/helper.ts index 25f613b51d..84d12d0b14 100644 --- a/packages/opentelemetry-exporter-collector-proto/test/helper.ts +++ b/packages/opentelemetry-exporter-collector-proto/test/helper.ts @@ -301,7 +301,7 @@ export function ensureExportedCounterIsCorrect( }, ], isMonotonic: true, - aggregationTemporality: 'AGGREGATION_TEMPORALITY_CUMULATIVE', + aggregationTemporality: 'AGGREGATION_TEMPORALITY_DELTA', }, }); } diff --git a/packages/opentelemetry-exporter-collector/src/transformMetrics.ts b/packages/opentelemetry-exporter-collector/src/transformMetrics.ts index 522188e24e..f02ee27e8d 100644 --- a/packages/opentelemetry-exporter-collector/src/transformMetrics.ts +++ b/packages/opentelemetry-exporter-collector/src/transformMetrics.ts @@ -49,17 +49,17 @@ export function toAggregationTemporality( ): opentelemetryProto.metrics.v1.AggregationTemporality { if ( metric.descriptor.metricKind === MetricKind.COUNTER || - metric.descriptor.metricKind === MetricKind.SUM_OBSERVER + metric.descriptor.metricKind === MetricKind.UP_DOWN_COUNTER ) { return opentelemetryProto.metrics.v1.AggregationTemporality - .AGGREGATION_TEMPORALITY_CUMULATIVE; + .AGGREGATION_TEMPORALITY_DELTA; } if ( - metric.descriptor.metricKind === MetricKind.UP_DOWN_COUNTER || + metric.descriptor.metricKind === MetricKind.SUM_OBSERVER || metric.descriptor.metricKind === MetricKind.UP_DOWN_SUM_OBSERVER ) { return opentelemetryProto.metrics.v1.AggregationTemporality - .AGGREGATION_TEMPORALITY_DELTA; + .AGGREGATION_TEMPORALITY_CUMULATIVE; } if ( metric.descriptor.metricKind === MetricKind.VALUE_OBSERVER || diff --git a/packages/opentelemetry-exporter-collector/test/helper.ts b/packages/opentelemetry-exporter-collector/test/helper.ts index 20df72696d..0fb5646dcb 100644 --- a/packages/opentelemetry-exporter-collector/test/helper.ts +++ b/packages/opentelemetry-exporter-collector/test/helper.ts @@ -509,7 +509,7 @@ export function ensureCounterIsCorrect( isMonotonic: true, aggregationTemporality: collectorTypes.opentelemetryProto.metrics.v1.AggregationTemporality - .AGGREGATION_TEMPORALITY_CUMULATIVE, + .AGGREGATION_TEMPORALITY_DELTA, }, }); } @@ -534,7 +534,7 @@ export function ensureDoubleCounterIsCorrect( isMonotonic: true, aggregationTemporality: collectorTypes.opentelemetryProto.metrics.v1.AggregationTemporality - .AGGREGATION_TEMPORALITY_CUMULATIVE, + .AGGREGATION_TEMPORALITY_DELTA, }, }); } diff --git a/packages/opentelemetry-exporter-collector/test/node/CollectorMetricExporter.test.ts b/packages/opentelemetry-exporter-collector/test/node/CollectorMetricExporter.test.ts index ad8cddd07b..85f447cbab 100644 --- a/packages/opentelemetry-exporter-collector/test/node/CollectorMetricExporter.test.ts +++ b/packages/opentelemetry-exporter-collector/test/node/CollectorMetricExporter.test.ts @@ -210,7 +210,6 @@ describe('CollectorMetricExporter - node with json over http', () => { callback(mockResError); setTimeout(() => { const response = spyLoggerError.args[0][0] as string; - console.log(response); assert.ok(response.includes('"code":"400"')); assert.strictEqual(responseSpy.args[0][0], 1); done(); From 1175b4132129abb6e28bf5de8eb608fa65a032c0 Mon Sep 17 00:00:00 2001 From: Bartlomiej Obecny Date: Tue, 13 Oct 2020 19:34:59 +0200 Subject: [PATCH 07/10] chore: updating temporality for value recorder --- .../test/helper.ts | 2 +- .../test/helper.ts | 2 +- .../src/transformMetrics.ts | 16 +++++++++++----- .../test/helper.ts | 2 +- 4 
files changed, 14 insertions(+), 8 deletions(-) diff --git a/packages/opentelemetry-exporter-collector-grpc/test/helper.ts b/packages/opentelemetry-exporter-collector-grpc/test/helper.ts index c33fa241ae..0607571c9c 100644 --- a/packages/opentelemetry-exporter-collector-grpc/test/helper.ts +++ b/packages/opentelemetry-exporter-collector-grpc/test/helper.ts @@ -392,7 +392,7 @@ export function ensureExportedValueRecorderIsCorrect( explicitBounds: [Infinity], }, ], - aggregationTemporality: 'AGGREGATION_TEMPORALITY_DELTA', + aggregationTemporality: 'AGGREGATION_TEMPORALITY_CUMULATIVE', }, }); } diff --git a/packages/opentelemetry-exporter-collector-proto/test/helper.ts b/packages/opentelemetry-exporter-collector-proto/test/helper.ts index 84d12d0b14..8f59de6fa0 100644 --- a/packages/opentelemetry-exporter-collector-proto/test/helper.ts +++ b/packages/opentelemetry-exporter-collector-proto/test/helper.ts @@ -345,7 +345,7 @@ export function ensureExportedValueRecorderIsCorrect( explicitBounds: ['Infinity'], }, ], - aggregationTemporality: 'AGGREGATION_TEMPORALITY_DELTA', + aggregationTemporality: 'AGGREGATION_TEMPORALITY_CUMULATIVE', }, }); } diff --git a/packages/opentelemetry-exporter-collector/src/transformMetrics.ts b/packages/opentelemetry-exporter-collector/src/transformMetrics.ts index f02ee27e8d..eaf73fc2c9 100644 --- a/packages/opentelemetry-exporter-collector/src/transformMetrics.ts +++ b/packages/opentelemetry-exporter-collector/src/transformMetrics.ts @@ -54,6 +54,7 @@ export function toAggregationTemporality( return opentelemetryProto.metrics.v1.AggregationTemporality .AGGREGATION_TEMPORALITY_DELTA; } + if ( metric.descriptor.metricKind === MetricKind.SUM_OBSERVER || metric.descriptor.metricKind === MetricKind.UP_DOWN_SUM_OBSERVER @@ -61,13 +62,18 @@ export function toAggregationTemporality( return opentelemetryProto.metrics.v1.AggregationTemporality .AGGREGATION_TEMPORALITY_CUMULATIVE; } - if ( - metric.descriptor.metricKind === MetricKind.VALUE_OBSERVER || - metric.descriptor.metricKind === MetricKind.VALUE_RECORDER - ) { + + if (metric.descriptor.metricKind === MetricKind.VALUE_OBSERVER) { return opentelemetryProto.metrics.v1.AggregationTemporality - .AGGREGATION_TEMPORALITY_DELTA; + .AGGREGATION_TEMPORALITY_UNSPECIFIED; } + + // until spec is resolved keep it as unspecified + if (metric.descriptor.metricKind === MetricKind.VALUE_RECORDER) { + return opentelemetryProto.metrics.v1.AggregationTemporality + .AGGREGATION_TEMPORALITY_CUMULATIVE; + } + return opentelemetryProto.metrics.v1.AggregationTemporality .AGGREGATION_TEMPORALITY_UNSPECIFIED; } diff --git a/packages/opentelemetry-exporter-collector/test/helper.ts b/packages/opentelemetry-exporter-collector/test/helper.ts index 0fb5646dcb..eb7d81f57d 100644 --- a/packages/opentelemetry-exporter-collector/test/helper.ts +++ b/packages/opentelemetry-exporter-collector/test/helper.ts @@ -583,7 +583,7 @@ export function ensureValueRecorderIsCorrect( ], aggregationTemporality: collectorTypes.opentelemetryProto.metrics.v1.AggregationTemporality - .AGGREGATION_TEMPORALITY_DELTA, + .AGGREGATION_TEMPORALITY_CUMULATIVE, }, }); } From a0883080a901c7ca4fda1ba11acccd5360a3c178 Mon Sep 17 00:00:00 2001 From: Bartlomiej Obecny Date: Tue, 13 Oct 2020 22:10:31 +0200 Subject: [PATCH 08/10] chore: removing unneeded lib --- examples/collector-exporter-node/package.json | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/examples/collector-exporter-node/package.json b/examples/collector-exporter-node/package.json index 
96601ff0b4..23bfcce4a5 100644 --- a/examples/collector-exporter-node/package.json +++ b/examples/collector-exporter-node/package.json @@ -34,8 +34,7 @@ "@opentelemetry/exporter-collector-grpc": "^0.11.0", "@opentelemetry/exporter-collector-proto": "^0.11.0", "@opentelemetry/metrics": "^0.11.0", - "@opentelemetry/tracing": "^0.11.0", - "grpc": "^1.24.2" + "@opentelemetry/tracing": "^0.11.0" }, "homepage": "https://github.com/open-telemetry/opentelemetry-js#readme" } From 58e155064632a083406250a8aa181571e31179c8 Mon Sep 17 00:00:00 2001 From: Bartlomiej Obecny Date: Wed, 14 Oct 2020 15:56:26 +0200 Subject: [PATCH 09/10] chore: span id and trace id as hex --- .../docker/docker-compose.yaml | 4 +- .../test/helper.ts | 42 +++++++++++-------- .../test/helper.ts | 16 +++---- .../src/transform.ts | 12 +++--- .../test/helper.ts | 16 +++---- 5 files changed, 48 insertions(+), 42 deletions(-) diff --git a/examples/collector-exporter-node/docker/docker-compose.yaml b/examples/collector-exporter-node/docker/docker-compose.yaml index c04c16a124..3e4355ea23 100644 --- a/examples/collector-exporter-node/docker/docker-compose.yaml +++ b/examples/collector-exporter-node/docker/docker-compose.yaml @@ -2,8 +2,8 @@ version: "3" services: # Collector collector: -# image: otel/opentelemetry-collector:0.12.0 - image: otel/opentelemetry-collector:latest + image: otel/opentelemetry-collector:0.12.0 +# image: otel/opentelemetry-collector:latest command: ["--config=/conf/collector-config.yaml", "--log-level=DEBUG"] volumes: - ./collector-config.yaml:/conf/collector-config.yaml diff --git a/packages/opentelemetry-exporter-collector-grpc/test/helper.ts b/packages/opentelemetry-exporter-collector-grpc/test/helper.ts index 0607571c9c..3d5abd38de 100644 --- a/packages/opentelemetry-exporter-collector-grpc/test/helper.ts +++ b/packages/opentelemetry-exporter-collector-grpc/test/helper.ts @@ -34,25 +34,33 @@ const meterProvider = new MeterProvider({ const meter = meterProvider.getMeter('default', '0.0.1'); const traceIdArr = [ - 31, - 16, - 8, - 220, - 142, - 39, - 14, - 133, - 196, - 10, - 13, - 124, - 57, + 213, + 253, + 116, + 211, + 199, + 92, + 241, + 237, + 187, + 209, + 239, 57, - 178, - 120, + 115, + 141, + 26, + 209, + 222, + 220, + 223, + 221, + 253, + 111, + 110, + 252, ]; -const spanIdArr = [94, 16, 114, 97, 246, 79, 165, 62]; -const parentIdArr = [120, 168, 145, 80, 152, 134, 67, 136]; +const spanIdArr = [229, 237, 116, 239, 110, 181, 127, 174, 31, 107, 157, 222]; +const parentIdArr = [239, 198, 188, 247, 94, 116, 247, 207, 58, 227, 127, 60]; export async function mockCounter(): Promise { const name = 'int-counter'; diff --git a/packages/opentelemetry-exporter-collector-proto/test/helper.ts b/packages/opentelemetry-exporter-collector-proto/test/helper.ts index 8f59de6fa0..0217aa130d 100644 --- a/packages/opentelemetry-exporter-collector-proto/test/helper.ts +++ b/packages/opentelemetry-exporter-collector-proto/test/helper.ts @@ -89,9 +89,9 @@ export async function mockValueRecorder(): Promise { return (await metric.getMetricRecord())[0]; } -const traceIdBase64 = 'HxAI3I4nDoXECg18OTmyeA=='; -const spanIdBase64 = 'XhByYfZPpT4='; -const parentIdBase64 = 'eKiRUJiGQ4g='; +const traceIdHex = '1f1008dc8e270e85c40a0d7c3939b278'; +const spanIdHex = '5e107261f64fa53e'; +const parentIdHex = '78a8915098864388'; export const mockedReadableSpan: ReadableSpan = { name: 'documentFetch', @@ -222,8 +222,8 @@ export function ensureProtoLinksAreCorrect( attributes, [ { - traceId: traceIdBase64, - spanId: parentIdBase64, + traceId: 
traceIdHex, + spanId: parentIdHex, attributes: [ { key: 'component', @@ -251,11 +251,11 @@ export function ensureProtoSpanIsCorrect( if (span.links) { ensureProtoLinksAreCorrect(span.links); } - assert.deepStrictEqual(span.traceId, traceIdBase64, 'traceId is wrong'); - assert.deepStrictEqual(span.spanId, spanIdBase64, 'spanId is wrong'); + assert.deepStrictEqual(span.traceId, traceIdHex, 'traceId is wrong'); + assert.deepStrictEqual(span.spanId, spanIdHex, 'spanId is wrong'); assert.deepStrictEqual( span.parentSpanId, - parentIdBase64, + parentIdHex, 'parentIdArr is wrong' ); assert.strictEqual(span.name, 'documentFetch', 'name is wrong'); diff --git a/packages/opentelemetry-exporter-collector/src/transform.ts b/packages/opentelemetry-exporter-collector/src/transform.ts index 005f41a1bc..719ea567a0 100644 --- a/packages/opentelemetry-exporter-collector/src/transform.ts +++ b/packages/opentelemetry-exporter-collector/src/transform.ts @@ -139,8 +139,8 @@ export function toCollectorLinks( ): opentelemetryProto.trace.v1.Span.Link[] { return span.links.map((link: Link) => { const protoLink: opentelemetryProto.trace.v1.Span.Link = { - traceId: core.hexToBase64(link.context.traceId), - spanId: core.hexToBase64(link.context.spanId), + traceId: link.context.traceId, + spanId: link.context.spanId, attributes: toCollectorAttributes(link.attributes || {}), droppedAttributesCount: 0, }; @@ -156,11 +156,9 @@ export function toCollectorSpan( span: ReadableSpan ): opentelemetryProto.trace.v1.Span { return { - traceId: core.hexToBase64(span.spanContext.traceId), - spanId: core.hexToBase64(span.spanContext.spanId), - parentSpanId: span.parentSpanId - ? core.hexToBase64(span.parentSpanId) - : undefined, + traceId: span.spanContext.traceId, + spanId: span.spanContext.spanId, + parentSpanId: span.parentSpanId ? 
span.parentSpanId : undefined, traceState: toCollectorTraceState(span.spanContext.traceState), name: span.name, kind: toCollectorKind(span.kind), diff --git a/packages/opentelemetry-exporter-collector/test/helper.ts b/packages/opentelemetry-exporter-collector/test/helper.ts index eb7d81f57d..e11257138e 100644 --- a/packages/opentelemetry-exporter-collector/test/helper.ts +++ b/packages/opentelemetry-exporter-collector/test/helper.ts @@ -99,9 +99,9 @@ export async function mockValueRecorder(): Promise { return (await metric.getMetricRecord())[0]; } -const traceIdBase64 = 'HxAI3I4nDoXECg18OTmyeA=='; -const spanIdBase64 = 'XhByYfZPpT4='; -const parentIdBase64 = 'eKiRUJiGQ4g='; +const traceIdHex = '1f1008dc8e270e85c40a0d7c3939b278'; +const spanIdHex = '5e107261f64fa53e'; +const parentIdHex = '78a8915098864388'; export const mockedReadableSpan: ReadableSpan = { name: 'documentFetch', @@ -393,8 +393,8 @@ export function ensureLinksAreCorrect( attributes, [ { - traceId: traceIdBase64, - spanId: parentIdBase64, + traceId: traceIdHex, + spanId: parentIdHex, attributes: [ { key: 'component', @@ -422,11 +422,11 @@ export function ensureSpanIsCorrect( if (span.links) { ensureLinksAreCorrect(span.links); } - assert.deepStrictEqual(span.traceId, traceIdBase64, 'traceId is wrong'); - assert.deepStrictEqual(span.spanId, spanIdBase64, 'spanId is wrong'); + assert.deepStrictEqual(span.traceId, traceIdHex, 'traceId is wrong'); + assert.deepStrictEqual(span.spanId, spanIdHex, 'spanId is wrong'); assert.deepStrictEqual( span.parentSpanId, - parentIdBase64, + parentIdHex, 'parentIdArr is wrong' ); assert.strictEqual(span.name, 'documentFetch', 'name is wrong'); From 7d333db6134606885bbea7d4422dae0006d08d60 Mon Sep 17 00:00:00 2001 From: Bartlomiej Obecny Date: Wed, 14 Oct 2020 19:58:53 +0200 Subject: [PATCH 10/10] chore: adding value recorder to example --- examples/collector-exporter-node/metrics.js | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/examples/collector-exporter-node/metrics.js b/examples/collector-exporter-node/metrics.js index b6696778c8..5f59a6e191 100644 --- a/examples/collector-exporter-node/metrics.js +++ b/examples/collector-exporter-node/metrics.js @@ -25,9 +25,14 @@ const upDownCounter = meter.createUpDownCounter('test_up_down_counter', { description: 'Example of a UpDownCounter', }); +const recorder = meter.createValueRecorder('test_value_recorder', { + description: 'Example of a ValueRecorder', +}); + const labels = { pid: process.pid, environment: 'staging' }; setInterval(() => { requestCounter.bind(labels).add(1); upDownCounter.bind(labels).add(Math.random() > 0.5 ? 1 : -1); + recorder.bind(labels).record(Math.random()); }, 1000);
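
Note (not part of the patch series): a minimal usage sketch of the metrics API after these
changes, assuming the surface shown in the diffs above (the new `boundaries` option on
MetricOptions, ValueRecorder backed by a HistogramAggregator, ValueObserver backed by the
LastValueAggregator). Instrument names and boundary values below are illustrative only.

    import { MeterProvider } from '@opentelemetry/metrics';

    const meter = new MeterProvider({ interval: 30000 }).getMeter('example-meter');

    // ValueRecorder records now feed a HistogramAggregator; when no boundaries are
    // given, the Batcher falls back to [Infinity] (a single bucket).
    const recorder = meter.createValueRecorder('test_value_recorder', {
      description: 'Example of a ValueRecorder',
      boundaries: [10, 100, 1000], // assumed, illustrative bucket boundaries
    });
    recorder.bind({ pid: process.pid, environment: 'staging' }).record(42);

    // ValueObserver observations now use the LastValueAggregator, so only the most
    // recent observed value is kept (and is exported as a gauge by the collector and
    // Prometheus exporters, per the test expectations above).
    meter.createValueObserver(
      'test_value_observer',
      { description: 'Example of a ValueObserver' },
      observerResult => {
        observerResult.observe(Math.random(), { core: '1' });
      }
    );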