diff --git a/go.mod b/go.mod
index 7c4fce57887..d19dbe3ff1c 100644
--- a/go.mod
+++ b/go.mod
@@ -47,7 +47,7 @@ require (
 	github.com/prometheus/client_golang v1.9.0
 	github.com/prometheus/client_model v0.2.0
 	github.com/prometheus/common v0.18.0
-	github.com/prometheus/prometheus v1.8.2-0.20210319122004-e4f076f81302
+	github.com/prometheus/prometheus v1.8.2-0.20210321183757-31a518faab18
 	github.com/segmentio/fasthash v0.0.0-20180216231524-a72b379d632e
 	github.com/sony/gobreaker v0.4.1
 	github.com/spf13/afero v1.2.2
diff --git a/go.sum b/go.sum
index 56486e14d0c..da6c4e1cc0f 100644
--- a/go.sum
+++ b/go.sum
@@ -1100,8 +1100,8 @@ github.com/prometheus/prometheus v1.8.2-0.20201029103703-63be30dceed9/go.mod h1:
 github.com/prometheus/prometheus v1.8.2-0.20201119142752-3ad25a6dc3d9/go.mod h1:1MDE/bXgu4gqd5w/otko6WQpXZX9vu8QX4KbitCmaPg=
 github.com/prometheus/prometheus v1.8.2-0.20201119181812-c8f810083d3f/go.mod h1:1MDE/bXgu4gqd5w/otko6WQpXZX9vu8QX4KbitCmaPg=
 github.com/prometheus/prometheus v1.8.2-0.20210215121130-6f488061dfb4/go.mod h1:NAYujktP0dmSSpeV155mtnwX2pndLpVVK/Ps68R01TA=
-github.com/prometheus/prometheus v1.8.2-0.20210319122004-e4f076f81302 h1:Cn4/28Finy29V4D+Exbo2l3NVsio8GrfyYZl80U2JCI=
-github.com/prometheus/prometheus v1.8.2-0.20210319122004-e4f076f81302/go.mod h1:MS/bpdil77lPbfQeKk6OqVQ9OLnpN3Rszd0hka0EOWE=
+github.com/prometheus/prometheus v1.8.2-0.20210321183757-31a518faab18 h1:8chKJNOWv10FApdXgQ8Td8oYFrfFTbiBp/QpBaxEMRA=
+github.com/prometheus/prometheus v1.8.2-0.20210321183757-31a518faab18/go.mod h1:MS/bpdil77lPbfQeKk6OqVQ9OLnpN3Rszd0hka0EOWE=
 github.com/prometheus/tsdb v0.7.1/go.mod h1:qhTCs0VvXwvX/y3TZrWD7rabWM+ijKTux40TwIPHuXU=
 github.com/rafaeljusto/redigomock v0.0.0-20190202135759-257e089e14a1/go.mod h1:JaY6n2sDr+z2WTsXkOmNRUfDy6FN0L6Nk7x06ndm4tY=
 github.com/rcrowley/go-metrics v0.0.0-20181016184325-3113b8401b8a/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4=
diff --git a/vendor/github.com/prometheus/prometheus/config/config.go b/vendor/github.com/prometheus/prometheus/config/config.go
index 792ef5c1b03..419c26a945e 100644
--- a/vendor/github.com/prometheus/prometheus/config/config.go
+++ b/vendor/github.com/prometheus/prometheus/config/config.go
@@ -117,7 +117,7 @@ var (
 	DefaultAlertmanagerConfig = AlertmanagerConfig{
 		Scheme:           "http",
 		Timeout:          model.Duration(10 * time.Second),
-		APIVersion:       AlertmanagerAPIVersionV1,
+		APIVersion:       AlertmanagerAPIVersionV2,
 		HTTPClientConfig: config.DefaultHTTPClientConfig,
 	}
 
diff --git a/vendor/github.com/prometheus/prometheus/promql/engine.go b/vendor/github.com/prometheus/prometheus/promql/engine.go
index f963f454b31..5c9caebada3 100644
--- a/vendor/github.com/prometheus/prometheus/promql/engine.go
+++ b/vendor/github.com/prometheus/prometheus/promql/engine.go
@@ -902,6 +902,12 @@ func (ev *evaluator) Eval(expr parser.Expr) (v parser.Value, ws storage.Warnings
 	return v, ws, nil
 }
 
+// EvalSeriesHelper stores extra information about a series.
+type EvalSeriesHelper struct {
+	// The grouping key used by aggregation.
+	groupingKey uint64
+}
+
 // EvalNodeHelper stores extra information and caches for evaluating a single node across steps.
 type EvalNodeHelper struct {
 	// Evaluation timestamp.
@@ -962,10 +968,12 @@ func (enh *EvalNodeHelper) signatureFunc(on bool, names ...string) func(labels.L
 }
 
 // rangeEval evaluates the given expressions, and then for each step calls
-// the given function with the values computed for each expression at that
-// step. The return value is the combination into time series of all the
+// the given funcCall with the values computed for each expression at that
+// step. The return value is the combination into time series of all the
 // function call results.
-func (ev *evaluator) rangeEval(funcCall func([]parser.Value, *EvalNodeHelper) (Vector, storage.Warnings), exprs ...parser.Expr) (Matrix, storage.Warnings) {
+// The prepSeries function (if provided) can be used to prepare the helper
+// for each series, then passed to each call funcCall.
+func (ev *evaluator) rangeEval(prepSeries func(labels.Labels, *EvalSeriesHelper), funcCall func([]parser.Value, [][]EvalSeriesHelper, *EvalNodeHelper) (Vector, storage.Warnings), exprs ...parser.Expr) (Matrix, storage.Warnings) {
 	numSteps := int((ev.endTimestamp-ev.startTimestamp)/ev.interval) + 1
 	matrixes := make([]Matrix, len(exprs))
 	origMatrixes := make([]Matrix, len(exprs))
@@ -1001,6 +1009,30 @@ func (ev *evaluator) rangeEval(funcCall func([]parser.Value, *EvalNodeHelper) (V
 	enh := &EvalNodeHelper{Out: make(Vector, 0, biggestLen)}
 	seriess := make(map[uint64]Series, biggestLen) // Output series by series hash.
 	tempNumSamples := ev.currentSamples
+
+	var (
+		seriesHelpers [][]EvalSeriesHelper
+		bufHelpers    [][]EvalSeriesHelper // Buffer updated on each step
+	)
+
+	// If the series preparation function is provided, we should run it for
+	// every single series in the matrix.
+	if prepSeries != nil {
+		seriesHelpers = make([][]EvalSeriesHelper, len(exprs))
+		bufHelpers = make([][]EvalSeriesHelper, len(exprs))
+
+		for i := range exprs {
+			seriesHelpers[i] = make([]EvalSeriesHelper, len(matrixes[i]))
+			bufHelpers[i] = make([]EvalSeriesHelper, len(matrixes[i]))
+
+			for si, series := range matrixes[i] {
+				h := seriesHelpers[i][si]
+				prepSeries(series.Metric, &h)
+				seriesHelpers[i][si] = h
+			}
+		}
+	}
+
 	for ts := ev.startTimestamp; ts <= ev.endTimestamp; ts += ev.interval {
 		if err := contextDone(ev.ctx, "expression evaluation"); err != nil {
 			ev.error(err)
@@ -1010,11 +1042,20 @@ func (ev *evaluator) rangeEval(funcCall func([]parser.Value, *EvalNodeHelper) (V
 		// Gather input vectors for this timestamp.
 		for i := range exprs {
 			vectors[i] = vectors[i][:0]
+
+			if prepSeries != nil {
+				bufHelpers[i] = bufHelpers[i][:0]
+			}
+
 			for si, series := range matrixes[i] {
 				for _, point := range series.Points {
 					if point.T == ts {
 						if ev.currentSamples < ev.maxSamples {
 							vectors[i] = append(vectors[i], Sample{Metric: series.Metric, Point: point})
+							if prepSeries != nil {
+								bufHelpers[i] = append(bufHelpers[i], seriesHelpers[i][si])
+							}
+
 							// Move input vectors forward so we don't have to re-scan the same
 							// past points at the next step.
 							matrixes[i][si].Points = series.Points[1:]
@@ -1028,9 +1069,10 @@ func (ev *evaluator) rangeEval(funcCall func([]parser.Value, *EvalNodeHelper) (V
 			}
 			args[i] = vectors[i]
 		}
+
 		// Make the function call.
 		enh.Ts = ts
-		result, ws := funcCall(args, enh)
+		result, ws := funcCall(args, bufHelpers, enh)
 		if result.ContainsSameLabelset() {
 			ev.errorf("vector cannot contain metrics with the same labelset")
 		}
@@ -1132,18 +1174,29 @@ func (ev *evaluator) eval(expr parser.Expr) (parser.Value, storage.Warnings) {
 
 	switch e := expr.(type) {
 	case *parser.AggregateExpr:
+		// Grouping labels must be sorted (expected both by generateGroupingKey() and aggregation()).
+		sortedGrouping := e.Grouping
+		sort.Strings(sortedGrouping)
+
+		// Prepare a function to initialise series helpers with the grouping key.
+		buf := make([]byte, 0, 1024)
+		initSeries := func(series labels.Labels, h *EvalSeriesHelper) {
+			h.groupingKey, buf = generateGroupingKey(series, sortedGrouping, e.Without, buf)
+		}
+
 		unwrapParenExpr(&e.Param)
 		if s, ok := unwrapStepInvariantExpr(e.Param).(*parser.StringLiteral); ok {
-			return ev.rangeEval(func(v []parser.Value, enh *EvalNodeHelper) (Vector, storage.Warnings) {
-				return ev.aggregation(e.Op, e.Grouping, e.Without, s.Val, v[0].(Vector), enh), nil
+			return ev.rangeEval(initSeries, func(v []parser.Value, sh [][]EvalSeriesHelper, enh *EvalNodeHelper) (Vector, storage.Warnings) {
+				return ev.aggregation(e.Op, sortedGrouping, e.Without, s.Val, v[0].(Vector), sh[0], enh), nil
 			}, e.Expr)
 		}
-		return ev.rangeEval(func(v []parser.Value, enh *EvalNodeHelper) (Vector, storage.Warnings) {
+
+		return ev.rangeEval(initSeries, func(v []parser.Value, sh [][]EvalSeriesHelper, enh *EvalNodeHelper) (Vector, storage.Warnings) {
 			var param float64
 			if e.Param != nil {
 				param = v[0].(Vector)[0].V
 			}
-			return ev.aggregation(e.Op, e.Grouping, e.Without, param, v[1].(Vector), enh), nil
+			return ev.aggregation(e.Op, sortedGrouping, e.Without, param, v[1].(Vector), sh[1], enh), nil
 		}, e.Param, e.Expr)
 
 	case *parser.Call:
@@ -1156,7 +1209,7 @@ func (ev *evaluator) eval(expr parser.Expr) (parser.Value, storage.Warnings) {
 			arg := unwrapStepInvariantExpr(e.Args[0])
 			vs, ok := arg.(*parser.VectorSelector)
 			if ok {
-				return ev.rangeEval(func(v []parser.Value, enh *EvalNodeHelper) (Vector, storage.Warnings) {
+				return ev.rangeEval(nil, func(v []parser.Value, _ [][]EvalSeriesHelper, enh *EvalNodeHelper) (Vector, storage.Warnings) {
 					if vs.Timestamp != nil {
 						// This is a special case only for "timestamp" since the offset
 						// needs to be adjusted for every point.
@@ -1200,7 +1253,7 @@ func (ev *evaluator) eval(expr parser.Expr) (parser.Value, storage.Warnings) {
 		}
 		if !matrixArg {
 			// Does not have a matrix argument.
-			return ev.rangeEval(func(v []parser.Value, enh *EvalNodeHelper) (Vector, storage.Warnings) {
+			return ev.rangeEval(nil, func(v []parser.Value, _ [][]EvalSeriesHelper, enh *EvalNodeHelper) (Vector, storage.Warnings) {
 				return call(v, e.Args, enh), warnings
 			}, e.Args...)
 		}
@@ -1367,43 +1420,43 @@ func (ev *evaluator) eval(expr parser.Expr) (parser.Value, storage.Warnings) {
 	case *parser.BinaryExpr:
 		switch lt, rt := e.LHS.Type(), e.RHS.Type(); {
 		case lt == parser.ValueTypeScalar && rt == parser.ValueTypeScalar:
-			return ev.rangeEval(func(v []parser.Value, enh *EvalNodeHelper) (Vector, storage.Warnings) {
+			return ev.rangeEval(nil, func(v []parser.Value, _ [][]EvalSeriesHelper, enh *EvalNodeHelper) (Vector, storage.Warnings) {
 				val := scalarBinop(e.Op, v[0].(Vector)[0].Point.V, v[1].(Vector)[0].Point.V)
 				return append(enh.Out, Sample{Point: Point{V: val}}), nil
 			}, e.LHS, e.RHS)
 
 		case lt == parser.ValueTypeVector && rt == parser.ValueTypeVector:
 			switch e.Op {
 			case parser.LAND:
-				return ev.rangeEval(func(v []parser.Value, enh *EvalNodeHelper) (Vector, storage.Warnings) {
+				return ev.rangeEval(nil, func(v []parser.Value, _ [][]EvalSeriesHelper, enh *EvalNodeHelper) (Vector, storage.Warnings) {
 					return ev.VectorAnd(v[0].(Vector), v[1].(Vector), e.VectorMatching, enh), nil
 				}, e.LHS, e.RHS)
 			case parser.LOR:
-				return ev.rangeEval(func(v []parser.Value, enh *EvalNodeHelper) (Vector, storage.Warnings) {
+				return ev.rangeEval(nil, func(v []parser.Value, _ [][]EvalSeriesHelper, enh *EvalNodeHelper) (Vector, storage.Warnings) {
 					return ev.VectorOr(v[0].(Vector), v[1].(Vector), e.VectorMatching, enh), nil
 				}, e.LHS, e.RHS)
 			case parser.LUNLESS:
-				return ev.rangeEval(func(v []parser.Value, enh *EvalNodeHelper) (Vector, storage.Warnings) {
+				return ev.rangeEval(nil, func(v []parser.Value, _ [][]EvalSeriesHelper, enh *EvalNodeHelper) (Vector, storage.Warnings) {
 					return ev.VectorUnless(v[0].(Vector), v[1].(Vector), e.VectorMatching, enh), nil
 				}, e.LHS, e.RHS)
 			default:
-				return ev.rangeEval(func(v []parser.Value, enh *EvalNodeHelper) (Vector, storage.Warnings) {
+				return ev.rangeEval(nil, func(v []parser.Value, _ [][]EvalSeriesHelper, enh *EvalNodeHelper) (Vector, storage.Warnings) {
 					return ev.VectorBinop(e.Op, v[0].(Vector), v[1].(Vector), e.VectorMatching, e.ReturnBool, enh), nil
 				}, e.LHS, e.RHS)
 			}
 
 		case lt == parser.ValueTypeVector && rt == parser.ValueTypeScalar:
-			return ev.rangeEval(func(v []parser.Value, enh *EvalNodeHelper) (Vector, storage.Warnings) {
+			return ev.rangeEval(nil, func(v []parser.Value, _ [][]EvalSeriesHelper, enh *EvalNodeHelper) (Vector, storage.Warnings) {
 				return ev.VectorscalarBinop(e.Op, v[0].(Vector), Scalar{V: v[1].(Vector)[0].Point.V}, false, e.ReturnBool, enh), nil
 			}, e.LHS, e.RHS)
 
 		case lt == parser.ValueTypeScalar && rt == parser.ValueTypeVector:
-			return ev.rangeEval(func(v []parser.Value, enh *EvalNodeHelper) (Vector, storage.Warnings) {
+			return ev.rangeEval(nil, func(v []parser.Value, _ [][]EvalSeriesHelper, enh *EvalNodeHelper) (Vector, storage.Warnings) {
 				return ev.VectorscalarBinop(e.Op, v[1].(Vector), Scalar{V: v[0].(Vector)[0].Point.V}, true, e.ReturnBool, enh), nil
 			}, e.LHS, e.RHS)
 		}
 
 	case *parser.NumberLiteral:
-		return ev.rangeEval(func(v []parser.Value, enh *EvalNodeHelper) (Vector, storage.Warnings) {
+		return ev.rangeEval(nil, func(v []parser.Value, _ [][]EvalSeriesHelper, enh *EvalNodeHelper) (Vector, storage.Warnings) {
 			return append(enh.Out, Sample{Point: Point{V: e.Val}}), nil
 		})
 
@@ -2067,8 +2120,9 @@ type groupedAggregation struct {
 	reverseHeap vectorByReverseValueHeap
 }
 
-// aggregation evaluates an aggregation operation on a Vector.
-func (ev *evaluator) aggregation(op parser.ItemType, grouping []string, without bool, param interface{}, vec Vector, enh *EvalNodeHelper) Vector {
+// aggregation evaluates an aggregation operation on a Vector. The provided grouping labels
+// must be sorted.
+func (ev *evaluator) aggregation(op parser.ItemType, grouping []string, without bool, param interface{}, vec Vector, seriesHelper []EvalSeriesHelper, enh *EvalNodeHelper) Vector {
 	result := map[uint64]*groupedAggregation{}
 	var k int64
 
@@ -2087,35 +2141,43 @@ func (ev *evaluator) aggregation(op parser.ItemType, grouping []string, without
 		q = param.(float64)
 	}
 	var valueLabel string
+	var recomputeGroupingKey bool
 	if op == parser.COUNT_VALUES {
 		valueLabel = param.(string)
 		if !model.LabelName(valueLabel).IsValid() {
 			ev.errorf("invalid label name %q", valueLabel)
 		}
 		if !without {
+			// We're changing the grouping labels so we have to ensure they're still sorted
+			// and we have to flag to recompute the grouping key. Considering the count_values()
+			// operator is less frequently used than other aggregations, we're fine having to
+			// re-compute the grouping key on each step for this case.
 			grouping = append(grouping, valueLabel)
+			sort.Strings(grouping)
+			recomputeGroupingKey = true
 		}
 	}
 
-	sort.Strings(grouping)
 	lb := labels.NewBuilder(nil)
-	buf := make([]byte, 0, 1024)
-	for _, s := range vec {
+	var buf []byte
+	for si, s := range vec {
 		metric := s.Metric
 
 		if op == parser.COUNT_VALUES {
 			lb.Reset(metric)
 			lb.Set(valueLabel, strconv.FormatFloat(s.V, 'f', -1, 64))
 			metric = lb.Labels()
+
+			// We've changed the metric so we have to recompute the grouping key.
+			recomputeGroupingKey = true
 		}
 
-		var (
-			groupingKey uint64
-		)
-		if without {
-			groupingKey, buf = metric.HashWithoutLabels(buf, grouping...)
+		// We can use the pre-computed grouping key unless grouping labels have changed.
+		var groupingKey uint64
+		if !recomputeGroupingKey {
+			groupingKey = seriesHelper[si].groupingKey
 		} else {
-			groupingKey, buf = metric.HashForLabels(buf, grouping...)
+			groupingKey, buf = generateGroupingKey(metric, grouping, without, buf)
 		}
 
 		group, ok := result[groupingKey]
@@ -2302,6 +2364,21 @@ func (ev *evaluator) aggregation(op parser.ItemType, grouping []string, without
 	return enh.Out
 }
 
+// groupingKey builds and returns the grouping key for the given metric and
+// grouping labels.
+func generateGroupingKey(metric labels.Labels, grouping []string, without bool, buf []byte) (uint64, []byte) {
+	if without {
+		return metric.HashWithoutLabels(buf, grouping...)
+	}
+
+	if len(grouping) == 0 {
+		// No need to generate any hash if there are no grouping labels.
+		return 0, buf
+	}
+
+	return metric.HashForLabels(buf, grouping...)
+}
+
 // btos returns 1 if b is true, 0 otherwise.
 func btos(b bool) float64 {
 	if b {
diff --git a/vendor/github.com/prometheus/prometheus/storage/interface.go b/vendor/github.com/prometheus/prometheus/storage/interface.go
index f5fd19df861..eb2b5975ff6 100644
--- a/vendor/github.com/prometheus/prometheus/storage/interface.go
+++ b/vendor/github.com/prometheus/prometheus/storage/interface.go
@@ -180,6 +180,14 @@ type Appender interface {
 	ExemplarAppender
 }
 
+// GetRef is an extra interface on Appenders used by downstream projects
+// (e.g. Cortex) to avoid maintaining a parallel set of references.
+type GetRef interface {
+	// Returns reference number that can be used to pass to Appender.Append().
+	// 0 means the appender does not have a reference to this series.
+	GetRef(lset labels.Labels) uint64
+}
+
 // ExemplarAppender provides an interface for adding samples to exemplar storage, which
 // within Prometheus is in-memory only.
 type ExemplarAppender interface {
diff --git a/vendor/github.com/prometheus/prometheus/tsdb/db.go b/vendor/github.com/prometheus/prometheus/tsdb/db.go
index 8d58f5c5804..b8b3c9970fa 100644
--- a/vendor/github.com/prometheus/prometheus/tsdb/db.go
+++ b/vendor/github.com/prometheus/prometheus/tsdb/db.go
@@ -795,6 +795,15 @@ type dbAppender struct {
 	db *DB
 }
 
+var _ storage.GetRef = dbAppender{}
+
+func (a dbAppender) GetRef(lset labels.Labels) uint64 {
+	if g, ok := a.Appender.(storage.GetRef); ok {
+		return g.GetRef(lset)
+	}
+	return 0
+}
+
 func (a dbAppender) Commit() error {
 	err := a.Appender.Commit()
 
diff --git a/vendor/github.com/prometheus/prometheus/tsdb/head.go b/vendor/github.com/prometheus/prometheus/tsdb/head.go
index cad73e23a83..4f4e8d51dd3 100644
--- a/vendor/github.com/prometheus/prometheus/tsdb/head.go
+++ b/vendor/github.com/prometheus/prometheus/tsdb/head.go
@@ -1107,6 +1107,15 @@ func (a *initAppender) AppendExemplar(ref uint64, l labels.Labels, e exemplar.Ex
 	return a.app.AppendExemplar(ref, l, e)
 }
 
+var _ storage.GetRef = &initAppender{}
+
+func (a *initAppender) GetRef(lset labels.Labels) uint64 {
+	if g, ok := a.app.(storage.GetRef); ok {
+		return g.GetRef(lset)
+	}
+	return 0
+}
+
 func (a *initAppender) Commit() error {
 	if a.app == nil {
 		return nil
@@ -1331,6 +1340,16 @@ func (a *headAppender) AppendExemplar(ref uint64, _ labels.Labels, e exemplar.Ex
 	return s.ref, nil
 }
 
+var _ storage.GetRef = &headAppender{}
+
+func (a *headAppender) GetRef(lset labels.Labels) uint64 {
+	s := a.head.series.getByHash(lset.Hash(), lset)
+	if s == nil {
+		return 0
+	}
+	return s.ref
+}
+
 func (a *headAppender) log() error {
 	if a.head.wal == nil {
 		return nil
diff --git a/vendor/modules.txt b/vendor/modules.txt
index d0561c6db6d..ce2706dcd79 100644
--- a/vendor/modules.txt
+++ b/vendor/modules.txt
@@ -469,7 +469,7 @@ github.com/prometheus/node_exporter/https
 github.com/prometheus/procfs
 github.com/prometheus/procfs/internal/fs
 github.com/prometheus/procfs/internal/util
-# github.com/prometheus/prometheus v1.8.2-0.20210319122004-e4f076f81302
+# github.com/prometheus/prometheus v1.8.2-0.20210321183757-31a518faab18
 ## explicit
 github.com/prometheus/prometheus/config
 github.com/prometheus/prometheus/discovery
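
The sketch below is not part of the vendored diff; it is a hypothetical illustration of what the new `storage.GetRef` interface (added in `storage/interface.go` above) enables for a downstream project such as Cortex: asking the wrapped appender for an existing series reference instead of maintaining a parallel labels-to-reference index. The `example` package name, the `refReusingAppender` type, the `appendSample` helper, and the `pkg/labels` import path are assumptions for illustration only; the `GetRef` and `Append` calls follow the interfaces in this vendored Prometheus version.

```go
// Hypothetical usage sketch for storage.GetRef; not part of this diff.
package example

import (
	"github.com/prometheus/prometheus/pkg/labels"
	"github.com/prometheus/prometheus/storage"
)

// refReusingAppender wraps a storage.Appender (for example a TSDB head appender).
type refReusingAppender struct {
	storage.Appender
}

// appendSample prefers an existing series reference, when the wrapped appender
// can report one, over appending by labels.
func (a refReusingAppender) appendSample(lset labels.Labels, t int64, v float64) (uint64, error) {
	if g, ok := a.Appender.(storage.GetRef); ok {
		if ref := g.GetRef(lset); ref != 0 {
			// Fast path: the underlying appender already knows this series.
			return a.Appender.Append(ref, lset, t, v)
		}
	}
	// Slow path: append by labels; the returned reference can be cached by the caller.
	return a.Appender.Append(0, lset, t, v)
}
```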