From 8df6e6f72210eb0fda1caabc7852e660a82a5af4 Mon Sep 17 00:00:00 2001 From: Marco Pracucci Date: Fri, 5 Jul 2024 15:46:32 +0200 Subject: [PATCH] Update dskit and mimir-prometheus (#8632) * Update dskit and mimir-prometheus Signed-off-by: Marco Pracucci * Added missing files Signed-off-by: Marco Pracucci * Updated CHANGELOG Signed-off-by: Marco Pracucci --------- Signed-off-by: Marco Pracucci --- CHANGELOG.md | 2 + go.mod | 4 +- go.sum | 8 +- .../querymiddleware/astmapper/parallel.go | 2 + pkg/storage/ingest/reader.go | 14 +- .../grafana/dskit/backoff/backoff.go | 11 +- .../prometheus/model/labels/regexp.go | 129 ++- .../prometheus/prometheus/promql/engine.go | 152 +++- .../promql/parser/generated_parser.y | 8 +- .../promql/parser/generated_parser.y.go | 787 +++++++++--------- .../prometheus/promql/parser/lex.go | 4 +- .../prometheus/promql/parser/parse.go | 8 +- .../promql/promqltest/testdata/limit.test | 119 +++ .../prometheus/tsdb/chunks/chunks.go | 9 - .../prometheus/prometheus/tsdb/head_read.go | 142 +--- .../prometheus/tsdb/ooo_head_read.go | 52 +- .../prometheus/prometheus/tsdb/testutil.go | 176 ++++ .../util/annotations/annotations.go | 10 + vendor/modules.txt | 8 +- 19 files changed, 1015 insertions(+), 630 deletions(-) create mode 100644 vendor/github.com/prometheus/prometheus/promql/promqltest/testdata/limit.test create mode 100644 vendor/github.com/prometheus/prometheus/tsdb/testutil.go diff --git a/CHANGELOG.md b/CHANGELOG.md index 3dff9a86a37..6ec6a912a22 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -19,6 +19,7 @@ * `-ingest-storage.read-consistency`: configures the default read consistency. * `-ingest-storage.migration.distributor-send-to-ingesters-enabled`: enables tee-ing writes to classic ingesters and Kafka, used during a live migration to the new ingest storage architecture. * `-ingester.partition-ring.*`: configures partitions ring backend. +* [FEATURE] Querier: added support for `limitk()` and `limit_ratio()` experimental PromQL functions. Experimental functions are disabled by default, but can be enabled by setting `-querier.promql-experimental-functions-enabled=true` in the query-frontend and querier. #8632 * [CHANGE] Distributor: Incoming OTLP requests were previously size-limited by using limit from `-distributor.max-recv-msg-size` option. We have added option `-distributor.max-otlp-request-size` for limiting OTLP requests, with default value of 100 MiB. #8574 * [ENHANCEMENT] Compactor: Add `cortex_compactor_compaction_job_duration_seconds` and `cortex_compactor_compaction_job_blocks` histogram metrics to track duration of individual compaction jobs and number of blocks per job. #8371 * [ENHANCEMENT] Rules: Added per namespace max rules per rule group limit. The maximum number of rules per rule groups for all namespaces continues to be configured by `-ruler.max-rules-per-rule-group`, but now, this can be superseded by the new `-ruler.max-rules-per-rule-group-by-namespace` option on a per namespace basis. This new limit can be overridden using the overrides mechanism to be applied per-tenant. #8378 @@ -32,6 +33,7 @@ * [ENHANCEMENT] Alertmanager: Reloading config and templates no longer needs to hit the disk. #4967 * [ENHANCEMENT] Compactor: Added experimental `-compactor.in-memory-tenant-meta-cache-size` option to set size of in-memory cache (in number of items) for parsed meta.json files. This can help when a tenant has many meta.json files and their parsing before each compaction cycle is using a lot of CPU time.
#8544 * [ENHANCEMENT] Distributor: Interrupt OTLP write request translation when context is canceled or has timed out. #8524 +* [ENHANCEMENT] Ingester, store-gateway: optimised regular expression matching for patterns like `1.*|2.*|3.*|...|1000.*`. #8632 * [BUGFIX] Query-frontend: fix `-querier.max-query-lookback` enforcement when `-compactor.blocks-retention-period` is not set, and vice versa. #8388 * [BUGFIX] Ingester: fix sporadic `not found` error causing an internal server error if label names are queried with matchers during head compaction. #8391 * [BUGFIX] Ingester, store-gateway: fix case insensitive regular expressions not correctly matching some Unicode characters. #8391 diff --git a/go.mod b/go.mod index b490c362609..3aac73e1fb6 100644 --- a/go.mod +++ b/go.mod @@ -20,7 +20,7 @@ require ( github.com/golang/snappy v0.0.4 github.com/google/gopacket v1.1.19 github.com/gorilla/mux v1.8.1 - github.com/grafana/dskit v0.0.0-20240626184720-35810fdf1c6d + github.com/grafana/dskit v0.0.0-20240704113758-97b2aa96bcb1 github.com/grafana/e2e v0.1.2-0.20240118170847-db90b84177fc github.com/hashicorp/golang-lru v1.0.2 // indirect github.com/json-iterator/go v1.1.12 @@ -277,7 +277,7 @@ require ( ) // Using a fork of Prometheus with Mimir-specific changes. -replace github.com/prometheus/prometheus => github.com/grafana/mimir-prometheus v0.0.0-20240703103907-5131622acfee +replace github.com/prometheus/prometheus => github.com/grafana/mimir-prometheus v0.0.0-20240704133652-fb0cb30e280c // Replace memberlist with our fork which includes some fixes that haven't been // merged upstream yet: diff --git a/go.sum b/go.sum index 5266db655cb..1995eb9c7f5 100644 --- a/go.sum +++ b/go.sum @@ -515,8 +515,8 @@ github.com/grafana-tools/sdk v0.0.0-20220919052116-6562121319fc h1:PXZQA2WCxe85T github.com/grafana-tools/sdk v0.0.0-20220919052116-6562121319fc/go.mod h1:AHHlOEv1+GGQ3ktHMlhuTUwo3zljV3QJbC0+8o2kn+4= github.com/grafana/alerting v0.0.0-20240625192930-5b0553a572d3 h1:a7dtjsnqluU+1snjK+fCDbb/zj2b8STpNGsMm9bF92w= github.com/grafana/alerting v0.0.0-20240625192930-5b0553a572d3/go.mod h1:DLj8frbtCaITljC2jc0L85JQViPF3mPfOSiYhm1osso= -github.com/grafana/dskit v0.0.0-20240626184720-35810fdf1c6d h1:CD8PWWX+9lYdgeMquSofmLErvCtk7jb+3/W/SH6oo/k= -github.com/grafana/dskit v0.0.0-20240626184720-35810fdf1c6d/go.mod h1:HvSf3uf8Ps2vPpzHeAFyZTdUcbVr+Rxpq1xcx7J/muc= +github.com/grafana/dskit v0.0.0-20240704113758-97b2aa96bcb1 h1:Hf7Swe4Vr+makVm+TSym1u0QMolxMQ1/L9gm1Pzdq5I= +github.com/grafana/dskit v0.0.0-20240704113758-97b2aa96bcb1/go.mod h1:UA1BG0yY/B7lTcdeqoud+3/TglKmPL88OM5qCeRs8BU= github.com/grafana/e2e v0.1.2-0.20240118170847-db90b84177fc h1:BW+LjKJDz0So5LI8UZfW5neWeKpSkWqhmGjQFzcFfLM= github.com/grafana/e2e v0.1.2-0.20240118170847-db90b84177fc/go.mod h1:JVmqPBe8A/pZWwRoJW5ZjyALeY5OXMzPl7LrVXOdZAI= github.com/grafana/goautoneg v0.0.0-20240607115440-f335c04c58ce h1:WI1olbgS+sEl77qxEYbmt9TgRUz7iLqmjh8lYPpGlKQ= @@ -525,8 +525,8 @@ github.com/grafana/gomemcache v0.0.0-20240229205252-cd6a66d6fb56 h1:X8IKQ0wu40wp github.com/grafana/gomemcache v0.0.0-20240229205252-cd6a66d6fb56/go.mod h1:PGk3RjYHpxMM8HFPhKKo+vve3DdlPUELZLSDEFehPuU= github.com/grafana/memberlist v0.3.1-0.20220714140823-09ffed8adbbe h1:yIXAAbLswn7VNWBIvM71O2QsgfgW9fRXZNR0DXe6pDU= github.com/grafana/memberlist v0.3.1-0.20220714140823-09ffed8adbbe/go.mod h1:MS2lj3INKhZjWNqd3N0m3J+Jxf3DAOnAH9VT3Sh9MUE= -github.com/grafana/mimir-prometheus v0.0.0-20240703103907-5131622acfee h1:H6XAHAr8YzO61zZvmGmSWt8QFgkWHbXTWwxqCHf5Wy8= -github.com/grafana/mimir-prometheus 
v0.0.0-20240703103907-5131622acfee/go.mod h1:DjC1WWVnO+kFTzxrvJ6uS3SvbtoRiW4/lgLFUd49OPg= +github.com/grafana/mimir-prometheus v0.0.0-20240704133652-fb0cb30e280c h1:9oIh0utTogdAZlmlqIOuq5y9lFVtCPnk29AQa5ChufM= +github.com/grafana/mimir-prometheus v0.0.0-20240704133652-fb0cb30e280c/go.mod h1:DjC1WWVnO+kFTzxrvJ6uS3SvbtoRiW4/lgLFUd49OPg= github.com/grafana/opentracing-contrib-go-stdlib v0.0.0-20230509071955-f410e79da956 h1:em1oddjXL8c1tL0iFdtVtPloq2hRPen2MJQKoAWpxu0= github.com/grafana/opentracing-contrib-go-stdlib v0.0.0-20230509071955-f410e79da956/go.mod h1:qtI1ogk+2JhVPIXVc6q+NHziSmy2W5GbdQZFUHADCBU= github.com/grafana/prometheus-alertmanager v0.25.1-0.20240625192351-66ec17e3aa45 h1:AJKOtDKAOg8XNFnIZSmqqqutoTSxVlRs6vekL2p2KEY= diff --git a/pkg/frontend/querymiddleware/astmapper/parallel.go b/pkg/frontend/querymiddleware/astmapper/parallel.go index 427d5d47546..7414b5cb240 100644 --- a/pkg/frontend/querymiddleware/astmapper/parallel.go +++ b/pkg/frontend/querymiddleware/astmapper/parallel.go @@ -28,6 +28,8 @@ var NonParallelFuncs = []string{ "absent", "absent_over_time", "histogram_quantile", + "limitk", + "limit_ratio", "sort_desc", "sort_by_label", "sort_by_label_desc", diff --git a/pkg/storage/ingest/reader.go b/pkg/storage/ingest/reader.go index e3e95d79924..2047b052fcc 100644 --- a/pkg/storage/ingest/reader.go +++ b/pkg/storage/ingest/reader.go @@ -304,12 +304,7 @@ func (r *PartitionReader) processNextFetchesUntilLagHonored(ctx context.Context, } if boff.Err() != nil { - // TODO should be moved to dskit's backoff - if ctx.Err() != nil { - return 0, context.Cause(ctx) - } - - return 0, boff.Err() + return 0, boff.ErrCause() } // If it took less than the max desired lag to replay the partition @@ -319,12 +314,7 @@ func (r *PartitionReader) processNextFetchesUntilLagHonored(ctx context.Context, } } - // TODO should be moved to dskit's backoff - if ctx.Err() != nil { - return 0, context.Cause(ctx) - } - - return 0, boff.Err() + return 0, boff.ErrCause() } func filterOutErrFetches(fetches kgo.Fetches) kgo.Fetches { diff --git a/vendor/github.com/grafana/dskit/backoff/backoff.go b/vendor/github.com/grafana/dskit/backoff/backoff.go index 7ce55647284..419af80e1ad 100644 --- a/vendor/github.com/grafana/dskit/backoff/backoff.go +++ b/vendor/github.com/grafana/dskit/backoff/backoff.go @@ -54,7 +54,7 @@ func (b *Backoff) Ongoing() bool { return b.ctx.Err() == nil && (b.cfg.MaxRetries == 0 || b.numRetries < b.cfg.MaxRetries) } -// Err returns the reason for terminating the backoff, or nil if it didn't terminate +// Err returns the reason for terminating the backoff, or nil if it didn't terminate. func (b *Backoff) Err() error { if b.ctx.Err() != nil { return b.ctx.Err() @@ -65,6 +65,15 @@ func (b *Backoff) Err() error { return nil } +// ErrCause is like Err() but returns the context cause if backoff is terminated because the +// context has been canceled. 
+func (b *Backoff) ErrCause() error { + if b.ctx.Err() != nil { + return context.Cause(b.ctx) + } + return b.Err() +} + // NumRetries returns the number of retries so far func (b *Backoff) NumRetries() int { return b.numRetries diff --git a/vendor/github.com/prometheus/prometheus/model/labels/regexp.go b/vendor/github.com/prometheus/prometheus/model/labels/regexp.go index 40853f79e75..3238190e989 100644 --- a/vendor/github.com/prometheus/prometheus/model/labels/regexp.go +++ b/vendor/github.com/prometheus/prometheus/model/labels/regexp.go @@ -31,7 +31,7 @@ const ( maxSetMatches = 256 // The minimum number of alternate values a regex should have to trigger - // the optimization done by optimizeEqualStringMatchers() and so use a map + // the optimization done by optimizeEqualOrPrefixStringMatchers() and so use a map // to match values instead of iterating over a list. This value has // been computed running BenchmarkOptimizeEqualStringMatchers. minEqualMultiStringMatcherMapThreshold = 16 @@ -382,7 +382,7 @@ func optimizeAlternatingLiterals(s string) (StringMatcher, []string) { return nil, nil } - multiMatcher := newEqualMultiStringMatcher(true, estimatedAlternates) + multiMatcher := newEqualMultiStringMatcher(true, estimatedAlternates, 0, 0) for end := strings.IndexByte(s, '|'); end > -1; end = strings.IndexByte(s, '|') { // Split the string into the next literal and the remainder @@ -457,7 +457,7 @@ func stringMatcherFromRegexp(re *syntax.Regexp) StringMatcher { clearBeginEndText(re) m := stringMatcherFromRegexpInternal(re) - m = optimizeEqualStringMatchers(m, minEqualMultiStringMatcherMapThreshold) + m = optimizeEqualOrPrefixStringMatchers(m, minEqualMultiStringMatcherMapThreshold) return m } @@ -778,17 +778,20 @@ func (m *equalStringMatcher) Matches(s string) bool { type multiStringMatcherBuilder interface { StringMatcher add(s string) + addPrefix(prefix string, prefixCaseSensitive bool, matcher StringMatcher) setMatches() []string } -func newEqualMultiStringMatcher(caseSensitive bool, estimatedSize int) multiStringMatcherBuilder { +func newEqualMultiStringMatcher(caseSensitive bool, estimatedSize, estimatedPrefixes, minPrefixLength int) multiStringMatcherBuilder { // If the estimated size is low enough, it's faster to use a slice instead of a map. - if estimatedSize < minEqualMultiStringMatcherMapThreshold { + if estimatedSize < minEqualMultiStringMatcherMapThreshold && estimatedPrefixes == 0 { return &equalMultiStringSliceMatcher{caseSensitive: caseSensitive, values: make([]string, 0, estimatedSize)} } return &equalMultiStringMapMatcher{ values: make(map[string]struct{}, estimatedSize), + prefixes: make(map[string][]StringMatcher, estimatedPrefixes), + minPrefixLen: minPrefixLength, caseSensitive: caseSensitive, } } @@ -804,6 +807,10 @@ func (m *equalMultiStringSliceMatcher) add(s string) { m.values = append(m.values, s) } +func (m *equalMultiStringSliceMatcher) addPrefix(_ string, _ bool, _ StringMatcher) { + panic("not implemented") +} + func (m *equalMultiStringSliceMatcher) setMatches() []string { return m.values } @@ -825,12 +832,17 @@ func (m *equalMultiStringSliceMatcher) Matches(s string) bool { return false } -// equalMultiStringMapMatcher matches a string exactly against a map of valid values. +// equalMultiStringMapMatcher matches a string exactly against a map of valid values +// or against a set of prefix matchers. type equalMultiStringMapMatcher struct { // values contains values to match a string against. 
If the matching is case insensitive, // the values here must be lowercase. values map[string]struct{} - + // prefixes maps strings, all of length minPrefixLen, to sets of matchers to check the rest of the string. + // If the matching is case insensitive, prefixes are all lowercase. + prefixes map[string][]StringMatcher + // minPrefixLen can be zero, meaning there are no prefix matchers. + minPrefixLen int caseSensitive bool } @@ -842,8 +854,27 @@ func (m *equalMultiStringMapMatcher) add(s string) { m.values[s] = struct{}{} } +func (m *equalMultiStringMapMatcher) addPrefix(prefix string, prefixCaseSensitive bool, matcher StringMatcher) { + if m.minPrefixLen == 0 { + panic("addPrefix called when no prefix length defined") + } + if len(prefix) < m.minPrefixLen { + panic("addPrefix called with a too short prefix") + } + if m.caseSensitive != prefixCaseSensitive { + panic("addPrefix called with a prefix whose case sensitivity is different than the expected one") + } + + s := prefix[:m.minPrefixLen] + if !m.caseSensitive { + s = strings.ToLower(s) + } + + m.prefixes[s] = append(m.prefixes[s], matcher) +} + func (m *equalMultiStringMapMatcher) setMatches() []string { - if len(m.values) >= maxSetMatches { + if len(m.values) >= maxSetMatches || len(m.prefixes) > 0 { return nil } @@ -859,8 +890,17 @@ func (m *equalMultiStringMapMatcher) Matches(s string) bool { s = toNormalisedLower(s) } - _, ok := m.values[s] - return ok + if _, ok := m.values[s]; ok { + return true + } + if m.minPrefixLen > 0 && len(s) >= m.minPrefixLen { + for _, matcher := range m.prefixes[s[:m.minPrefixLen]] { + if matcher.Matches(s) { + return true + } + } + } + return false } // toNormalisedLower normalises the input string using "Unicode Normalization Form D" and then converts @@ -943,20 +983,24 @@ func (m trueMatcher) Matches(_ string) bool { return true } -// optimizeEqualStringMatchers optimize a specific case where all matchers are made by an -// alternation (orStringMatcher) of strings checked for equality (equalStringMatcher). In -// this specific case, when we have many strings to match against we can use a map instead +// optimizeEqualOrPrefixStringMatchers optimizes a specific case where all matchers are made of an +// alternation (orStringMatcher) of strings checked for equality (equalStringMatcher) or +// with a literal prefix (literalPrefixSensitiveStringMatcher or literalPrefixInsensitiveStringMatcher). +// +// In this specific case, when we have many strings to match against we can use a map instead // of iterating over the list of strings. -func optimizeEqualStringMatchers(input StringMatcher, threshold int) StringMatcher { +func optimizeEqualOrPrefixStringMatchers(input StringMatcher, threshold int) StringMatcher { var ( caseSensitive bool caseSensitiveSet bool numValues int + numPrefixes int + minPrefixLength int ) // Analyse the input StringMatcher to count the number of occurrences // and ensure all of them have the same case sensitivity. - analyseCallback := func(matcher *equalStringMatcher) bool { + analyseEqualMatcherCallback := func(matcher *equalStringMatcher) bool { // Ensure we don't have mixed case sensitivity. 
if caseSensitiveSet && caseSensitive != matcher.caseSensitive { return false @@ -969,34 +1013,55 @@ func optimizeEqualStringMatchers(input StringMatcher, threshold int) StringMatch return true } - if !findEqualStringMatchers(input, analyseCallback) { + analysePrefixMatcherCallback := func(prefix string, prefixCaseSensitive bool, matcher StringMatcher) bool { + // Ensure we don't have mixed case sensitivity. + if caseSensitiveSet && caseSensitive != prefixCaseSensitive { + return false + } else if !caseSensitiveSet { + caseSensitive = prefixCaseSensitive + caseSensitiveSet = true + } + if numPrefixes == 0 || len(prefix) < minPrefixLength { + minPrefixLength = len(prefix) + } + + numPrefixes++ + return true + } + + if !findEqualOrPrefixStringMatchers(input, analyseEqualMatcherCallback, analysePrefixMatcherCallback) { return input } - // If the number of values found is less than the threshold, then we should skip the optimization. - if numValues < threshold { + // If the number of values and prefixes found is less than the threshold, then we should skip the optimization. + if (numValues + numPrefixes) < threshold { return input } // Parse the input StringMatcher again to extract and store all values. // We can skip the case sensitivity check because we've already checked it and // if the code reaches this point then it means all matchers have the same case sensitivity. - multiMatcher := newEqualMultiStringMatcher(caseSensitive, numValues) + multiMatcher := newEqualMultiStringMatcher(caseSensitive, numValues, numPrefixes, minPrefixLength) // Ignore the return value because we already iterated over the input StringMatcher // and it was all good. - findEqualStringMatchers(input, func(matcher *equalStringMatcher) bool { + findEqualOrPrefixStringMatchers(input, func(matcher *equalStringMatcher) bool { multiMatcher.add(matcher.s) return true + }, func(prefix string, prefixCaseSensitive bool, matcher StringMatcher) bool { + multiMatcher.addPrefix(prefix, caseSensitive, matcher) + return true }) return multiMatcher } -// findEqualStringMatchers analyze the input StringMatcher and calls the callback for each -// equalStringMatcher found. Returns true if and only if the input StringMatcher is *only* -// composed by an alternation of equalStringMatcher. -func findEqualStringMatchers(input StringMatcher, callback func(matcher *equalStringMatcher) bool) bool { +// findEqualOrPrefixStringMatchers analyzes the input StringMatcher and calls the equalMatcherCallback for each +// equalStringMatcher found, and prefixMatcherCallback for each literalPrefixSensitiveStringMatcher and literalPrefixInsensitiveStringMatcher found. +// +// Returns true if and only if the input StringMatcher is *only* composed of an alternation of equalStringMatcher and/or +// literal prefix matchers. Returns false if prefixMatcherCallback is nil and a literal prefix matcher is encountered. 
+func findEqualOrPrefixStringMatchers(input StringMatcher, equalMatcherCallback func(matcher *equalStringMatcher) bool, prefixMatcherCallback func(prefix string, prefixCaseSensitive bool, matcher StringMatcher) bool) bool { orInput, ok := input.(orStringMatcher) if !ok { return false @@ -1005,17 +1070,27 @@ func findEqualStringMatchers(input StringMatcher, callback func(matcher *equalSt for _, m := range orInput { switch casted := m.(type) { case orStringMatcher: - if !findEqualStringMatchers(m, callback) { + if !findEqualOrPrefixStringMatchers(m, equalMatcherCallback, prefixMatcherCallback) { return false } case *equalStringMatcher: - if !callback(casted) { + if !equalMatcherCallback(casted) { + return false + } + + case *literalPrefixSensitiveStringMatcher: + if prefixMatcherCallback == nil || !prefixMatcherCallback(casted.prefix, true, casted) { + return false + } + + case *literalPrefixInsensitiveStringMatcher: + if prefixMatcherCallback == nil || !prefixMatcherCallback(casted.prefix, false, casted) { return false } default: - // It's not an equal string matcher, so we have to stop searching + // It's not an equal or prefix string matcher, so we have to stop searching // cause this optimization can't be applied. return false } diff --git a/vendor/github.com/prometheus/prometheus/promql/engine.go b/vendor/github.com/prometheus/prometheus/promql/engine.go index 203cba19f7c..caf0326bd2f 100644 --- a/vendor/github.com/prometheus/prometheus/promql/engine.go +++ b/vendor/github.com/prometheus/prometheus/promql/engine.go @@ -1319,7 +1319,7 @@ func (ev *evaluator) rangeEvalAgg(aggExpr *parser.AggregateExpr, sortedGrouping index, ok := groupToResultIndex[groupingKey] // Add a new group if it doesn't exist. if !ok { - if aggExpr.Op != parser.TOPK && aggExpr.Op != parser.BOTTOMK { + if aggExpr.Op != parser.TOPK && aggExpr.Op != parser.BOTTOMK && aggExpr.Op != parser.LIMITK && aggExpr.Op != parser.LIMIT_RATIO { m := generateGroupingLabels(enh, series.Metric, aggExpr.Without, sortedGrouping) result = append(result, Series{Metric: m}) } @@ -1332,9 +1332,10 @@ func (ev *evaluator) rangeEvalAgg(aggExpr *parser.AggregateExpr, sortedGrouping groups := make([]groupedAggregation, groupCount) var k int + var ratio float64 var seriess map[uint64]Series switch aggExpr.Op { - case parser.TOPK, parser.BOTTOMK: + case parser.TOPK, parser.BOTTOMK, parser.LIMITK: if !convertibleToInt64(param) { ev.errorf("Scalar value %v overflows int64", param) } @@ -1346,6 +1347,23 @@ func (ev *evaluator) rangeEvalAgg(aggExpr *parser.AggregateExpr, sortedGrouping return nil, warnings } seriess = make(map[uint64]Series, len(inputMatrix)) // Output series by series hash. + case parser.LIMIT_RATIO: + if math.IsNaN(param) { + ev.errorf("Ratio value %v is NaN", param) + } + switch { + case param == 0: + return nil, warnings + case param < -1.0: + ratio = -1.0 + warnings.Add(annotations.NewInvalidRatioWarning(param, ratio, aggExpr.Param.PositionRange())) + case param > 1.0: + ratio = 1.0 + warnings.Add(annotations.NewInvalidRatioWarning(param, ratio, aggExpr.Param.PositionRange())) + default: + ratio = param + } + seriess = make(map[uint64]Series, len(inputMatrix)) // Output series by series hash. 
case parser.QUANTILE: if math.IsNaN(param) || param < 0 || param > 1 { warnings.Add(annotations.NewInvalidQuantileWarning(param, aggExpr.Param.PositionRange())) @@ -1363,11 +1381,12 @@ func (ev *evaluator) rangeEvalAgg(aggExpr *parser.AggregateExpr, sortedGrouping enh.Ts = ts var ws annotations.Annotations switch aggExpr.Op { - case parser.TOPK, parser.BOTTOMK: - result, ws = ev.aggregationK(aggExpr, k, inputMatrix, seriesToResult, groups, enh, seriess) + case parser.TOPK, parser.BOTTOMK, parser.LIMITK, parser.LIMIT_RATIO: + result, ws = ev.aggregationK(aggExpr, k, ratio, inputMatrix, seriesToResult, groups, enh, seriess) // If this could be an instant query, shortcut so as not to change sort order. if ev.endTimestamp == ev.startTimestamp { - return result, ws + warnings.Merge(ws) + return result, warnings } default: ws = ev.aggregation(aggExpr, param, inputMatrix, result, seriesToResult, groups, enh) @@ -1382,7 +1401,7 @@ func (ev *evaluator) rangeEvalAgg(aggExpr *parser.AggregateExpr, sortedGrouping // Assemble the output matrix. By the time we get here we know we don't have too many samples. switch aggExpr.Op { - case parser.TOPK, parser.BOTTOMK: + case parser.TOPK, parser.BOTTOMK, parser.LIMITK, parser.LIMIT_RATIO: result = make(Matrix, 0, len(seriess)) for _, ss := range seriess { result = append(result, ss) @@ -2755,14 +2774,15 @@ func vectorElemBinop(op parser.ItemType, lhs, rhs float64, hlhs, hrhs *histogram } type groupedAggregation struct { - seen bool // Was this output groups seen in the input at this timestamp. - hasFloat bool // Has at least 1 float64 sample aggregated. - hasHistogram bool // Has at least 1 histogram sample aggregated. - floatValue float64 - histogramValue *histogram.FloatHistogram - floatMean float64 // Mean, or "compensating value" for Kahan summation. - groupCount int - heap vectorByValueHeap + seen bool // Was this output group seen in the input at this timestamp. + hasFloat bool // Has at least 1 float64 sample aggregated. + hasHistogram bool // Has at least 1 histogram sample aggregated. + floatValue float64 + histogramValue *histogram.FloatHistogram + floatMean float64 // Mean, or "compensating value" for Kahan summation. + groupCount int + groupAggrComplete bool // Used by LIMITK to short-cut series loop when we've reached k elements in every group + heap vectorByValueHeap } // aggregation evaluates sum, avg, count, stdvar, stddev or quantile at one timestep on inputMatrix. @@ -2959,19 +2979,22 @@ func (ev *evaluator) aggregation(e *parser.AggregateExpr, q float64, inputMatrix return annos } -// aggregationK evaluates topk or bottomk at one timestep on inputMatrix. +// aggregationK evaluates topk, bottomk, limitk, or limit_ratio at one timestep on inputMatrix. // Output that has the same labels as the input, but just k of them per group. // seriesToResult maps inputMatrix indexes to groups indexes. -// For an instant query, returns a Matrix in descending order for topk or ascending for bottomk. +// For an instant query, returns a Matrix in descending order for topk or ascending for bottomk, or in no particular order for limitk / limit_ratio. // For a range query, aggregates output in the seriess map. 
-func (ev *evaluator) aggregationK(e *parser.AggregateExpr, k int, inputMatrix Matrix, seriesToResult []int, groups []groupedAggregation, enh *EvalNodeHelper, seriess map[uint64]Series) (Matrix, annotations.Annotations) { +func (ev *evaluator) aggregationK(e *parser.AggregateExpr, k int, r float64, inputMatrix Matrix, seriesToResult []int, groups []groupedAggregation, enh *EvalNodeHelper, seriess map[uint64]Series) (Matrix, annotations.Annotations) { op := e.Op var s Sample var annos annotations.Annotations + // Used to short-cut the loop for LIMITK if we've already collected k elements for every group + groupsRemaining := len(groups) for i := range groups { groups[i].seen = false } +seriesLoop: for si := range inputMatrix { f, _, ok := ev.nextValues(enh.Ts, &inputMatrix[si]) if !ok { @@ -2982,11 +3005,23 @@ func (ev *evaluator) aggregationK(e *parser.AggregateExpr, k int, inputMatrix Ma group := &groups[seriesToResult[si]] // Initialize this group if it's the first time we've seen it. if !group.seen { + // LIMIT_RATIO is a special case, as we may not add this very sample to the heap, + // and we don't know its final size in advance. + if op == parser.LIMIT_RATIO { + *group = groupedAggregation{ + seen: true, + heap: make(vectorByValueHeap, 0), + } + if ratiosampler.AddRatioSample(r, &s) { + heap.Push(&group.heap, &s) + } + } else { + *group = groupedAggregation{ + seen: true, + heap: make(vectorByValueHeap, 1, k), + } + group.heap[0] = s } - group.heap[0] = s continue } @@ -3017,6 +3052,26 @@ func (ev *evaluator) aggregationK(e *parser.AggregateExpr, k int, inputMatrix Ma } } + case parser.LIMITK: + if len(group.heap) < k { + heap.Push(&group.heap, &s) + } + // LIMITK optimization: early break if we've added k elements to _every_ group, + // especially useful for large timeseries where the user is exploring labels via e.g. + // limitk(10, my_metric) + if !group.groupAggrComplete && len(group.heap) == k { + group.groupAggrComplete = true + groupsRemaining-- + if groupsRemaining == 0 { + break seriesLoop + } + } + + case parser.LIMIT_RATIO: + if ratiosampler.AddRatioSample(r, &s) { + heap.Push(&group.heap, &s) + } + default: panic(fmt.Errorf("expected aggregation operator but got %q", op)) } @@ -3066,6 +3121,11 @@ func (ev *evaluator) aggregationK(e *parser.AggregateExpr, k int, inputMatrix Ma for _, v := range aggr.heap { add(v.Metric, v.F) } + + case parser.LIMITK, parser.LIMIT_RATIO: + for _, v := range aggr.heap { + add(v.Metric, v.F) + } } } @@ -3420,6 +3480,56 @@ func makeInt64Pointer(val int64) *int64 { return valp } +// RatioSampler is an interface added to allow unit-testing (previously: Randomizer). +type RatioSampler interface { + // Return this sample's "offset" in [0.0, 1.0] + sampleOffset(ts int64, sample *Sample) float64 + AddRatioSample(r float64, sample *Sample) bool +} + +// Use Hash(labels.String()) / maxUint64 as a "deterministic" +// value in [0.0, 1.0]. 
+type HashRatioSampler struct{} + +var ratiosampler RatioSampler = NewHashRatioSampler() + +func NewHashRatioSampler() *HashRatioSampler { + return &HashRatioSampler{} +} + +func (s *HashRatioSampler) sampleOffset(ts int64, sample *Sample) float64 { + const ( + float64MaxUint64 = float64(math.MaxUint64) + ) + return float64(sample.Metric.Hash()) / float64MaxUint64 +} + +func (s *HashRatioSampler) AddRatioSample(ratioLimit float64, sample *Sample) bool { + // If ratioLimit >= 0: add sample if sampleOffset is less than ratioLimit + // + // 0.0 ratioLimit 1.0 + // [---------|--------------------------] + // [#########...........................] + // + // e.g.: + // sampleOffset==0.3 && ratioLimit==0.4 + // 0.3 < 0.4 ? --> add sample + // + // Else if ratioLimit < 0: add sample if rand() returns the "complement" of the ratioLimit>=0 case + // (loosely similar behavior to negative array indexing in other programming languages) + // + // 0.0 1+ratioLimit 1.0 + // [---------|--------------------------] + // [.........###########################] + // + // e.g.: + // sampleOffset==0.3 && ratioLimit==-0.6 + // 0.3 >= 0.4 ? --> don't add sample + sampleOffset := s.sampleOffset(sample.T, sample) + return (ratioLimit >= 0 && sampleOffset < ratioLimit) || + (ratioLimit < 0 && sampleOffset >= (1.0+ratioLimit)) +} + type histogramStatsSeries struct { storage.Series } diff --git a/vendor/github.com/prometheus/prometheus/promql/parser/generated_parser.y b/vendor/github.com/prometheus/prometheus/promql/parser/generated_parser.y index b39c1150a5b..d84acc37c5d 100644 --- a/vendor/github.com/prometheus/prometheus/promql/parser/generated_parser.y +++ b/vendor/github.com/prometheus/prometheus/promql/parser/generated_parser.y @@ -126,6 +126,8 @@ STDDEV STDVAR SUM TOPK +LIMITK +LIMIT_RATIO %token aggregatorsEnd // Keywords. @@ -609,7 +611,7 @@ metric : metric_identifier label_set ; -metric_identifier: AVG | BOTTOMK | BY | COUNT | COUNT_VALUES | GROUP | IDENTIFIER | LAND | LOR | LUNLESS | MAX | METRIC_IDENTIFIER | MIN | OFFSET | QUANTILE | STDDEV | STDVAR | SUM | TOPK | WITHOUT | START | END; +metric_identifier: AVG | BOTTOMK | BY | COUNT | COUNT_VALUES | GROUP | IDENTIFIER | LAND | LOR | LUNLESS | MAX | METRIC_IDENTIFIER | MIN | OFFSET | QUANTILE | STDDEV | STDVAR | SUM | TOPK | WITHOUT | START | END | LIMITK | LIMIT_RATIO; label_set : LEFT_BRACE label_set_list RIGHT_BRACE { $$ = labels.New($2...) } @@ -851,10 +853,10 @@ bucket_set_list : bucket_set_list SPACE number * Keyword lists. */ -aggregate_op : AVG | BOTTOMK | COUNT | COUNT_VALUES | GROUP | MAX | MIN | QUANTILE | STDDEV | STDVAR | SUM | TOPK ; +aggregate_op : AVG | BOTTOMK | COUNT | COUNT_VALUES | GROUP | MAX | MIN | QUANTILE | STDDEV | STDVAR | SUM | TOPK | LIMITK | LIMIT_RATIO; // Inside of grouping options label names can be recognized as keywords by the lexer. This is a list of keywords that could also be a label name. 
-maybe_label : AVG | BOOL | BOTTOMK | BY | COUNT | COUNT_VALUES | GROUP | GROUP_LEFT | GROUP_RIGHT | IDENTIFIER | IGNORING | LAND | LOR | LUNLESS | MAX | METRIC_IDENTIFIER | MIN | OFFSET | ON | QUANTILE | STDDEV | STDVAR | SUM | TOPK | START | END | ATAN2; +maybe_label : AVG | BOOL | BOTTOMK | BY | COUNT | COUNT_VALUES | GROUP | GROUP_LEFT | GROUP_RIGHT | IDENTIFIER | IGNORING | LAND | LOR | LUNLESS | MAX | METRIC_IDENTIFIER | MIN | OFFSET | ON | QUANTILE | STDDEV | STDVAR | SUM | TOPK | START | END | ATAN2 | LIMITK | LIMIT_RATIO; unary_op : ADD | SUB; diff --git a/vendor/github.com/prometheus/prometheus/promql/parser/generated_parser.y.go b/vendor/github.com/prometheus/prometheus/promql/parser/generated_parser.y.go index d9a312a137e..07899c0a000 100644 --- a/vendor/github.com/prometheus/prometheus/promql/parser/generated_parser.y.go +++ b/vendor/github.com/prometheus/prometheus/promql/parser/generated_parser.y.go @@ -103,27 +103,29 @@ const STDDEV = 57411 const STDVAR = 57412 const SUM = 57413 const TOPK = 57414 -const aggregatorsEnd = 57415 -const keywordsStart = 57416 -const BOOL = 57417 -const BY = 57418 -const GROUP_LEFT = 57419 -const GROUP_RIGHT = 57420 -const IGNORING = 57421 -const OFFSET = 57422 -const ON = 57423 -const WITHOUT = 57424 -const keywordsEnd = 57425 -const preprocessorStart = 57426 -const START = 57427 -const END = 57428 -const preprocessorEnd = 57429 -const startSymbolsStart = 57430 -const START_METRIC = 57431 -const START_SERIES_DESCRIPTION = 57432 -const START_EXPRESSION = 57433 -const START_METRIC_SELECTOR = 57434 -const startSymbolsEnd = 57435 +const LIMITK = 57415 +const LIMIT_RATIO = 57416 +const aggregatorsEnd = 57417 +const keywordsStart = 57418 +const BOOL = 57419 +const BY = 57420 +const GROUP_LEFT = 57421 +const GROUP_RIGHT = 57422 +const IGNORING = 57423 +const OFFSET = 57424 +const ON = 57425 +const WITHOUT = 57426 +const keywordsEnd = 57427 +const preprocessorStart = 57428 +const START = 57429 +const END = 57430 +const preprocessorEnd = 57431 +const startSymbolsStart = 57432 +const START_METRIC = 57433 +const START_SERIES_DESCRIPTION = 57434 +const START_EXPRESSION = 57435 +const START_METRIC_SELECTOR = 57436 +const startSymbolsEnd = 57437 var yyToknames = [...]string{ "$end", @@ -198,6 +200,8 @@ var yyToknames = [...]string{ "STDVAR", "SUM", "TOPK", + "LIMITK", + "LIMIT_RATIO", "aggregatorsEnd", "keywordsStart", "BOOL", @@ -231,279 +235,298 @@ var yyExca = [...]int16{ -1, 1, 1, -1, -2, 0, - -1, 35, - 1, 134, - 10, 134, - 24, 134, + -1, 37, + 1, 136, + 10, 136, + 24, 136, -2, 0, - -1, 58, - 2, 172, - 15, 172, - 76, 172, - 82, 172, - -2, 100, - -1, 59, - 2, 173, - 15, 173, - 76, 173, - 82, 173, - -2, 101, -1, 60, 2, 174, 15, 174, - 76, 174, - 82, 174, - -2, 103, + 78, 174, + 84, 174, + -2, 100, -1, 61, 2, 175, 15, 175, - 76, 175, - 82, 175, - -2, 104, + 78, 175, + 84, 175, + -2, 101, -1, 62, 2, 176, 15, 176, - 76, 176, - 82, 176, - -2, 105, + 78, 176, + 84, 176, + -2, 103, -1, 63, 2, 177, 15, 177, - 76, 177, - 82, 177, - -2, 110, + 78, 177, + 84, 177, + -2, 104, -1, 64, 2, 178, 15, 178, - 76, 178, - 82, 178, - -2, 112, + 78, 178, + 84, 178, + -2, 105, -1, 65, 2, 179, 15, 179, - 76, 179, - 82, 179, - -2, 114, + 78, 179, + 84, 179, + -2, 110, -1, 66, 2, 180, 15, 180, - 76, 180, - 82, 180, - -2, 115, + 78, 180, + 84, 180, + -2, 112, -1, 67, 2, 181, 15, 181, - 76, 181, - 82, 181, - -2, 116, + 78, 181, + 84, 181, + -2, 114, -1, 68, 2, 182, 15, 182, - 76, 182, - 82, 182, - -2, 117, + 78, 182, + 84, 182, + -2, 115, -1, 69, 2, 183, 15, 183, - 76, 183, - 82, 
183, + 78, 183, + 84, 183, + -2, 116, + -1, 70, + 2, 184, + 15, 184, + 78, 184, + 84, 184, + -2, 117, + -1, 71, + 2, 185, + 15, 185, + 78, 185, + 84, 185, -2, 118, - -1, 195, - 12, 231, - 13, 231, - 18, 231, - 19, 231, - 25, 231, - 40, 231, - 46, 231, - 47, 231, - 50, 231, - 56, 231, - 61, 231, - 62, 231, - 63, 231, - 64, 231, - 65, 231, - 66, 231, - 67, 231, - 68, 231, - 69, 231, - 70, 231, - 71, 231, - 72, 231, - 76, 231, - 80, 231, - 82, 231, - 85, 231, - 86, 231, + -1, 72, + 2, 186, + 15, 186, + 78, 186, + 84, 186, + -2, 122, + -1, 73, + 2, 187, + 15, 187, + 78, 187, + 84, 187, + -2, 123, + -1, 199, + 12, 237, + 13, 237, + 18, 237, + 19, 237, + 25, 237, + 40, 237, + 46, 237, + 47, 237, + 50, 237, + 56, 237, + 61, 237, + 62, 237, + 63, 237, + 64, 237, + 65, 237, + 66, 237, + 67, 237, + 68, 237, + 69, 237, + 70, 237, + 71, 237, + 72, 237, + 73, 237, + 74, 237, + 78, 237, + 82, 237, + 84, 237, + 87, 237, + 88, 237, -2, 0, - -1, 196, - 12, 231, - 13, 231, - 18, 231, - 19, 231, - 25, 231, - 40, 231, - 46, 231, - 47, 231, - 50, 231, - 56, 231, - 61, 231, - 62, 231, - 63, 231, - 64, 231, - 65, 231, - 66, 231, - 67, 231, - 68, 231, - 69, 231, - 70, 231, - 71, 231, - 72, 231, - 76, 231, - 80, 231, - 82, 231, - 85, 231, - 86, 231, + -1, 200, + 12, 237, + 13, 237, + 18, 237, + 19, 237, + 25, 237, + 40, 237, + 46, 237, + 47, 237, + 50, 237, + 56, 237, + 61, 237, + 62, 237, + 63, 237, + 64, 237, + 65, 237, + 66, 237, + 67, 237, + 68, 237, + 69, 237, + 70, 237, + 71, 237, + 72, 237, + 73, 237, + 74, 237, + 78, 237, + 82, 237, + 84, 237, + 87, 237, + 88, 237, -2, 0, - -1, 217, - 21, 229, + -1, 221, + 21, 235, -2, 0, - -1, 286, - 21, 230, + -1, 292, + 21, 236, -2, 0, } const yyPrivate = 57344 -const yyLast = 778 +const yyLast = 793 var yyAct = [...]int16{ - 151, 324, 322, 268, 329, 148, 221, 37, 187, 144, - 282, 281, 152, 113, 77, 173, 104, 102, 101, 6, - 223, 193, 105, 194, 195, 196, 128, 262, 260, 155, - 233, 103, 342, 293, 100, 319, 239, 116, 146, 318, - 315, 263, 156, 123, 106, 147, 284, 114, 295, 116, - 156, 341, 175, 259, 340, 253, 57, 264, 157, 114, - 117, 108, 313, 109, 235, 236, 157, 112, 237, 107, - 323, 174, 117, 175, 155, 96, 250, 99, 293, 224, - 226, 228, 229, 230, 238, 240, 243, 244, 245, 246, - 247, 177, 145, 225, 227, 231, 232, 234, 241, 242, - 98, 176, 178, 248, 249, 104, 2, 3, 4, 5, - 158, 105, 177, 110, 168, 162, 165, 302, 150, 160, - 191, 161, 176, 178, 189, 155, 213, 343, 106, 330, - 72, 179, 192, 33, 181, 155, 190, 197, 198, 199, - 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, - 210, 211, 185, 301, 258, 212, 156, 214, 215, 188, - 256, 183, 290, 191, 252, 164, 155, 289, 300, 218, - 223, 79, 157, 217, 7, 299, 312, 257, 163, 251, - 233, 78, 288, 255, 182, 254, 239, 156, 216, 180, - 220, 124, 172, 120, 147, 311, 314, 171, 119, 261, - 287, 153, 154, 157, 279, 280, 79, 147, 283, 310, - 170, 118, 159, 10, 235, 236, 78, 309, 237, 147, - 308, 307, 306, 74, 76, 305, 250, 286, 304, 224, - 226, 228, 229, 230, 238, 240, 243, 244, 245, 246, - 247, 303, 81, 225, 227, 231, 232, 234, 241, 242, - 48, 34, 1, 248, 249, 122, 73, 121, 285, 47, - 291, 292, 294, 56, 296, 8, 9, 9, 46, 35, - 45, 44, 297, 298, 127, 129, 130, 131, 132, 133, - 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, - 43, 42, 41, 125, 166, 40, 316, 317, 126, 39, - 38, 49, 186, 321, 338, 265, 326, 327, 328, 80, - 325, 184, 219, 332, 331, 334, 333, 75, 115, 149, - 335, 336, 100, 51, 72, 337, 53, 55, 222, 22, - 52, 339, 50, 167, 111, 0, 54, 0, 0, 0, - 0, 344, 0, 0, 0, 0, 0, 0, 82, 84, - 0, 70, 0, 0, 0, 0, 0, 18, 19, 93, - 94, 20, 
0, 96, 97, 99, 83, 71, 0, 0, - 0, 0, 58, 59, 60, 61, 62, 63, 64, 65, - 66, 67, 68, 69, 0, 0, 0, 13, 98, 0, - 0, 24, 0, 30, 0, 0, 31, 32, 36, 100, - 51, 72, 0, 53, 267, 0, 22, 52, 0, 0, - 0, 266, 0, 54, 0, 270, 271, 269, 276, 278, - 275, 277, 272, 273, 274, 0, 84, 0, 70, 0, - 0, 0, 0, 0, 18, 19, 93, 94, 20, 0, - 96, 0, 99, 83, 71, 0, 0, 0, 0, 58, - 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, - 69, 0, 0, 0, 13, 98, 0, 0, 24, 0, - 30, 0, 0, 31, 32, 51, 72, 0, 53, 320, - 0, 22, 52, 0, 0, 0, 0, 0, 54, 0, - 270, 271, 269, 276, 278, 275, 277, 272, 273, 274, - 0, 0, 0, 70, 0, 0, 17, 72, 0, 18, - 19, 0, 22, 20, 0, 0, 0, 0, 0, 71, - 0, 0, 0, 0, 58, 59, 60, 61, 62, 63, - 64, 65, 66, 67, 68, 69, 0, 0, 0, 13, - 18, 19, 0, 24, 20, 30, 0, 0, 31, 32, - 0, 0, 0, 0, 0, 11, 12, 14, 15, 16, - 21, 23, 25, 26, 27, 28, 29, 17, 33, 0, - 13, 0, 0, 22, 24, 0, 30, 0, 0, 31, - 32, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 155, 330, 328, 274, 335, 152, 225, 39, 191, 148, + 288, 287, 156, 117, 81, 177, 227, 106, 105, 6, + 154, 108, 107, 197, 132, 198, 237, 109, 199, 200, + 159, 59, 243, 325, 324, 110, 321, 159, 189, 268, + 348, 301, 265, 127, 159, 192, 349, 264, 290, 195, + 176, 160, 159, 269, 308, 175, 319, 195, 160, 347, + 239, 240, 346, 112, 241, 113, 299, 161, 174, 270, + 263, 111, 254, 160, 161, 228, 230, 232, 233, 234, + 242, 244, 247, 248, 249, 250, 251, 255, 256, 161, + 114, 229, 231, 235, 236, 238, 245, 246, 108, 266, + 258, 252, 253, 329, 109, 157, 158, 159, 2, 3, + 4, 5, 307, 160, 162, 257, 262, 299, 172, 166, + 169, 217, 104, 164, 110, 165, 150, 306, 193, 161, + 178, 104, 179, 151, 305, 183, 196, 179, 185, 261, + 194, 201, 202, 203, 204, 205, 206, 207, 208, 209, + 210, 211, 212, 213, 214, 215, 128, 227, 88, 216, + 120, 218, 219, 100, 336, 103, 168, 237, 97, 98, + 118, 181, 100, 243, 103, 87, 181, 224, 259, 167, + 149, 180, 182, 121, 187, 76, 180, 182, 120, 260, + 102, 35, 124, 7, 10, 296, 151, 123, 118, 102, + 295, 239, 240, 267, 78, 241, 116, 186, 285, 286, + 122, 121, 289, 254, 318, 294, 228, 230, 232, 233, + 234, 242, 244, 247, 248, 249, 250, 251, 255, 256, + 317, 292, 229, 231, 235, 236, 238, 245, 246, 316, + 315, 314, 252, 253, 133, 134, 135, 136, 137, 138, + 139, 140, 141, 142, 143, 144, 145, 146, 147, 313, + 312, 311, 310, 309, 320, 293, 297, 298, 300, 273, + 302, 222, 151, 8, 85, 221, 272, 37, 303, 304, + 276, 277, 275, 282, 284, 281, 283, 278, 279, 280, + 220, 163, 126, 50, 125, 36, 1, 291, 151, 77, + 83, 49, 322, 323, 48, 83, 47, 104, 46, 327, + 82, 131, 332, 333, 334, 82, 331, 45, 184, 338, + 337, 340, 339, 80, 44, 43, 341, 342, 129, 53, + 76, 343, 55, 86, 88, 22, 54, 345, 170, 171, + 42, 130, 56, 41, 97, 98, 40, 350, 100, 101, + 103, 87, 58, 51, 190, 9, 9, 74, 344, 271, + 84, 188, 223, 18, 19, 79, 119, 20, 153, 57, + 226, 52, 115, 75, 0, 102, 0, 0, 60, 61, + 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, + 72, 73, 0, 0, 0, 13, 0, 0, 0, 24, + 0, 30, 0, 0, 31, 32, 38, 0, 53, 76, + 0, 55, 326, 0, 22, 54, 0, 0, 0, 0, + 0, 56, 0, 276, 277, 275, 282, 284, 281, 283, + 278, 279, 280, 0, 0, 0, 74, 0, 0, 0, + 0, 0, 18, 19, 0, 0, 20, 0, 0, 0, + 0, 0, 75, 0, 0, 0, 0, 60, 61, 62, + 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, + 73, 0, 0, 0, 13, 0, 0, 0, 24, 0, + 30, 0, 0, 31, 32, 53, 76, 0, 55, 0, + 0, 22, 54, 0, 0, 0, 0, 0, 56, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 74, 0, 17, 76, 0, 0, 18, + 19, 22, 0, 20, 0, 0, 0, 0, 0, 75, + 0, 0, 0, 0, 60, 61, 62, 63, 64, 65, + 66, 67, 68, 69, 70, 71, 72, 73, 0, 18, + 19, 13, 0, 20, 0, 24, 0, 30, 0, 0, + 31, 32, 0, 0, 11, 12, 14, 15, 16, 21, + 23, 25, 26, 27, 28, 29, 33, 34, 17, 35, + 0, 13, 
0, 0, 22, 24, 0, 30, 0, 0, + 31, 32, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 18, 19, 0, 0, 20, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 11, 12, 14, 15, - 16, 21, 23, 25, 26, 27, 28, 29, 100, 0, - 0, 13, 0, 0, 0, 24, 169, 30, 0, 0, - 31, 32, 0, 0, 0, 0, 0, 100, 0, 0, - 0, 0, 0, 0, 82, 84, 85, 0, 86, 87, - 88, 89, 90, 91, 92, 93, 94, 95, 0, 96, - 97, 99, 83, 82, 84, 85, 0, 86, 87, 88, - 89, 90, 91, 92, 93, 94, 95, 0, 96, 97, - 99, 83, 100, 0, 98, 0, 0, 0, 0, 0, + 0, 0, 18, 19, 0, 0, 20, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 11, 12, 14, + 15, 16, 21, 23, 25, 26, 27, 28, 29, 33, + 34, 104, 0, 0, 13, 0, 0, 0, 24, 173, + 30, 0, 0, 31, 32, 0, 0, 0, 0, 0, + 104, 0, 0, 0, 0, 0, 0, 86, 88, 89, + 0, 90, 91, 92, 93, 94, 95, 96, 97, 98, + 99, 0, 100, 101, 103, 87, 86, 88, 89, 0, + 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, + 0, 100, 101, 103, 87, 104, 0, 0, 0, 102, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 100, 0, 98, 0, 0, 0, 0, 82, 84, - 85, 0, 86, 87, 88, 0, 90, 91, 92, 93, - 94, 95, 0, 96, 97, 99, 83, 82, 84, 85, - 0, 86, 87, 0, 0, 90, 91, 0, 93, 94, - 95, 0, 96, 97, 99, 83, 0, 0, 98, 0, + 0, 0, 0, 0, 104, 0, 0, 0, 102, 0, + 0, 86, 88, 89, 0, 90, 91, 92, 0, 94, + 95, 96, 97, 98, 99, 0, 100, 101, 103, 87, + 86, 88, 89, 0, 90, 91, 0, 0, 94, 95, + 0, 97, 98, 99, 0, 100, 101, 103, 87, 0, + 0, 0, 0, 102, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 98, + 0, 0, 102, } var yyPact = [...]int16{ - 17, 164, 555, 555, 388, 494, -1000, -1000, -1000, 120, + 17, 183, 566, 566, 396, 503, -1000, -1000, -1000, 178, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, - -1000, -1000, -1000, 204, -1000, 240, -1000, 633, -1000, -1000, + -1000, -1000, -1000, -1000, -1000, 303, -1000, 272, -1000, 646, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, - 29, 113, -1000, 463, -1000, 463, 117, -1000, -1000, -1000, + -1000, -1000, 20, 109, -1000, 473, -1000, 473, 172, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, - -1000, -1000, 47, -1000, -1000, 191, -1000, -1000, 253, -1000, - 19, -1000, -49, -49, -49, -49, -49, -49, -49, -49, - -49, -49, -49, -49, -49, -49, -49, -49, 36, 116, - 210, 113, -60, -1000, 163, 163, 311, -1000, 614, 20, - -1000, 190, -1000, -1000, 69, 48, -1000, -1000, -1000, 169, - -1000, 159, -1000, 147, 463, -1000, -58, -53, -1000, 463, - 463, 463, 463, 463, 463, 463, 463, 463, 463, 463, - 463, 463, 463, 463, -1000, 185, -1000, -1000, -1000, 111, - -1000, -1000, -1000, -1000, -1000, -1000, 55, 55, 167, -1000, - -1000, -1000, -1000, 168, -1000, -1000, 157, -1000, 633, -1000, - -1000, 35, -1000, 158, -1000, -1000, -1000, -1000, -1000, 152, - -1000, -1000, -1000, -1000, -1000, 27, 2, 1, -1000, -1000, - -1000, 387, 385, 163, 163, 163, 163, 20, 20, 308, - 308, 308, 697, 678, 308, 308, 697, 20, 20, 308, - 20, 385, -1000, 24, -1000, -1000, -1000, 198, -1000, 160, + -1000, -1000, -1000, -1000, -1000, -1000, 186, -1000, -1000, 190, + -1000, -1000, 290, -1000, 19, -1000, -53, -53, -53, -53, + -53, -53, -53, -53, -53, -53, -53, -53, -53, -53, + -53, -53, 124, 18, 289, 109, -57, -1000, 164, 164, + 317, -1000, 627, 108, -1000, 48, -1000, -1000, 128, 133, + -1000, -1000, -1000, 298, -1000, 182, -1000, 33, 473, -1000, + -58, -51, -1000, 473, 473, 473, 473, 473, 473, 473, + 473, 473, 473, 473, 473, 473, 473, 473, -1000, 187, + -1000, -1000, -1000, 106, -1000, -1000, -1000, -1000, -1000, -1000, + 88, 88, 269, -1000, -1000, -1000, -1000, 155, -1000, -1000, + 93, -1000, 
646, -1000, -1000, 158, -1000, 114, -1000, -1000, + -1000, -1000, -1000, 45, -1000, -1000, -1000, -1000, -1000, 16, + 73, 13, -1000, -1000, -1000, 252, 117, 164, 164, 164, + 164, 108, 108, 293, 293, 293, 710, 691, 293, 293, + 710, 108, 108, 293, 108, 117, -1000, 26, -1000, -1000, + -1000, 263, -1000, 193, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, - -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, - -1000, -1000, 463, -1000, -1000, -1000, -1000, -1000, -1000, 59, - 59, 22, 59, 104, 104, 151, 100, -1000, -1000, 235, - 222, 219, 216, 215, 214, 211, 203, 189, 170, -1000, - -1000, -1000, -1000, -1000, -1000, 41, 194, -1000, -1000, 18, - -1000, 633, -1000, -1000, -1000, 59, -1000, 13, 9, 462, - -1000, -1000, -1000, 14, 10, 55, 55, 55, 115, 115, - 14, 115, 14, -1000, -1000, -1000, -1000, -1000, 59, 59, - -1000, -1000, -1000, 59, -1000, -1000, -1000, -1000, -1000, -1000, - 55, -1000, -1000, -1000, -1000, -1000, -1000, -1000, 30, -1000, - 106, -1000, -1000, -1000, -1000, + -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, 473, -1000, + -1000, -1000, -1000, -1000, -1000, 98, 98, 15, 98, 41, + 41, 110, 37, -1000, -1000, 257, 256, 255, 254, 253, + 235, 234, 233, 224, 208, -1000, -1000, -1000, -1000, -1000, + -1000, 35, 262, -1000, -1000, 14, -1000, 646, -1000, -1000, + -1000, 98, -1000, 8, 7, 395, -1000, -1000, -1000, 47, + 11, 88, 88, 88, 150, 150, 47, 150, 47, -1000, + -1000, -1000, -1000, -1000, 98, 98, -1000, -1000, -1000, 98, + -1000, -1000, -1000, -1000, -1000, -1000, 88, -1000, -1000, -1000, + -1000, -1000, -1000, -1000, 38, -1000, 25, -1000, -1000, -1000, + -1000, } var yyPgo = [...]int16{ - 0, 334, 13, 332, 6, 15, 328, 263, 327, 319, - 318, 213, 265, 317, 14, 312, 10, 11, 311, 309, - 8, 305, 3, 4, 304, 2, 1, 0, 302, 12, - 5, 301, 300, 18, 191, 299, 298, 7, 295, 294, - 17, 293, 56, 292, 291, 290, 274, 271, 270, 268, - 259, 250, 9, 258, 252, 251, + 0, 372, 13, 371, 6, 15, 370, 352, 369, 368, + 366, 194, 273, 365, 14, 362, 10, 11, 361, 360, + 8, 359, 3, 4, 358, 2, 1, 0, 354, 12, + 5, 353, 346, 18, 156, 343, 341, 7, 340, 338, + 17, 328, 31, 325, 324, 317, 311, 308, 306, 304, + 301, 293, 9, 297, 296, 295, } var yyR1 = [...]int8{ @@ -519,18 +542,18 @@ var yyR1 = [...]int8{ 1, 2, 2, 2, 2, 2, 2, 2, 12, 12, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, - 7, 7, 11, 11, 11, 11, 13, 13, 13, 14, - 14, 14, 14, 55, 19, 19, 19, 19, 18, 18, - 18, 18, 18, 18, 18, 18, 18, 28, 28, 28, - 20, 20, 20, 20, 21, 21, 21, 22, 22, 22, - 22, 22, 22, 22, 22, 22, 22, 23, 23, 24, - 24, 24, 3, 3, 3, 3, 3, 3, 3, 3, - 3, 3, 3, 3, 6, 6, 6, 6, 6, 6, + 7, 7, 7, 7, 11, 11, 11, 11, 13, 13, + 13, 14, 14, 14, 14, 55, 19, 19, 19, 19, + 18, 18, 18, 18, 18, 18, 18, 18, 18, 28, + 28, 28, 20, 20, 20, 20, 21, 21, 21, 22, + 22, 22, 22, 22, 22, 22, 22, 22, 22, 23, + 23, 24, 24, 24, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, - 6, 8, 8, 5, 5, 5, 5, 44, 27, 29, - 29, 30, 30, 26, 25, 25, 52, 48, 10, 53, - 53, 17, 17, + 6, 6, 6, 6, 6, 6, 6, 8, 8, 5, + 5, 5, 5, 44, 27, 29, 29, 30, 30, 26, + 25, 25, 52, 48, 10, 53, 53, 17, 17, } var yyR2 = [...]int8{ @@ -546,94 +569,96 @@ var yyR2 = [...]int8{ 2, 3, 3, 1, 3, 3, 2, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 3, 4, 2, 0, 3, 1, 2, 3, - 3, 2, 1, 2, 0, 3, 2, 1, 1, 3, - 1, 3, 4, 1, 3, 5, 5, 1, 1, 1, - 4, 3, 3, 2, 3, 1, 2, 3, 3, 
3, - 3, 3, 3, 3, 3, 3, 3, 4, 3, 3, - 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 3, 4, 2, 0, 3, 1, + 2, 3, 3, 2, 1, 2, 0, 3, 2, 1, + 1, 3, 1, 3, 4, 1, 3, 5, 5, 1, + 1, 1, 4, 3, 3, 2, 3, 1, 2, 3, + 3, 3, 3, 3, 3, 3, 3, 3, 3, 4, + 3, 3, 1, 2, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, - 2, 1, 1, 1, 2, 1, 1, 1, 1, 0, - 1, 0, 1, + 1, 1, 1, 1, 1, 2, 2, 1, 1, 1, + 2, 1, 1, 1, 1, 0, 1, 0, 1, } var yyChk = [...]int16{ - -1000, -54, 89, 90, 91, 92, 2, 10, -12, -7, - -11, 61, 62, 76, 63, 64, 65, 12, 46, 47, - 50, 66, 18, 67, 80, 68, 69, 70, 71, 72, - 82, 85, 86, 13, -55, -12, 10, -37, -32, -35, - -38, -43, -44, -45, -47, -48, -49, -50, -51, -31, - -3, 12, 19, 15, 25, -8, -7, -42, 61, 62, - 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, - 40, 56, 13, -51, -11, -13, 20, -14, 12, 2, - -19, 2, 40, 58, 41, 42, 44, 45, 46, 47, - 48, 49, 50, 51, 52, 53, 55, 56, 80, 57, - 14, -33, -40, 2, 76, 82, 15, -40, -37, -37, - -42, -1, 20, -2, 12, -10, 2, 25, 20, 7, - 2, 4, 2, 24, -34, -41, -36, -46, 75, -34, - -34, -34, -34, -34, -34, -34, -34, -34, -34, -34, - -34, -34, -34, -34, -52, 56, 2, 9, -30, -9, - 2, -27, -29, 85, 86, 19, 40, 56, -52, 2, - -40, -33, -16, 15, 2, -16, -39, 22, -37, 22, - 20, 7, 2, -5, 2, 4, 53, 43, 54, -5, - 20, -14, 25, 2, -18, 5, -28, -20, 12, -27, - -29, 16, -37, 79, 81, 77, 78, -37, -37, -37, - -37, -37, -37, -37, -37, -37, -37, -37, -37, -37, - -37, -37, -52, 15, -27, -27, 21, 6, 2, -15, - 22, -4, -6, 2, 61, 75, 62, 76, 63, 64, - 65, 77, 78, 12, 79, 46, 47, 50, 66, 18, - 67, 80, 81, 68, 69, 70, 71, 72, 85, 86, - 58, 22, 7, 20, -2, 25, 2, 25, 2, 26, - 26, -29, 26, 40, 56, -21, 24, 17, -22, 30, - 28, 29, 35, 36, 37, 33, 31, 34, 32, -16, - -16, -17, -16, -17, 22, -53, -52, 2, 22, 7, - 2, -37, -26, 19, -26, 26, -26, -20, -20, 24, - 17, 2, 17, 6, 6, 6, 6, 6, 6, 6, - 6, 6, 6, 21, 2, 22, -4, -26, 26, 26, - 17, -22, -25, 56, -26, -30, -27, -27, -27, -23, - 14, -23, -25, -23, -25, -26, -26, -26, -24, -27, - 24, 21, 2, 21, -27, + -1000, -54, 91, 92, 93, 94, 2, 10, -12, -7, + -11, 61, 62, 78, 63, 64, 65, 12, 46, 47, + 50, 66, 18, 67, 82, 68, 69, 70, 71, 72, + 84, 87, 88, 73, 74, 13, -55, -12, 10, -37, + -32, -35, -38, -43, -44, -45, -47, -48, -49, -50, + -51, -31, -3, 12, 19, 15, 25, -8, -7, -42, + 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, + 71, 72, 73, 74, 40, 56, 13, -51, -11, -13, + 20, -14, 12, 2, -19, 2, 40, 58, 41, 42, + 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, + 55, 56, 82, 57, 14, -33, -40, 2, 78, 84, + 15, -40, -37, -37, -42, -1, 20, -2, 12, -10, + 2, 25, 20, 7, 2, 4, 2, 24, -34, -41, + -36, -46, 77, -34, -34, -34, -34, -34, -34, -34, + -34, -34, -34, -34, -34, -34, -34, -34, -52, 56, + 2, 9, -30, -9, 2, -27, -29, 87, 88, 19, + 40, 56, -52, 2, -40, -33, -16, 15, 2, -16, + -39, 22, -37, 22, 20, 7, 2, -5, 2, 4, + 53, 43, 54, -5, 20, -14, 25, 2, -18, 5, + -28, -20, 12, -27, -29, 16, -37, 81, 83, 79, + 80, -37, -37, -37, -37, -37, -37, -37, -37, -37, + -37, -37, -37, -37, -37, -37, -52, 15, -27, -27, + 21, 6, 2, -15, 22, -4, -6, 2, 61, 77, + 62, 78, 63, 64, 65, 79, 80, 12, 81, 46, + 47, 50, 66, 18, 67, 82, 83, 68, 69, 70, + 71, 72, 87, 88, 58, 73, 74, 22, 7, 20, + -2, 25, 2, 25, 2, 26, 26, -29, 26, 40, + 56, -21, 24, 17, -22, 30, 28, 29, 35, 36, + 37, 33, 31, 34, 32, -16, -16, -17, -16, -17, + 22, -53, -52, 2, 22, 7, 2, -37, -26, 19, + -26, 26, -26, -20, -20, 24, 17, 2, 17, 6, + 6, 6, 6, 6, 6, 6, 6, 6, 6, 21, + 2, 22, -4, -26, 26, 26, 17, -22, -25, 56, + -26, -30, -27, -27, 
-27, -23, 14, -23, -25, -23, + -25, -26, -26, -26, -24, -27, 24, 21, 2, 21, + -27, } var yyDef = [...]int16{ - 0, -2, 125, 125, 0, 0, 7, 6, 1, 125, + 0, -2, 127, 127, 0, 0, 7, 6, 1, 127, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, - 119, 120, 121, 0, 2, -2, 3, 4, 8, 9, - 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, - 0, 106, 217, 0, 227, 0, 83, 84, -2, -2, + 119, 120, 121, 122, 123, 0, 2, -2, 3, 4, + 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, + 18, 19, 0, 106, 223, 0, 233, 0, 83, 84, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, - 211, 212, 0, 5, 98, 0, 124, 127, 0, 132, - 133, 137, 43, 43, 43, 43, 43, 43, 43, 43, - 43, 43, 43, 43, 43, 43, 43, 43, 0, 0, - 0, 0, 22, 23, 0, 0, 0, 60, 0, 81, - 82, 0, 87, 89, 0, 93, 97, 228, 122, 0, - 128, 0, 131, 136, 0, 42, 47, 48, 44, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 67, 0, 69, 226, 70, 0, - 72, 221, 222, 73, 74, 218, 0, 0, 0, 80, - 20, 21, 24, 0, 54, 25, 0, 62, 64, 66, - 85, 0, 90, 0, 96, 213, 214, 215, 216, 0, - 123, 126, 129, 130, 135, 138, 140, 143, 147, 148, - 149, 0, 26, 0, 0, -2, -2, 27, 28, 29, - 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, - 40, 41, 68, 0, 219, 220, 75, -2, 79, 0, - 53, 56, 58, 59, 184, 185, 186, 187, 188, 189, + -2, -2, -2, -2, 217, 218, 0, 5, 98, 0, + 126, 129, 0, 134, 135, 139, 43, 43, 43, 43, + 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, + 43, 43, 0, 0, 0, 0, 22, 23, 0, 0, + 0, 60, 0, 81, 82, 0, 87, 89, 0, 93, + 97, 234, 124, 0, 130, 0, 133, 138, 0, 42, + 47, 48, 44, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 67, 0, + 69, 232, 70, 0, 72, 227, 228, 73, 74, 224, + 0, 0, 0, 80, 20, 21, 24, 0, 54, 25, + 0, 62, 64, 66, 85, 0, 90, 0, 96, 219, + 220, 221, 222, 0, 125, 128, 131, 132, 137, 140, + 142, 145, 149, 150, 151, 0, 26, 0, 0, -2, + -2, 27, 28, 29, 30, 31, 32, 33, 34, 35, + 36, 37, 38, 39, 40, 41, 68, 0, 225, 226, + 75, -2, 79, 0, 53, 56, 58, 59, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, - 210, 61, 65, 86, 88, 91, 95, 92, 94, 0, - 0, 0, 0, 0, 0, 0, 0, 153, 155, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 45, - 46, 49, 232, 50, 71, 0, -2, 78, 51, 0, - 57, 63, 139, 223, 141, 0, 144, 0, 0, 0, - 151, 156, 152, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 76, 77, 52, 55, 142, 0, 0, - 150, 154, 157, 0, 225, 158, 159, 160, 161, 162, - 0, 163, 164, 165, 166, 145, 146, 224, 0, 170, - 0, 168, 171, 167, 169, + 210, 211, 212, 213, 214, 215, 216, 61, 65, 86, + 88, 91, 95, 92, 94, 0, 0, 0, 0, 0, + 0, 0, 0, 155, 157, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 45, 46, 49, 238, 50, + 71, 0, -2, 78, 51, 0, 57, 63, 141, 229, + 143, 0, 146, 0, 0, 0, 153, 158, 154, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 76, + 77, 52, 55, 144, 0, 0, 152, 156, 159, 0, + 231, 160, 161, 162, 163, 164, 0, 165, 166, 167, + 168, 147, 148, 230, 0, 172, 0, 170, 173, 169, + 171, } var yyTok1 = [...]int8{ @@ -650,7 +675,7 @@ var yyTok2 = [...]int8{ 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, - 92, 93, + 92, 93, 94, 95, } var yyTok3 = [...]int8{ @@ -1506,66 +1531,66 @@ yydefault: { yyVAL.labels = yyDollar[1].labels } - case 122: + case 124: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.labels = labels.New(yyDollar[2].lblList...) } - case 123: + case 125: yyDollar = yyS[yypt-4 : yypt+1] { yyVAL.labels = labels.New(yyDollar[2].lblList...) 
} - case 124: + case 126: yyDollar = yyS[yypt-2 : yypt+1] { yyVAL.labels = labels.New() } - case 125: + case 127: yyDollar = yyS[yypt-0 : yypt+1] { yyVAL.labels = labels.New() } - case 126: + case 128: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.lblList = append(yyDollar[1].lblList, yyDollar[3].label) } - case 127: + case 129: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.lblList = []labels.Label{yyDollar[1].label} } - case 128: + case 130: yyDollar = yyS[yypt-2 : yypt+1] { yylex.(*parser).unexpected("label set", "\",\" or \"}\"") yyVAL.lblList = yyDollar[1].lblList } - case 129: + case 131: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.label = labels.Label{Name: yyDollar[1].item.Val, Value: yylex.(*parser).unquoteString(yyDollar[3].item.Val)} } - case 130: + case 132: yyDollar = yyS[yypt-3 : yypt+1] { yylex.(*parser).unexpected("label set", "string") yyVAL.label = labels.Label{} } - case 131: + case 133: yyDollar = yyS[yypt-2 : yypt+1] { yylex.(*parser).unexpected("label set", "\"=\"") yyVAL.label = labels.Label{} } - case 132: + case 134: yyDollar = yyS[yypt-1 : yypt+1] { yylex.(*parser).unexpected("label set", "identifier or \"}\"") yyVAL.label = labels.Label{} } - case 133: + case 135: yyDollar = yyS[yypt-2 : yypt+1] { yylex.(*parser).generatedParserResult = &seriesDescription{ @@ -1573,33 +1598,33 @@ yydefault: values: yyDollar[2].series, } } - case 134: + case 136: yyDollar = yyS[yypt-0 : yypt+1] { yyVAL.series = []SequenceValue{} } - case 135: + case 137: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.series = append(yyDollar[1].series, yyDollar[3].series...) } - case 136: + case 138: yyDollar = yyS[yypt-2 : yypt+1] { yyVAL.series = yyDollar[1].series } - case 137: + case 139: yyDollar = yyS[yypt-1 : yypt+1] { yylex.(*parser).unexpected("series values", "") yyVAL.series = nil } - case 138: + case 140: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.series = []SequenceValue{{Omitted: true}} } - case 139: + case 141: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.series = []SequenceValue{} @@ -1607,12 +1632,12 @@ yydefault: yyVAL.series = append(yyVAL.series, SequenceValue{Omitted: true}) } } - case 140: + case 142: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.series = []SequenceValue{{Value: yyDollar[1].float}} } - case 141: + case 143: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.series = []SequenceValue{} @@ -1621,7 +1646,7 @@ yydefault: yyVAL.series = append(yyVAL.series, SequenceValue{Value: yyDollar[1].float}) } } - case 142: + case 144: yyDollar = yyS[yypt-4 : yypt+1] { yyVAL.series = []SequenceValue{} @@ -1631,12 +1656,12 @@ yydefault: yyDollar[1].float += yyDollar[2].float } } - case 143: + case 145: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.series = []SequenceValue{{Histogram: yyDollar[1].histogram}} } - case 144: + case 146: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.series = []SequenceValue{} @@ -1646,7 +1671,7 @@ yydefault: //$1 += $2 } } - case 145: + case 147: yyDollar = yyS[yypt-5 : yypt+1] { val, err := yylex.(*parser).histogramsIncreaseSeries(yyDollar[1].histogram, yyDollar[3].histogram, yyDollar[5].uint) @@ -1655,7 +1680,7 @@ yydefault: } yyVAL.series = val } - case 146: + case 148: yyDollar = yyS[yypt-5 : yypt+1] { val, err := yylex.(*parser).histogramsDecreaseSeries(yyDollar[1].histogram, yyDollar[3].histogram, yyDollar[5].uint) @@ -1664,7 +1689,7 @@ yydefault: } yyVAL.series = val } - case 147: + case 149: yyDollar = yyS[yypt-1 : yypt+1] { if yyDollar[1].item.Val != "stale" { @@ -1672,124 +1697,124 @@ yydefault: } yyVAL.float = math.Float64frombits(value.StaleNaN) } - case 150: + case 152: yyDollar = yyS[yypt-4 : 
yypt+1] { yyVAL.histogram = yylex.(*parser).buildHistogramFromMap(&yyDollar[2].descriptors) } - case 151: + case 153: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.histogram = yylex.(*parser).buildHistogramFromMap(&yyDollar[2].descriptors) } - case 152: + case 154: yyDollar = yyS[yypt-3 : yypt+1] { m := yylex.(*parser).newMap() yyVAL.histogram = yylex.(*parser).buildHistogramFromMap(&m) } - case 153: + case 155: yyDollar = yyS[yypt-2 : yypt+1] { m := yylex.(*parser).newMap() yyVAL.histogram = yylex.(*parser).buildHistogramFromMap(&m) } - case 154: + case 156: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.descriptors = *(yylex.(*parser).mergeMaps(&yyDollar[1].descriptors, &yyDollar[3].descriptors)) } - case 155: + case 157: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.descriptors = yyDollar[1].descriptors } - case 156: + case 158: yyDollar = yyS[yypt-2 : yypt+1] { yylex.(*parser).unexpected("histogram description", "histogram description key, e.g. buckets:[5 10 7]") } - case 157: + case 159: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.descriptors = yylex.(*parser).newMap() yyVAL.descriptors["schema"] = yyDollar[3].int } - case 158: + case 160: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.descriptors = yylex.(*parser).newMap() yyVAL.descriptors["sum"] = yyDollar[3].float } - case 159: + case 161: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.descriptors = yylex.(*parser).newMap() yyVAL.descriptors["count"] = yyDollar[3].float } - case 160: + case 162: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.descriptors = yylex.(*parser).newMap() yyVAL.descriptors["z_bucket"] = yyDollar[3].float } - case 161: + case 163: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.descriptors = yylex.(*parser).newMap() yyVAL.descriptors["z_bucket_w"] = yyDollar[3].float } - case 162: + case 164: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.descriptors = yylex.(*parser).newMap() yyVAL.descriptors["custom_values"] = yyDollar[3].bucket_set } - case 163: + case 165: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.descriptors = yylex.(*parser).newMap() yyVAL.descriptors["buckets"] = yyDollar[3].bucket_set } - case 164: + case 166: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.descriptors = yylex.(*parser).newMap() yyVAL.descriptors["offset"] = yyDollar[3].int } - case 165: + case 167: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.descriptors = yylex.(*parser).newMap() yyVAL.descriptors["n_buckets"] = yyDollar[3].bucket_set } - case 166: + case 168: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.descriptors = yylex.(*parser).newMap() yyVAL.descriptors["n_offset"] = yyDollar[3].int } - case 167: + case 169: yyDollar = yyS[yypt-4 : yypt+1] { yyVAL.bucket_set = yyDollar[2].bucket_set } - case 168: + case 170: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.bucket_set = yyDollar[2].bucket_set } - case 169: + case 171: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.bucket_set = append(yyDollar[1].bucket_set, yyDollar[3].float) } - case 170: + case 172: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.bucket_set = []float64{yyDollar[1].float} } - case 217: + case 223: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.node = &NumberLiteral{ @@ -1797,22 +1822,22 @@ yydefault: PosRange: yyDollar[1].item.PositionRange(), } } - case 218: + case 224: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.float = yylex.(*parser).number(yyDollar[1].item.Val) } - case 219: + case 225: yyDollar = yyS[yypt-2 : yypt+1] { yyVAL.float = yyDollar[2].float } - case 220: + case 226: yyDollar = yyS[yypt-2 : yypt+1] { yyVAL.float = -yyDollar[2].float } - case 223: + case 229: yyDollar = yyS[yypt-1 : yypt+1] { var err error @@ -1821,17 +1846,17 @@ yydefault: 
yylex.(*parser).addParseErrf(yyDollar[1].item.PositionRange(), "invalid repetition in series values: %s", err) } } - case 224: + case 230: yyDollar = yyS[yypt-2 : yypt+1] { yyVAL.int = -int64(yyDollar[2].uint) } - case 225: + case 231: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.int = int64(yyDollar[1].uint) } - case 226: + case 232: yyDollar = yyS[yypt-1 : yypt+1] { var err error @@ -1840,7 +1865,7 @@ yydefault: yylex.(*parser).addParseErr(yyDollar[1].item.PositionRange(), err) } } - case 227: + case 233: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.node = &StringLiteral{ @@ -1848,7 +1873,7 @@ yydefault: PosRange: yyDollar[1].item.PositionRange(), } } - case 228: + case 234: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.item = Item{ @@ -1857,12 +1882,12 @@ yydefault: Val: yylex.(*parser).unquoteString(yyDollar[1].item.Val), } } - case 229: + case 235: yyDollar = yyS[yypt-0 : yypt+1] { yyVAL.duration = 0 } - case 231: + case 237: yyDollar = yyS[yypt-0 : yypt+1] { yyVAL.strings = nil diff --git a/vendor/github.com/prometheus/prometheus/promql/parser/lex.go b/vendor/github.com/prometheus/prometheus/promql/parser/lex.go index c8ea4c46e86..8c7fbb89b96 100644 --- a/vendor/github.com/prometheus/prometheus/promql/parser/lex.go +++ b/vendor/github.com/prometheus/prometheus/promql/parser/lex.go @@ -65,7 +65,7 @@ func (i ItemType) IsAggregator() bool { return i > aggregatorsStart && i < aggre // IsAggregatorWithParam returns true if the Item is an aggregator that takes a parameter. // Returns false otherwise. func (i ItemType) IsAggregatorWithParam() bool { - return i == TOPK || i == BOTTOMK || i == COUNT_VALUES || i == QUANTILE + return i == TOPK || i == BOTTOMK || i == COUNT_VALUES || i == QUANTILE || i == LIMITK || i == LIMIT_RATIO } // IsKeyword returns true if the Item corresponds to a keyword. @@ -118,6 +118,8 @@ var key = map[string]ItemType{ "bottomk": BOTTOMK, "count_values": COUNT_VALUES, "quantile": QUANTILE, + "limitk": LIMITK, + "limit_ratio": LIMIT_RATIO, // Keywords. "offset": OFFSET, diff --git a/vendor/github.com/prometheus/prometheus/promql/parser/parse.go b/vendor/github.com/prometheus/prometheus/promql/parser/parse.go index f3fa27f84e6..fe3f2edc31a 100644 --- a/vendor/github.com/prometheus/prometheus/promql/parser/parse.go +++ b/vendor/github.com/prometheus/prometheus/promql/parser/parse.go @@ -447,6 +447,12 @@ func (p *parser) newAggregateExpr(op Item, modifier, args Node) (ret *AggregateE desiredArgs := 1 if ret.Op.IsAggregatorWithParam() { + if !EnableExperimentalFunctions && (ret.Op == LIMITK || ret.Op == LIMIT_RATIO) { + // In mimir we return a custom message which doesn't mention the CLI flag that should be used to enable + // experimental functions, given it's different (and in SaaS customers don't even have access to it). 
+			p.addParseErrf(ret.PositionRange(), "limitk() and limit_ratio() functions are not enabled")
+			return
+		}
 		desiredArgs = 2
 
 		ret.Param = arguments[0]
@@ -672,7 +678,7 @@ func (p *parser) checkAST(node Node) (typ ValueType) {
 			p.addParseErrf(n.PositionRange(), "aggregation operator expected in aggregation expression but got %q", n.Op)
 		}
 		p.expectType(n.Expr, ValueTypeVector, "aggregation expression")
-		if n.Op == TOPK || n.Op == BOTTOMK || n.Op == QUANTILE {
+		if n.Op == TOPK || n.Op == BOTTOMK || n.Op == QUANTILE || n.Op == LIMITK || n.Op == LIMIT_RATIO {
 			p.expectType(n.Param, ValueTypeScalar, "aggregation parameter")
 		}
 		if n.Op == COUNT_VALUES {
diff --git a/vendor/github.com/prometheus/prometheus/promql/promqltest/testdata/limit.test b/vendor/github.com/prometheus/prometheus/promql/promqltest/testdata/limit.test
new file mode 100644
index 00000000000..0ab363f9ae1
--- /dev/null
+++ b/vendor/github.com/prometheus/prometheus/promql/promqltest/testdata/limit.test
@@ -0,0 +1,119 @@
+# Tests for limitk
+#
+# NB: the many `and http_requests` clauses are there to ensure that the series
+# _are_ indeed a subset of the original series.
+load 5m
+	http_requests{job="api-server", instance="0", group="production"}	0+10x10
+	http_requests{job="api-server", instance="1", group="production"}	0+20x10
+	http_requests{job="api-server", instance="0", group="canary"}	0+30x10
+	http_requests{job="api-server", instance="1", group="canary"}	0+40x10
+	http_requests{job="api-server", instance="2", group="canary"}	0+50x10
+	http_requests{job="api-server", instance="3", group="canary"}	0+60x10
+
+eval instant at 50m count(limitk by (group) (0, http_requests))
+# empty
+
+eval instant at 50m count(limitk by (group) (-1, http_requests))
+# empty
+
+# Exercise the k==1 special case (the sample is added before the main series loop).
+eval instant at 50m count(limitk by (group) (1, http_requests) and http_requests)
+	{} 2
+
+eval instant at 50m count(limitk by (group) (2, http_requests) and http_requests)
+	{} 4
+
+eval instant at 50m count(limitk(100, http_requests) and http_requests)
+	{} 6
+
+# Exercise the k==1 special case (the sample is added before the main series loop).
+eval instant at 50m count(limitk by (group) (1, http_requests) and http_requests)
+	{} 2
+
+eval instant at 50m count(limitk by (group) (2, http_requests) and http_requests)
+	{} 4
+
+eval instant at 50m count(limitk(100, http_requests) and http_requests)
+	{} 6
+
+# limit_ratio
+eval range from 0 to 50m step 5m count(limit_ratio(0.0, http_requests))
+# empty
+
+# limitk(2, ...) should always return a 2-count subset of the timeseries (hence the AND'ing)
+eval range from 0 to 50m step 5m count(limitk(2, http_requests) and http_requests)
+	{} 2+0x10
+
+# Tests for limit_ratio
+#
+# NB: ratios below 0.5 will depend on some hashing "luck" (and there is no guarantee
+# that total number of series * ratio yields an integer), as inclusion depends on:
+#
+# * ratioLimit = [0.0, 1.0]:
+#       float64(sample.Metric.Hash()) / float64MaxUint64 < Ratio ?
+# * ratioLimit = [-1.0, 0.0):
+#       float64(sample.Metric.Hash()) / float64MaxUint64 >= (1.0 + Ratio) ?
+#
+# See `AddRatioSample()` in promql/engine.go for more details.
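To make the two conditions above concrete: below is a minimal, self-contained Go sketch of the hash-based predicate. It is a restatement for illustration only, not the vendored implementation (which lives in `AddRatioSample()` in promql/engine.go); `includeSample` and the sample values are invented for this example.

package main

import (
	"fmt"
	"math"
)

// includeSample restates the documented rule: a ratio in [0.0, 1.0] keeps a
// series whose normalized hash falls in [0, ratio); a ratio in [-1.0, 0.0)
// keeps the complementary upper range [1+ratio, 1], much like a negative
// array index selects from the end.
func includeSample(ratio float64, metricHash uint64) bool {
	offset := float64(metricHash) / float64(math.MaxUint64)
	if ratio >= 0 {
		return offset < ratio
	}
	return offset >= 1.0+ratio
}

func main() {
	h := uint64(math.MaxUint64 / 4) // normalized offset of 0.25
	fmt.Println(includeSample(0.5, h))  // true:  0.25 < 0.5
	fmt.Println(includeSample(-0.5, h)) // false: 0.25 < 1.0 + (-0.5)
}

Because a negative ratio selects exactly the hash range that the corresponding positive ratio rejects, `limit_ratio(r, x)` and `limit_ratio(r - 1.0, x)` partition the input series, which is what the complement tests below rely on.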
+
+# Half-ish samples: verify we get "near" 3 (i.e. 0.5 * 6)
+eval range from 0 to 50m step 5m count(limit_ratio(0.5, http_requests) and http_requests) <= bool (3+1)
+	{} 1+0x10
+
+eval range from 0 to 50m step 5m count(limit_ratio(0.5, http_requests) and http_requests) >= bool (3-1)
+	{} 1+0x10
+
+# All samples
+eval range from 0 to 50m step 5m count(limit_ratio(1.0, http_requests) and http_requests)
+	{} 6+0x10
+
+# All samples
+eval range from 0 to 50m step 5m count(limit_ratio(-1.0, http_requests) and http_requests)
+	{} 6+0x10
+
+# Capped to 1.0 -> all samples
+eval_warn range from 0 to 50m step 5m count(limit_ratio(1.1, http_requests) and http_requests)
+	{} 6+0x10
+
+# Capped to -1.0 -> all samples
+eval_warn range from 0 to 50m step 5m count(limit_ratio(-1.1, http_requests) and http_requests)
+	{} 6+0x10
+
+# Verify that limit_ratio(value) and limit_ratio(-(1.0 - value)) select the "complement" of each other
+# Complement below for [0.2, -0.8]
+#
+# Complement 1of2: `or` should return all samples
+eval range from 0 to 50m step 5m count(limit_ratio(0.2, http_requests) or limit_ratio(-0.8, http_requests))
+	{} 6+0x10
+
+# Complement 2of2: `and` should return no samples
+eval range from 0 to 50m step 5m count(limit_ratio(0.2, http_requests) and limit_ratio(-0.8, http_requests))
+# empty
+
+# Complement below for [0.5, -0.5]
+eval range from 0 to 50m step 5m count(limit_ratio(0.5, http_requests) or limit_ratio(-0.5, http_requests))
+	{} 6+0x10
+
+eval range from 0 to 50m step 5m count(limit_ratio(0.5, http_requests) and limit_ratio(-0.5, http_requests))
+# empty
+
+# Complement below for [0.8, -0.2]
+eval range from 0 to 50m step 5m count(limit_ratio(0.8, http_requests) or limit_ratio(-0.2, http_requests))
+	{} 6+0x10
+
+eval range from 0 to 50m step 5m count(limit_ratio(0.8, http_requests) and limit_ratio(-0.2, http_requests))
+# empty
+
+# Complement below for [some_ratio, 1.0 - some_ratio], some_ratio derived from time(),
+# using a small prime number to avoid rounded ratio values, and a small set of them.
+eval range from 0 to 50m step 5m count(limit_ratio(time() % 17/17, http_requests) or limit_ratio(1.0 - (time() % 17/17), http_requests))
+	{} 6+0x10
+
+eval range from 0 to 50m step 5m count(limit_ratio(time() % 17/17, http_requests) and limit_ratio(1.0 - (time() % 17/17), http_requests))
+# empty
+
+# Poor man's normality check: the loaded samples follow a nice linearity over labels and time,
+# so the check below yields 1 (i.e. true).
+eval range from 0 to 50m step 5m abs(avg(limit_ratio(0.5, http_requests)) - avg(limit_ratio(-0.5, http_requests))) <= bool stddev(http_requests)
+	{} 1+0x10
+
diff --git a/vendor/github.com/prometheus/prometheus/tsdb/chunks/chunks.go b/vendor/github.com/prometheus/prometheus/tsdb/chunks/chunks.go
index e7df0eeed29..ec0f6d4036a 100644
--- a/vendor/github.com/prometheus/prometheus/tsdb/chunks/chunks.go
+++ b/vendor/github.com/prometheus/prometheus/tsdb/chunks/chunks.go
@@ -133,15 +133,6 @@ type Meta struct {
 	// Time range the data covers.
 	// When MaxTime == math.MaxInt64 the chunk is still open and being appended to.
 	MinTime, MaxTime int64
-
-	// OOOLastRef, OOOLastMinTime and OOOLastMaxTime are kept as markers for
-	// overlapping chunks.
-	// These fields point to the last created out of order Chunk (the head) that existed
-	// when Series() was called and was overlapping.
-	// Series() and Chunk() method responses should be consistent for the same
-	// query even if new data is added in between the calls.
- OOOLastRef ChunkRef - OOOLastMinTime, OOOLastMaxTime int64 } // ChunkFromSamples requires all samples to have the same type. diff --git a/vendor/github.com/prometheus/prometheus/tsdb/head_read.go b/vendor/github.com/prometheus/prometheus/tsdb/head_read.go index b8aa574f0e7..b41446e7e5d 100644 --- a/vendor/github.com/prometheus/prometheus/tsdb/head_read.go +++ b/vendor/github.com/prometheus/prometheus/tsdb/head_read.go @@ -502,55 +502,24 @@ func (s *memSeries) oooMergedChunks(meta chunks.Meta, cdm chunkDiskMapper, mint, // We create a temporary slice of chunk metas to hold the information of all // possible chunks that may overlap with the requested chunk. - tmpChks := make([]chunkMetaAndChunkDiskMapperRef, 0, len(s.ooo.oooMmappedChunks)) - - oooHeadRef := chunks.ChunkRef(chunks.NewHeadChunkRef(s.ref, s.oooHeadChunkID(len(s.ooo.oooMmappedChunks)))) - if s.ooo.oooHeadChunk != nil && s.ooo.oooHeadChunk.OverlapsClosedInterval(mint, maxt) { - // We only want to append the head chunk if this chunk existed when - // Series() was called. This brings consistency in case new data - // is added in between Series() and Chunk() calls. - if oooHeadRef == meta.OOOLastRef { - tmpChks = append(tmpChks, chunkMetaAndChunkDiskMapperRef{ - meta: chunks.Meta{ - // Ignoring samples added before and after the last known min and max time for this chunk. - MinTime: meta.OOOLastMinTime, - MaxTime: meta.OOOLastMaxTime, - Ref: oooHeadRef, - }, - }) - } - } + tmpChks := make([]chunkMetaAndChunkDiskMapperRef, 0, len(s.ooo.oooMmappedChunks)+1) for i, c := range s.ooo.oooMmappedChunks { - chunkRef := chunks.ChunkRef(chunks.NewHeadChunkRef(s.ref, s.oooHeadChunkID(i))) - // We can skip chunks that came in later than the last known OOOLastRef. - if chunkRef > meta.OOOLastRef { - break - } - - switch { - case chunkRef == meta.OOOLastRef: - tmpChks = append(tmpChks, chunkMetaAndChunkDiskMapperRef{ - meta: chunks.Meta{ - MinTime: meta.OOOLastMinTime, - MaxTime: meta.OOOLastMaxTime, - Ref: chunkRef, - }, - ref: c.ref, - origMinT: c.minTime, - origMaxT: c.maxTime, - }) - case c.OverlapsClosedInterval(mint, maxt): + if c.OverlapsClosedInterval(mint, maxt) { tmpChks = append(tmpChks, chunkMetaAndChunkDiskMapperRef{ meta: chunks.Meta{ MinTime: c.minTime, MaxTime: c.maxTime, - Ref: chunkRef, + Ref: chunks.ChunkRef(chunks.NewHeadChunkRef(s.ref, s.oooHeadChunkID(i))), }, ref: c.ref, }) } } + // Add in data copied from the head OOO chunk. + if meta.Chunk != nil { + tmpChks = append(tmpChks, chunkMetaAndChunkDiskMapperRef{meta: meta}) + } // Next we want to sort all the collected chunks by min time so we can find // those that overlap and stop when we know the rest don't. @@ -563,22 +532,8 @@ func (s *memSeries) oooMergedChunks(meta chunks.Meta, cdm chunkDiskMapper, mint, continue } var iterable chunkenc.Iterable - if c.meta.Ref == oooHeadRef { - var xor *chunkenc.XORChunk - var err error - // If head chunk min and max time match the meta OOO markers - // that means that the chunk has not expanded so we can append - // it as it is. - if s.ooo.oooHeadChunk.minTime == meta.OOOLastMinTime && s.ooo.oooHeadChunk.maxTime == meta.OOOLastMaxTime { - xor, err = s.ooo.oooHeadChunk.chunk.ToXOR() // TODO(jesus.vazquez) (This is an optimization idea that has no priority and might not be that useful) See if we could use a copy of the underlying slice. That would leave the more expensive ToXOR() function only for the usecase where Bytes() is called. 
- } else { - // We need to remove samples that are outside of the markers - xor, err = s.ooo.oooHeadChunk.chunk.ToXORBetweenTimestamps(meta.OOOLastMinTime, meta.OOOLastMaxTime) - } - if err != nil { - return nil, fmt.Errorf("failed to convert ooo head chunk to xor chunk: %w", err) - } - iterable = xor + if c.meta.Chunk != nil { + iterable = c.meta.Chunk } else { chk, err := cdm.Chunk(c.ref) if err != nil { @@ -588,16 +543,7 @@ func (s *memSeries) oooMergedChunks(meta chunks.Meta, cdm chunkDiskMapper, mint, } return nil, err } - if c.meta.Ref == meta.OOOLastRef && - (c.origMinT != meta.OOOLastMinTime || c.origMaxT != meta.OOOLastMaxTime) { - // The head expanded and was memory mapped so now we need to - // wrap the chunk within a chunk that doesnt allows us to iterate - // through samples out of the OOOLastMinT and OOOLastMaxT - // markers. - iterable = boundedIterable{chk, meta.OOOLastMinTime, meta.OOOLastMaxTime} - } else { - iterable = chk - } + iterable = chk } mc.chunkIterables = append(mc.chunkIterables, iterable) if c.meta.MaxTime > absoluteMax { @@ -608,74 +554,6 @@ func (s *memSeries) oooMergedChunks(meta chunks.Meta, cdm chunkDiskMapper, mint, return mc, nil } -var _ chunkenc.Iterable = &boundedIterable{} - -// boundedIterable is an implementation of chunkenc.Iterable that uses a -// boundedIterator that only iterates through samples which timestamps are -// >= minT and <= maxT. -type boundedIterable struct { - chunk chunkenc.Chunk - minT int64 - maxT int64 -} - -func (b boundedIterable) Iterator(iterator chunkenc.Iterator) chunkenc.Iterator { - it := b.chunk.Iterator(iterator) - if it == nil { - panic("iterator shouldn't be nil") - } - return boundedIterator{it, b.minT, b.maxT} -} - -var _ chunkenc.Iterator = &boundedIterator{} - -// boundedIterator is an implementation of Iterator that only iterates through -// samples which timestamps are >= minT and <= maxT. -type boundedIterator struct { - chunkenc.Iterator - minT int64 - maxT int64 -} - -// Next the first time its called it will advance as many positions as necessary -// until its able to find a sample within the bounds minT and maxT. -// If there are samples within bounds it will advance one by one amongst them. -// If there are no samples within bounds it will return false. -func (b boundedIterator) Next() chunkenc.ValueType { - for b.Iterator.Next() == chunkenc.ValFloat { - t, _ := b.Iterator.At() - switch { - case t < b.minT: - continue - case t > b.maxT: - return chunkenc.ValNone - default: - return chunkenc.ValFloat - } - } - return chunkenc.ValNone -} - -func (b boundedIterator) Seek(t int64) chunkenc.ValueType { - if t < b.minT { - // We must seek at least up to b.minT if it is asked for something before that. - val := b.Iterator.Seek(b.minT) - if !(val == chunkenc.ValFloat) { - return chunkenc.ValNone - } - t, _ := b.Iterator.At() - if t <= b.maxT { - return chunkenc.ValFloat - } - } - if t > b.maxT { - // We seek anyway so that the subsequent Next() calls will also return false. - b.Iterator.Seek(t) - return chunkenc.ValNone - } - return b.Iterator.Seek(t) -} - // safeHeadChunk makes sure that the chunk can be accessed without a race condition. 
type safeHeadChunk struct {
	chunkenc.Chunk
diff --git a/vendor/github.com/prometheus/prometheus/tsdb/ooo_head_read.go b/vendor/github.com/prometheus/prometheus/tsdb/ooo_head_read.go
index acc0d45f7fb..44993546bd8 100644
--- a/vendor/github.com/prometheus/prometheus/tsdb/ooo_head_read.go
+++ b/vendor/github.com/prometheus/prometheus/tsdb/ooo_head_read.go
@@ -94,48 +94,32 @@ func (oh *OOOHeadIndexReader) series(ref storage.SeriesRef, builder *labels.Scra
 
 	tmpChks := make([]chunks.Meta, 0, len(s.ooo.oooMmappedChunks))
 
-	// We define these markers to track the last chunk reference while we
-	// fill the chunk meta.
-	// These markers are useful to give consistent responses to repeated queries
-	// even if new chunks that might be overlapping or not are added afterwards.
-	// Also, lastMinT and lastMaxT are initialized to the max int as a sentinel
-	// value to know they are unset.
-	var lastChunkRef chunks.ChunkRef
-	lastMinT, lastMaxT := int64(math.MaxInt64), int64(math.MaxInt64)
-
-	addChunk := func(minT, maxT int64, ref chunks.ChunkRef) {
-		// the first time we get called is for the last included chunk.
-		// set the markers accordingly
-		if lastMinT == int64(math.MaxInt64) {
-			lastChunkRef = ref
-			lastMinT = minT
-			lastMaxT = maxT
-		}
-
+	addChunk := func(minT, maxT int64, ref chunks.ChunkRef, chunk chunkenc.Chunk) {
 		tmpChks = append(tmpChks, chunks.Meta{
-			MinTime:        minT,
-			MaxTime:        maxT,
-			Ref:            ref,
-			OOOLastRef:     lastChunkRef,
-			OOOLastMinTime: lastMinT,
-			OOOLastMaxTime: lastMaxT,
+			MinTime: minT,
+			MaxTime: maxT,
+			Ref:     ref,
+			Chunk:   chunk,
 		})
 	}
 
-	// Collect all chunks that overlap the query range, in order from most recent to most old,
-	// so we can set the correct markers.
+	// Collect all chunks that overlap the query range.
 	if s.ooo.oooHeadChunk != nil {
 		c := s.ooo.oooHeadChunk
 		if c.OverlapsClosedInterval(oh.mint, oh.maxt) && maxMmapRef == 0 {
 			ref := chunks.ChunkRef(chunks.NewHeadChunkRef(s.ref, s.oooHeadChunkID(len(s.ooo.oooMmappedChunks))))
-			addChunk(c.minTime, c.maxTime, ref)
+			var xor chunkenc.Chunk
+			if len(c.chunk.samples) > 0 { // Empty samples happen in tests, at least.
+				xor, _ = c.chunk.ToXOR() // Ignoring error because it can't fail.
+			}
+			addChunk(c.minTime, c.maxTime, ref, xor)
 		}
 	}
 	for i := len(s.ooo.oooMmappedChunks) - 1; i >= 0; i-- {
 		c := s.ooo.oooMmappedChunks[i]
 		if c.OverlapsClosedInterval(oh.mint, oh.maxt) && (maxMmapRef == 0 || maxMmapRef.GreaterThanOrEqualTo(c.ref)) && (lastGarbageCollectedMmapRef == 0 || c.ref.GreaterThan(lastGarbageCollectedMmapRef)) {
 			ref := chunks.ChunkRef(chunks.NewHeadChunkRef(s.ref, s.oooHeadChunkID(i)))
-			addChunk(c.minTime, c.maxTime, ref)
+			addChunk(c.minTime, c.maxTime, ref, nil)
 		}
 	}
 
@@ -163,6 +147,12 @@
 		case c.MaxTime > maxTime:
 			maxTime = c.MaxTime
 			(*chks)[len(*chks)-1].MaxTime = c.MaxTime
+			fallthrough
+		default:
+			// If the head OOO chunk is part of an output chunk, copy the chunk pointer.
+ if c.Chunk != nil { + (*chks)[len(*chks)-1].Chunk = c.Chunk + } } } @@ -191,10 +181,8 @@ func (oh *OOOHeadIndexReader) LabelValues(ctx context.Context, name string, matc } type chunkMetaAndChunkDiskMapperRef struct { - meta chunks.Meta - ref chunks.ChunkDiskMapperRef - origMinT int64 - origMaxT int64 + meta chunks.Meta + ref chunks.ChunkDiskMapperRef } func refLessByMinTimeAndMinRef(a, b chunkMetaAndChunkDiskMapperRef) int { diff --git a/vendor/github.com/prometheus/prometheus/tsdb/testutil.go b/vendor/github.com/prometheus/prometheus/tsdb/testutil.go new file mode 100644 index 00000000000..9730e471327 --- /dev/null +++ b/vendor/github.com/prometheus/prometheus/tsdb/testutil.go @@ -0,0 +1,176 @@ +// Copyright 2017 The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package tsdb + +import ( + "testing" + + prom_testutil "github.com/prometheus/client_golang/prometheus/testutil" + + "github.com/stretchr/testify/require" + + "github.com/prometheus/prometheus/model/histogram" + "github.com/prometheus/prometheus/model/labels" + "github.com/prometheus/prometheus/storage" + "github.com/prometheus/prometheus/tsdb/chunks" +) + +const ( + float = "float" +) + +type testValue struct { + Ts int64 + V int64 + CounterResetHeader histogram.CounterResetHint +} + +type sampleTypeScenario struct { + sampleType string + appendFunc func(appender storage.Appender, lbls labels.Labels, ts, value int64) (storage.SeriesRef, sample, error) + sampleFunc func(ts, value int64) sample +} + +// TODO: native histogram sample types will be added as part of out-of-order native histogram support; see #11220. 
+var sampleTypeScenarios = map[string]sampleTypeScenario{ + float: { + sampleType: sampleMetricTypeFloat, + appendFunc: func(appender storage.Appender, lbls labels.Labels, ts, value int64) (storage.SeriesRef, sample, error) { + s := sample{t: ts, f: float64(value)} + ref, err := appender.Append(0, lbls, ts, s.f) + return ref, s, err + }, + sampleFunc: func(ts, value int64) sample { + return sample{t: ts, f: float64(value)} + }, + }, + // intHistogram: { + // sampleType: sampleMetricTypeHistogram, + // appendFunc: func(appender storage.Appender, lbls labels.Labels, ts, value int64) (storage.SeriesRef, sample, error) { + // s := sample{t: ts, h: tsdbutil.GenerateTestHistogram(int(value))} + // ref, err := appender.AppendHistogram(0, lbls, ts, s.h, nil) + // return ref, s, err + // }, + // sampleFunc: func(ts, value int64) sample { + // return sample{t: ts, h: tsdbutil.GenerateTestHistogram(int(value))} + // }, + // }, + // floatHistogram: { + // sampleType: sampleMetricTypeHistogram, + // appendFunc: func(appender storage.Appender, lbls labels.Labels, ts, value int64) (storage.SeriesRef, sample, error) { + // s := sample{t: ts, fh: tsdbutil.GenerateTestFloatHistogram(int(value))} + // ref, err := appender.AppendHistogram(0, lbls, ts, nil, s.fh) + // return ref, s, err + // }, + // sampleFunc: func(ts, value int64) sample { + // return sample{t: ts, fh: tsdbutil.GenerateTestFloatHistogram(int(value))} + // }, + // }, + // gaugeIntHistogram: { + // sampleType: sampleMetricTypeHistogram, + // appendFunc: func(appender storage.Appender, lbls labels.Labels, ts, value int64) (storage.SeriesRef, sample, error) { + // s := sample{t: ts, h: tsdbutil.GenerateTestGaugeHistogram(int(value))} + // ref, err := appender.AppendHistogram(0, lbls, ts, s.h, nil) + // return ref, s, err + // }, + // sampleFunc: func(ts, value int64) sample { + // return sample{t: ts, h: tsdbutil.GenerateTestGaugeHistogram(int(value))} + // }, + // }, + // gaugeFloatHistogram: { + // sampleType: sampleMetricTypeHistogram, + // appendFunc: func(appender storage.Appender, lbls labels.Labels, ts, value int64) (storage.SeriesRef, sample, error) { + // s := sample{t: ts, fh: tsdbutil.GenerateTestGaugeFloatHistogram(int(value))} + // ref, err := appender.AppendHistogram(0, lbls, ts, nil, s.fh) + // return ref, s, err + // }, + // sampleFunc: func(ts, value int64) sample { + // return sample{t: ts, fh: tsdbutil.GenerateTestGaugeFloatHistogram(int(value))} + // }, + // }, +} + +// requireEqualSeries checks that the actual series are equal to the expected ones. It ignores the counter reset hints for histograms. 
+func requireEqualSeries(t *testing.T, expected, actual map[string][]chunks.Sample, ignoreCounterResets bool) { + for name, expectedItem := range expected { + actualItem, ok := actual[name] + require.True(t, ok, "Expected series %s not found", name) + requireEqualSamples(t, name, expectedItem, actualItem, ignoreCounterResets) + } + for name := range actual { + _, ok := expected[name] + require.True(t, ok, "Unexpected series %s", name) + } +} + +func requireEqualOOOSamples(t *testing.T, expectedSamples int, db *DB) { + require.Equal(t, float64(expectedSamples), + prom_testutil.ToFloat64(db.head.metrics.outOfOrderSamplesAppended.WithLabelValues(sampleMetricTypeFloat))+ + prom_testutil.ToFloat64(db.head.metrics.outOfOrderSamplesAppended.WithLabelValues(sampleMetricTypeHistogram)), + "number of ooo appended samples mismatch") +} + +func requireEqualSamples(t *testing.T, name string, expected, actual []chunks.Sample, ignoreCounterResets bool) { + require.Equal(t, len(expected), len(actual), "Length not equal to expected for %s", name) + for i, s := range expected { + expectedSample := s + actualSample := actual[i] + require.Equal(t, expectedSample.T(), actualSample.T(), "Different timestamps for %s[%d]", name, i) + require.Equal(t, expectedSample.Type().String(), actualSample.Type().String(), "Different types for %s[%d] at ts %d", name, i, expectedSample.T()) + switch { + case s.H() != nil: + { + expectedHist := expectedSample.H() + actualHist := actualSample.H() + if ignoreCounterResets && expectedHist.CounterResetHint != histogram.GaugeType { + expectedHist.CounterResetHint = histogram.UnknownCounterReset + actualHist.CounterResetHint = histogram.UnknownCounterReset + } else { + require.Equal(t, expectedHist.CounterResetHint, actualHist.CounterResetHint, "Sample header doesn't match for %s[%d] at ts %d, expected: %s, actual: %s", name, i, expectedSample.T(), counterResetAsString(expectedHist.CounterResetHint), counterResetAsString(actualHist.CounterResetHint)) + } + require.Equal(t, expectedHist, actualHist, "Sample doesn't match for %s[%d] at ts %d", name, i, expectedSample.T()) + } + case s.FH() != nil: + { + expectedHist := expectedSample.FH() + actualHist := actualSample.FH() + if ignoreCounterResets { + expectedHist.CounterResetHint = histogram.UnknownCounterReset + actualHist.CounterResetHint = histogram.UnknownCounterReset + } else { + require.Equal(t, expectedHist.CounterResetHint, actualHist.CounterResetHint, "Sample header doesn't match for %s[%d] at ts %d, expected: %s, actual: %s", name, i, expectedSample.T(), counterResetAsString(expectedHist.CounterResetHint), counterResetAsString(actualHist.CounterResetHint)) + } + require.Equal(t, expectedHist, actualHist, "Sample doesn't match for %s[%d] at ts %d", name, i, expectedSample.T()) + } + default: + expectedFloat := expectedSample.F() + actualFloat := actualSample.F() + require.Equal(t, expectedFloat, actualFloat, "Sample doesn't match for %s[%d] at ts %d", name, i, expectedSample.T()) + } + } +} + +func counterResetAsString(h histogram.CounterResetHint) string { + switch h { + case histogram.UnknownCounterReset: + return "UnknownCounterReset" + case histogram.CounterReset: + return "CounterReset" + case histogram.NotCounterReset: + return "NotCounterReset" + case histogram.GaugeType: + return "GaugeType" + } + panic("Unexpected counter reset type") +} diff --git a/vendor/github.com/prometheus/prometheus/util/annotations/annotations.go b/vendor/github.com/prometheus/prometheus/util/annotations/annotations.go index 
6415f447444..40a20e4b923 100644
--- a/vendor/github.com/prometheus/prometheus/util/annotations/annotations.go
+++ b/vendor/github.com/prometheus/prometheus/util/annotations/annotations.go
@@ -116,6 +116,7 @@ var (
 	PromQLInfo    = errors.New("PromQL info")
 	PromQLWarning = errors.New("PromQL warning")
 
+	InvalidRatioWarning           = fmt.Errorf("%w: ratio value should be between -1 and 1", PromQLWarning)
 	InvalidQuantileWarning        = fmt.Errorf("%w: quantile value should be between 0 and 1", PromQLWarning)
 	BadBucketLabelWarning         = fmt.Errorf("%w: bucket label %q is missing or has a malformed value", PromQLWarning, model.BucketLabel)
 	MixedFloatsHistogramsWarning  = fmt.Errorf("%w: encountered a mix of histograms and floats for", PromQLWarning)
@@ -155,6 +156,15 @@ func NewInvalidQuantileWarning(q float64, pos posrange.PositionRange) error {
 	}
 }
 
+// NewInvalidRatioWarning is used when the user specifies an invalid ratio
+// value, i.e. a float that is outside the range [-1, 1] or NaN.
+func NewInvalidRatioWarning(q, to float64, pos posrange.PositionRange) error {
+	return annoErr{
+		PositionRange: pos,
+		Err:           fmt.Errorf("%w, got %g, capping to %g", InvalidRatioWarning, q, to),
+	}
+}
+
 // NewBadBucketLabelWarning is used when there is an error parsing the bucket label
 // of a classic histogram.
 func NewBadBucketLabelWarning(metricName, label string, pos posrange.PositionRange) error {
diff --git a/vendor/modules.txt b/vendor/modules.txt
index 9e6f4d8562a..4edf8538e7c 100644
--- a/vendor/modules.txt
+++ b/vendor/modules.txt
@@ -610,8 +610,8 @@ github.com/grafana/alerting/receivers/webex
 github.com/grafana/alerting/receivers/webhook
 github.com/grafana/alerting/receivers/wecom
 github.com/grafana/alerting/templates
-# github.com/grafana/dskit v0.0.0-20240626184720-35810fdf1c6d
-## explicit; go 1.20
+# github.com/grafana/dskit v0.0.0-20240704113758-97b2aa96bcb1
+## explicit; go 1.21
 github.com/grafana/dskit/backoff
 github.com/grafana/dskit/ballast
 github.com/grafana/dskit/cache
@@ -997,7 +997,7 @@ github.com/prometheus/exporter-toolkit/web
 github.com/prometheus/procfs
 github.com/prometheus/procfs/internal/fs
 github.com/prometheus/procfs/internal/util
-# github.com/prometheus/prometheus v1.99.0 => github.com/grafana/mimir-prometheus v0.0.0-20240703103907-5131622acfee
+# github.com/prometheus/prometheus v1.99.0 => github.com/grafana/mimir-prometheus v0.0.0-20240704133652-fb0cb30e280c
 ## explicit; go 1.21
 github.com/prometheus/prometheus/config
 github.com/prometheus/prometheus/discovery
@@ -1647,7 +1647,7 @@ sigs.k8s.io/kustomize/kyaml/yaml/walk
 sigs.k8s.io/yaml
 sigs.k8s.io/yaml/goyaml.v2
 sigs.k8s.io/yaml/goyaml.v3
-# github.com/prometheus/prometheus => github.com/grafana/mimir-prometheus v0.0.0-20240703103907-5131622acfee
+# github.com/prometheus/prometheus => github.com/grafana/mimir-prometheus v0.0.0-20240704133652-fb0cb30e280c
 # github.com/hashicorp/memberlist => github.com/grafana/memberlist v0.3.1-0.20220714140823-09ffed8adbbe
 # gopkg.in/yaml.v3 => github.com/colega/go-yaml-yaml v0.0.0-20220720105220-255a8d16d094
 # github.com/grafana/regexp => github.com/grafana/regexp v0.0.0-20240531075221-3685f1377d7b
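As a usage note on the parser gate added above: limitk() and limit_ratio() parse only once experimental PromQL functions are enabled, which is what -querier.promql-experimental-functions-enabled=true toggles in Mimir. The sketch below shows the behavior from a caller's side using the vendored parser package directly; it is illustrative only, and the exact error text here differs from upstream Prometheus because this fork returns the custom message shown in the parse.go hunk.

package main

import (
	"fmt"

	"github.com/prometheus/prometheus/promql/parser"
)

func main() {
	const q = `limitk(2, http_requests)`

	// With experimental functions disabled (the default), parsing fails.
	if _, err := parser.ParseExpr(q); err != nil {
		fmt.Println("disabled:", err)
	}

	// With experimental functions enabled, the same expression parses into
	// an AggregateExpr whose Param is the scalar 2 (see the
	// IsAggregatorWithParam() change in lex.go above).
	parser.EnableExperimentalFunctions = true
	expr, err := parser.ParseExpr(q)
	fmt.Printf("enabled: %v (err=%v)\n", expr, err)
}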