diff --git a/.bingo/Variables.mk b/.bingo/Variables.mk index 506662a5b7..81c734f867 100644 --- a/.bingo/Variables.mk +++ b/.bingo/Variables.mk @@ -131,12 +131,6 @@ $(PROTOC_GEN_GOGOFAST): $(BINGO_DIR)/protoc-gen-gogofast.mod @echo "(re)installing $(GOBIN)/protoc-gen-gogofast-v1.3.2" @cd $(BINGO_DIR) && GOWORK=off $(GO) build -mod=mod -modfile=protoc-gen-gogofast.mod -o=$(GOBIN)/protoc-gen-gogofast-v1.3.2 "github.com/gogo/protobuf/protoc-gen-gogofast" -PROTOC_GO_INJECT_TAG := $(GOBIN)/protoc-go-inject-tag-v1.4.0 -$(PROTOC_GO_INJECT_TAG): $(BINGO_DIR)/protoc-go-inject-tag.mod - @# Install binary/ries using Go 1.14+ build command. This is using bwplotka/bingo-controlled, separate go module with pinned dependencies. - @echo "(re)installing $(GOBIN)/protoc-go-inject-tag-v1.4.0" - @cd $(BINGO_DIR) && GOWORK=off $(GO) build -mod=mod -modfile=protoc-go-inject-tag.mod -o=$(GOBIN)/protoc-go-inject-tag-v1.4.0 "github.com/favadi/protoc-go-inject-tag" - SHFMT := $(GOBIN)/shfmt-v3.8.0 $(SHFMT): $(BINGO_DIR)/shfmt.mod @# Install binary/ries using Go 1.14+ build command. This is using bwplotka/bingo-controlled, separate go module with pinned dependencies. diff --git a/.bingo/protoc-go-inject-tag.mod b/.bingo/protoc-go-inject-tag.mod deleted file mode 100644 index 6bfbb75c88..0000000000 --- a/.bingo/protoc-go-inject-tag.mod +++ /dev/null @@ -1,5 +0,0 @@ -module _ // Auto generated by https://github.com/bwplotka/bingo. DO NOT EDIT - -go 1.22.6 - -require github.com/favadi/protoc-go-inject-tag v1.4.0 diff --git a/.bingo/protoc-go-inject-tag.sum b/.bingo/protoc-go-inject-tag.sum deleted file mode 100644 index 6426930445..0000000000 --- a/.bingo/protoc-go-inject-tag.sum +++ /dev/null @@ -1,2 +0,0 @@ -github.com/favadi/protoc-go-inject-tag v1.4.0 h1:K3KXxbgRw5WT4f43LbglARGz/8jVsDOS7uMjG4oNvXY= -github.com/favadi/protoc-go-inject-tag v1.4.0/go.mod h1:AZ+PK+QDKUOLlBRG0rYiKkUX5Hw7+7GTFzlU99GFSbQ= diff --git a/.bingo/variables.env b/.bingo/variables.env index 9d237a8064..75ba4f293b 100644 --- a/.bingo/variables.env +++ b/.bingo/variables.env @@ -46,7 +46,5 @@ PROMU="${GOBIN}/promu-v0.5.0" PROTOC_GEN_GOGOFAST="${GOBIN}/protoc-gen-gogofast-v1.3.2" -PROTOC_GO_INJECT_TAG="${GOBIN}/protoc-go-inject-tag-v1.4.0" - SHFMT="${GOBIN}/shfmt-v3.8.0" diff --git a/.circleci/config.yml b/.circleci/config.yml index dbe302db51..67d17a8b3c 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -10,7 +10,6 @@ executors: docker: - image: cimg/go:1.22-node golang-test: - resource_class: 'large' docker: - image: cimg/go:1.22-node - image: quay.io/thanos/docker-swift-onlyone-authv2-keystone:v0.1 diff --git a/Makefile b/Makefile index 0a5f9917fe..29e1d5cc87 100644 --- a/Makefile +++ b/Makefile @@ -292,8 +292,8 @@ go-format: $(GOIMPORTS) .PHONY: proto proto: ## Generates Go files from Thanos proto files. -proto: check-git $(GOIMPORTS) $(PROTOC) $(PROTOC_GEN_GOGOFAST) $(PROTOC_GO_INJECT_TAG) - @GOIMPORTS_BIN="$(GOIMPORTS)" PROTOC_BIN="$(PROTOC)" PROTOC_GEN_GOGOFAST_BIN="$(PROTOC_GEN_GOGOFAST)" PROTOC_VERSION="$(PROTOC_VERSION)" PROTOC_GO_INJECT_TAG_BIN="$(PROTOC_GO_INJECT_TAG)" scripts/genproto.sh +proto: check-git $(GOIMPORTS) $(PROTOC) $(PROTOC_GEN_GOGOFAST) + @GOIMPORTS_BIN="$(GOIMPORTS)" PROTOC_BIN="$(PROTOC)" PROTOC_GEN_GOGOFAST_BIN="$(PROTOC_GEN_GOGOFAST)" PROTOC_VERSION="$(PROTOC_VERSION)" scripts/genproto.sh .PHONY: tarballs-release tarballs-release: ## Build tarballs. 
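The build changes above drop protoc-go-inject-tag because struct tags and nullability are now declared directly in the proto sources through gogoproto options (nullable=false, customtype, stdduration), which the cortex.proto and queryrange.pb.go hunks further down rely on. As a rough sketch of the method set such a gogoproto custom type has to provide — the same Size/MarshalTo/Unmarshal shape the new Duration helpers later in this diff implement — here is a hypothetical Millis type; the name and the fixed-width encoding are illustrative only and are not part of this patch.

package example

import (
	"encoding/binary"
	"fmt"
)

// Millis is a hypothetical stand-in for a field wired up with
// [(gogoproto.customtype) = "Millis", (gogoproto.nullable) = false].
// gogo-generated code calls these methods to (de)serialize it in place.
type Millis int64

// Size reports the encoded size in bytes (a fixed 8 bytes in this sketch).
func (m *Millis) Size() int { return 8 }

// MarshalTo writes the value into b, which the caller sizes via Size().
func (m *Millis) MarshalTo(b []byte) (int, error) {
	binary.LittleEndian.PutUint64(b, uint64(*m))
	return 8, nil
}

// Marshal allocates a buffer and delegates to MarshalTo.
func (m *Millis) Marshal() ([]byte, error) {
	b := make([]byte, m.Size())
	n, err := m.MarshalTo(b)
	return b[:n], err
}

// Unmarshal restores the value from its wire form.
func (m *Millis) Unmarshal(b []byte) error {
	if len(b) < 8 {
		return fmt.Errorf("millis: need 8 bytes, got %d", len(b))
	}
	*m = Millis(binary.LittleEndian.Uint64(b))
	return nil
}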
diff --git a/cmd/thanos/query.go b/cmd/thanos/query.go index eaf14a74af..bbdc3c5d32 100644 --- a/cmd/thanos/query.go +++ b/cmd/thanos/query.go @@ -796,7 +796,7 @@ func runQuery( infoSrv := info.NewInfoServer( component.Query.String(), - info.WithLabelSetFunc(func() []*labelpb.LabelSet { return proxyStore.LabelSet() }), + info.WithLabelSetFunc(func() []labelpb.LabelSet { return proxyStore.LabelSet() }), info.WithStoreInfoFunc(func() (*infopb.StoreInfo, error) { if httpProbe.IsReady() { mint, maxt := proxyStore.TimeRange() diff --git a/cmd/thanos/receive.go b/cmd/thanos/receive.go index c517cf8aa5..985c15bc1e 100644 --- a/cmd/thanos/receive.go +++ b/cmd/thanos/receive.go @@ -343,7 +343,7 @@ func runReceive( infoSrv := info.NewInfoServer( component.Receive.String(), - info.WithLabelSetFunc(func() []*labelpb.LabelSet { return proxy.LabelSet() }), + info.WithLabelSetFunc(func() []labelpb.LabelSet { return proxy.LabelSet() }), info.WithStoreInfoFunc(func() (*infopb.StoreInfo, error) { if httpProbe.IsReady() { minTime, maxTime := proxy.TimeRange() diff --git a/cmd/thanos/rule.go b/cmd/thanos/rule.go index 7f418361ab..10f687c2ee 100644 --- a/cmd/thanos/rule.go +++ b/cmd/thanos/rule.go @@ -739,7 +739,7 @@ func runRule( tsdbStore := store.NewTSDBStore(logger, tsdbDB, component.Rule, conf.lset) infoOptions = append( infoOptions, - info.WithLabelSetFunc(func() []*labelpb.LabelSet { + info.WithLabelSetFunc(func() []labelpb.LabelSet { return tsdbStore.LabelSet() }), info.WithStoreInfoFunc(func() (*infopb.StoreInfo, error) { diff --git a/cmd/thanos/sidecar.go b/cmd/thanos/sidecar.go index 014b9fc9df..b5ce27f94b 100644 --- a/cmd/thanos/sidecar.go +++ b/cmd/thanos/sidecar.go @@ -312,7 +312,7 @@ func runSidecar( infoSrv := info.NewInfoServer( component.Sidecar.String(), - info.WithLabelSetFunc(func() []*labelpb.LabelSet { + info.WithLabelSetFunc(func() []labelpb.LabelSet { return promStore.LabelSet() }), info.WithStoreInfoFunc(func() (*infopb.StoreInfo, error) { diff --git a/cmd/thanos/store.go b/cmd/thanos/store.go index 319dc929c2..dbd9886007 100644 --- a/cmd/thanos/store.go +++ b/cmd/thanos/store.go @@ -496,7 +496,7 @@ func runStore( infoSrv := info.NewInfoServer( component.Store.String(), - info.WithLabelSetFunc(func() []*labelpb.LabelSet { + info.WithLabelSetFunc(func() []labelpb.LabelSet { return bs.LabelSet() }), info.WithStoreInfoFunc(func() (*infopb.StoreInfo, error) { diff --git a/internal/cortex/cortexpb/compat.go b/internal/cortex/cortexpb/compat.go index 52015a55f3..84f80861c8 100644 --- a/internal/cortex/cortexpb/compat.go +++ b/internal/cortex/cortexpb/compat.go @@ -15,45 +15,46 @@ import ( jsoniter "github.com/json-iterator/go" "github.com/prometheus/common/model" + "github.com/prometheus/prometheus/model/labels" + "github.com/thanos-io/thanos/internal/cortex/util" ) -func LabelPairToModelMetric(labels []*LabelPair) model.Metric { - m := make(model.Metric, len(labels)) - for _, l := range labels { - m[model.LabelName(l.Name)] = model.LabelValue(l.Value) - } - - return m +// FromLabelAdaptersToLabels casts []LabelAdapter to labels.Labels. +// It uses unsafe, but as LabelAdapter == labels.Label this should be safe. +// This allows us to use labels.Labels directly in protos. +// +// Note: while resulting labels.Labels is supposedly sorted, this function +// doesn't enforce that. If input is not sorted, output will be wrong. 
+func FromLabelAdaptersToLabels(ls []LabelAdapter) labels.Labels { + return *(*labels.Labels)(unsafe.Pointer(&ls)) } -func LabelMapToCortexMetric(lbls map[string]string) []*LabelPair { - labels := make([]*LabelPair, 0, len(lbls)) - for ln, lv := range lbls { - labels = append(labels, &LabelPair{ - Name: []byte(ln), - Value: []byte(lv), - }) - } - sort.Slice(labels, func(i, j int) bool { - return strings.Compare(string(labels[i].Name), string(labels[j].Name)) < 0 - }) +// FromLabelsToLabelAdapters casts labels.Labels to []LabelAdapter. +// It uses unsafe, but as LabelAdapter == labels.Label this should be safe. +// This allows us to use labels.Labels directly in protos. +func FromLabelsToLabelAdapters(ls labels.Labels) []LabelAdapter { + return *(*[]LabelAdapter)(unsafe.Pointer(&ls)) +} - return labels +// FromLabelAdaptersToMetric converts []LabelAdapter to a model.Metric. +// Don't do this on any performance sensitive paths. +func FromLabelAdaptersToMetric(ls []LabelAdapter) model.Metric { + return util.LabelsToMetric(FromLabelAdaptersToLabels(ls)) } -func ModelMetricToCortexMetric(m model.Metric) []*LabelPair { - labels := make([]*LabelPair, 0, len(m)) - for ln, lv := range m { - labels = append(labels, &LabelPair{ - Name: []byte(ln), - Value: []byte(lv), +// FromMetricsToLabelAdapters converts model.Metric to []LabelAdapter. +// Don't do this on any performance sensitive paths. +// The result is sorted. +func FromMetricsToLabelAdapters(metric model.Metric) []LabelAdapter { + result := make([]LabelAdapter, 0, len(metric)) + for k, v := range metric { + result = append(result, LabelAdapter{ + Name: string(k), + Value: string(v), }) } - sort.Slice(labels, func(i, j int) bool { - return strings.Compare(string(labels[i].Name), string(labels[j].Name)) < 0 - }) - - return labels + sort.Sort(byLabel(result)) // The labels should be sorted upon initialisation. + return result } type byLabel []LabelAdapter diff --git a/internal/cortex/cortexpb/cortex.proto b/internal/cortex/cortexpb/cortex.proto index 0c46fb6d8a..0ff9dd9e01 100644 --- a/internal/cortex/cortexpb/cortex.proto +++ b/internal/cortex/cortexpb/cortex.proto @@ -7,14 +7,19 @@ package cortexpb; option go_package = "cortexpb"; +import "gogoproto/gogo.proto"; + +option (gogoproto.marshaler_all) = true; +option (gogoproto.unmarshaler_all) = true; + message WriteRequest { - repeated TimeSeries timeseries = 1; + repeated TimeSeries timeseries = 1 [(gogoproto.nullable) = false, (gogoproto.customtype) = "PreallocTimeseries"]; enum SourceEnum { API = 0; RULE = 1; } SourceEnum Source = 2; - repeated MetricMetadata metadata = 3; + repeated MetricMetadata metadata = 3 [(gogoproto.nullable) = true]; bool skip_label_name_validation = 1000; //set intentionally high to keep WriteRequest compatible with upstream Prometheus } @@ -22,10 +27,10 @@ message WriteRequest { message WriteResponse {} message TimeSeries { - repeated LabelPair labels = 1; + repeated LabelPair labels = 1 [(gogoproto.nullable) = false, (gogoproto.customtype) = "LabelAdapter"]; // Sorted by time, oldest sample first. 
- repeated Sample samples = 2; - repeated Exemplar exemplars = 3; + repeated Sample samples = 2 [(gogoproto.nullable) = false]; + repeated Exemplar exemplars = 3 [(gogoproto.nullable) = false]; } message LabelPair { @@ -57,12 +62,12 @@ message MetricMetadata { } message Metric { - repeated LabelPair labels = 1; + repeated LabelPair labels = 1 [(gogoproto.nullable) = false, (gogoproto.customtype) = "LabelAdapter"]; } message Exemplar { // Exemplar labels, different than series labels - repeated LabelPair labels = 1; + repeated LabelPair labels = 1 [(gogoproto.nullable) = false, (gogoproto.customtype) = "LabelAdapter"]; double value = 2; int64 timestamp_ms = 3; } diff --git a/internal/cortex/querier/queryrange/compat.go b/internal/cortex/querier/queryrange/compat.go index e18805ffd6..9d7e77720a 100644 --- a/internal/cortex/querier/queryrange/compat.go +++ b/internal/cortex/querier/queryrange/compat.go @@ -18,14 +18,14 @@ func toModelSampleHistogramPair(s SampleHistogramPair) model.SampleHistogramPair } } -func fromModelSampleHistogramPair(modelSampleHistogram model.SampleHistogramPair) (s *SampleHistogramPair) { - return &SampleHistogramPair{ +func fromModelSampleHistogramPair(modelSampleHistogram model.SampleHistogramPair) (s SampleHistogramPair) { + return SampleHistogramPair{ Timestamp: int64(modelSampleHistogram.Timestamp), Histogram: fromModelSampleHistogram(modelSampleHistogram.Histogram), } } -func fromModelSampleHistogram(modelSampleHistogram *model.SampleHistogram) (s *SampleHistogram) { +func fromModelSampleHistogram(modelSampleHistogram *model.SampleHistogram) (s SampleHistogram) { buckets := make([]*HistogramBucket, len(modelSampleHistogram.Buckets)) for i, b := range modelSampleHistogram.Buckets { @@ -37,14 +37,14 @@ func fromModelSampleHistogram(modelSampleHistogram *model.SampleHistogram) (s *S } } - return &SampleHistogram{ + return SampleHistogram{ Count: float64(modelSampleHistogram.Count), Sum: float64(modelSampleHistogram.Sum), Buckets: buckets, } } -func toModelSampleHistogram(s *SampleHistogram) *model.SampleHistogram { +func toModelSampleHistogram(s SampleHistogram) *model.SampleHistogram { modelBuckets := make([]*model.HistogramBucket, len(s.Buckets)) for i, b := range s.Buckets { diff --git a/internal/cortex/querier/queryrange/marshaling_test.go b/internal/cortex/querier/queryrange/marshaling_test.go index b6e3c139f4..3c4bf11c68 100644 --- a/internal/cortex/querier/queryrange/marshaling_test.go +++ b/internal/cortex/querier/queryrange/marshaling_test.go @@ -60,25 +60,25 @@ func BenchmarkPrometheusCodec_EncodeResponse(b *testing.B) { } func mockPrometheusResponse(numSeries, numSamplesPerSeries int) *PrometheusResponse { - stream := make([]*SampleStream, numSeries) + stream := make([]SampleStream, numSeries) for s := 0; s < numSeries; s++ { // Generate random samples. - samples := make([]*cortexpb.Sample, numSamplesPerSeries) + samples := make([]cortexpb.Sample, numSamplesPerSeries) for i := 0; i < numSamplesPerSeries; i++ { - samples[i] = &cortexpb.Sample{ + samples[i] = cortexpb.Sample{ Value: rand.Float64(), TimestampMs: int64(i), } } // Generate random labels. 
- lbls := make([]*cortexpb.LabelPair, 10) + lbls := make([]cortexpb.LabelAdapter, 10) for i := range lbls { - lbls[i].Name = []byte("a_medium_size_label_name") - lbls[i].Value = []byte("a_medium_size_label_value_that_is_used_to_benchmark_marshalling") + lbls[i].Name = "a_medium_size_label_name" + lbls[i].Value = "a_medium_size_label_value_that_is_used_to_benchmark_marshalling" } - stream[s] = &SampleStream{ + stream[s] = SampleStream{ Labels: lbls, Samples: samples, } @@ -86,7 +86,7 @@ func mockPrometheusResponse(numSeries, numSamplesPerSeries int) *PrometheusRespo return &PrometheusResponse{ Status: "success", - Data: &PrometheusData{ + Data: PrometheusData{ ResultType: "vector", Result: stream, }, diff --git a/internal/cortex/querier/queryrange/query_range.go b/internal/cortex/querier/queryrange/query_range.go index 3440210339..d2d7ba4562 100644 --- a/internal/cortex/querier/queryrange/query_range.go +++ b/internal/cortex/querier/queryrange/query_range.go @@ -19,6 +19,7 @@ import ( "unsafe" "github.com/gogo/protobuf/proto" + github_com_gogo_protobuf_types "github.com/gogo/protobuf/types" "github.com/gogo/status" jsoniter "github.com/json-iterator/go" "github.com/opentracing/opentracing-go" @@ -86,7 +87,7 @@ type Request interface { // GetQuery returns the query of the request. GetQuery() string // GetCachingOptions returns the caching options. - GetCachingOptions() *CachingOptions + GetCachingOptions() CachingOptions // WithStartEnd clone the current request with different start and end timestamp. WithStartEnd(startTime int64, endTime int64) Request // WithQuery clone the current request with a different query. @@ -104,7 +105,7 @@ type Request interface { type Response interface { proto.Message // GetHeaders returns the HTTP headers in the response. - GetQueryRangeHeaders() []*PrometheusResponseHeader + GetHeaders() []*PrometheusResponseHeader // GetStats returns the Prometheus query stats in the response. 
GetStats() *PrometheusResponseStats } @@ -188,9 +189,9 @@ func (resp *PrometheusInstantQueryResponse) GetStats() *PrometheusResponseStats func NewEmptyPrometheusResponse() *PrometheusResponse { return &PrometheusResponse{ Status: StatusSuccess, - Data: &PrometheusData{ + Data: PrometheusData{ ResultType: model.ValMatrix.String(), - Result: []*SampleStream{}, + Result: []SampleStream{}, }, } } @@ -199,9 +200,9 @@ func NewEmptyPrometheusResponse() *PrometheusResponse { return &PrometheusInstantQueryResponse{ Status: StatusSuccess, - Data: &PrometheusInstantQueryData{ + Data: PrometheusInstantQueryData{ ResultType: model.ValVector.String(), - Result: &PrometheusInstantQueryResult{ + Result: PrometheusInstantQueryResult{ Result: &PrometheusInstantQueryResult_Vector{}, }, }, @@ -235,15 +236,7 @@ func AnalyzesMerge(analysis ...*Analysis) *Analysis { traverseAnalysis(a, &elements) for i := 0; i < len(elements) && i < len(rootElements); i++ { - if rootElements[i].ExecutionTime == nil { - rootElements[i].ExecutionTime = elements[i].ExecutionTime - continue - } - if elements[i].ExecutionTime == nil { - continue - } - rootElements[i].ExecutionTime.Nanos += elements[i].ExecutionTime.Nanos - rootElements[i].ExecutionTime.Seconds += elements[i].ExecutionTime.Seconds + rootElements[i].ExecutionTime += elements[i].ExecutionTime } } @@ -282,7 +275,7 @@ func (prometheusCodec) MergeResponse(_ Request, responses ...Response) (Response response := PrometheusResponse{ Status: StatusSuccess, - Data: &PrometheusData{ + Data: PrometheusData{ ResultType: model.ValMatrix.String(), Result: matrixMerge(promResponses), Stats: StatsMerge(responses), @@ -478,12 +471,12 @@ func (s *SampleStream) UnmarshalJSON(data []byte) error { return err } - s.Labels = cortexpb.ModelMetricToCortexMetric(sampleStream.Metric) + s.Labels = cortexpb.FromMetricsToLabelAdapters(sampleStream.Metric) if len(sampleStream.Values) > 0 { - s.Samples = make([]*cortexpb.Sample, 0, len(sampleStream.Values)) + s.Samples = make([]cortexpb.Sample, 0, len(sampleStream.Values)) for _, sample := range sampleStream.Values { - s.Samples = append(s.Samples, &cortexpb.Sample{ + s.Samples = append(s.Samples, cortexpb.Sample{ Value: float64(sample.Value), TimestampMs: int64(sample.Timestamp), }) @@ -491,7 +484,7 @@ func (s *SampleStream) UnmarshalJSON(data []byte) error { } if len(sampleStream.Histograms) > 0 { - s.Histograms = make([]*SampleHistogramPair, 0, len(sampleStream.Histograms)) + s.Histograms = make([]SampleHistogramPair, 0, len(sampleStream.Histograms)) for _, h := range sampleStream.Histograms { s.Histograms = append(s.Histograms, fromModelSampleHistogramPair(h)) } @@ -503,7 +496,7 @@ func (s *SampleStream) UnmarshalJSON(data []byte) error { // MarshalJSON implements json.Marshaler.
func (s *SampleStream) MarshalJSON() ([]byte, error) { var sampleStream model.SampleStream - sampleStream.Metric = cortexpb.LabelPairToModelMetric(s.Labels) + sampleStream.Metric = cortexpb.FromLabelAdaptersToMetric(s.Labels) sampleStream.Values = make([]model.SamplePair, 0, len(s.Samples)) for _, sample := range s.Samples { @@ -515,7 +508,7 @@ func (s *SampleStream) MarshalJSON() ([]byte, error) { sampleStream.Histograms = make([]model.SampleHistogramPair, 0, len(s.Histograms)) for _, h := range s.Histograms { - sampleStream.Histograms = append(sampleStream.Histograms, toModelSampleHistogramPair(*h)) + sampleStream.Histograms = append(sampleStream.Histograms, toModelSampleHistogramPair(h)) } return json.Marshal(sampleStream) @@ -528,13 +521,13 @@ func (s *Sample) UnmarshalJSON(data []byte) error { if err := json.Unmarshal(data, &sample); err != nil { return err } - s.Labels = cortexpb.ModelMetricToCortexMetric(sample.Metric) + s.Labels = cortexpb.FromMetricsToLabelAdapters(sample.Metric) s.SampleValue = float64(sample.Value) s.Timestamp = int64(sample.Timestamp) if sample.Histogram != nil { sh := fromModelSampleHistogram(sample.Histogram) - s.Histogram = sh + s.Histogram = &sh } else { s.Histogram = nil } @@ -545,11 +538,11 @@ func (s *Sample) UnmarshalJSON(data []byte) error { // MarshalJSON implements json.Marshaler. func (s *Sample) MarshalJSON() ([]byte, error) { var sample model.Sample - sample.Metric = cortexpb.LabelPairToModelMetric(s.Labels) + sample.Metric = cortexpb.FromLabelAdaptersToMetric(s.Labels) sample.Value = model.SampleValue(s.SampleValue) sample.Timestamp = model.Time(s.Timestamp) if s.Histogram != nil { - sample.Histogram = toModelSampleHistogram(s.Histogram) + sample.Histogram = toModelSampleHistogram(*s.Histogram) } return json.Marshal(sample) } @@ -602,7 +595,7 @@ func (s *PrometheusInstantQueryData) UnmarshalJSON(data []byte) error { if err := json.Unmarshal(data, &result); err != nil { return err } - s.Result = &PrometheusInstantQueryResult{ + s.Result = PrometheusInstantQueryResult{ Result: &PrometheusInstantQueryResult_Vector{Vector: &Vector{ Samples: result.Samples, }}, @@ -614,7 +607,7 @@ func (s *PrometheusInstantQueryData) UnmarshalJSON(data []byte) error { if err := json.Unmarshal(data, &result); err != nil { return err } - s.Result = &PrometheusInstantQueryResult{ + s.Result = PrometheusInstantQueryResult{ Result: &PrometheusInstantQueryResult_Matrix{Matrix: &Matrix{ SampleStreams: result.SampleStreams, }}, @@ -626,7 +619,7 @@ func (s *PrometheusInstantQueryData) UnmarshalJSON(data []byte) error { if err := json.Unmarshal(data, &result); err != nil { return err } - s.Result = &PrometheusInstantQueryResult{ + s.Result = PrometheusInstantQueryResult{ Result: &PrometheusInstantQueryResult_Scalar{Scalar: &result.Scalar}, } case model.ValString.String(): @@ -636,7 +629,7 @@ func (s *PrometheusInstantQueryData) UnmarshalJSON(data []byte) error { if err := json.Unmarshal(data, &result); err != nil { return err } - s.Result = &PrometheusInstantQueryResult{ + s.Result = PrometheusInstantQueryResult{ Result: &PrometheusInstantQueryResult_StringSample{StringSample: &StringSample{ TimestampMs: int64(result.Sample.Timestamp), Value: result.Sample.Value, @@ -749,13 +742,11 @@ func StatsMerge(resps []Response) *PrometheusResponseStats { return result } -func matrixMerge(resps []*PrometheusResponse) []*SampleStream { +func matrixMerge(resps []*PrometheusResponse) []SampleStream { output := map[string]*SampleStream{} for _, resp := range resps { for _, stream := range 
resp.Data.Result { - stream := stream - - metric := cortexpb.LabelPairToModelMetric(stream.Labels).String() + metric := cortexpb.FromLabelAdaptersToLabels(stream.Labels).String() existing, ok := output[metric] if !ok { existing = &SampleStream{ @@ -799,9 +790,9 @@ func matrixMerge(resps []*PrometheusResponse) []*SampleStream { } sort.Strings(keys) - result := make([]*SampleStream, 0, len(output)) + result := make([]SampleStream, 0, len(output)) for _, key := range keys { - result = append(result, output[key]) + result = append(result, *output[key]) } return result @@ -811,7 +802,7 @@ func matrixMerge(resps []*PrometheusResponse) []*SampleStream { // return a sub slice whose first element's is the smallest timestamp that is strictly // bigger than the given minTs. Empty slice is returned if minTs is bigger than all the // timestamps in samples. -func SliceSamples(samples []*cortexpb.Sample, minTs int64) []*cortexpb.Sample { +func SliceSamples(samples []cortexpb.Sample, minTs int64) []cortexpb.Sample { if len(samples) <= 0 || minTs < samples[0].TimestampMs { return samples } @@ -831,7 +822,7 @@ func SliceSamples(samples []*cortexpb.Sample, minTs int64) []*cortexpb.Sample { // return a sub slice whose first element's is the smallest timestamp that is strictly // bigger than the given minTs. Empty slice is returned if minTs is bigger than all the // timestamps in histogram. -func SliceHistogram(histograms []*SampleHistogramPair, minTs int64) []*SampleHistogramPair { +func SliceHistogram(histograms []SampleHistogramPair, minTs int64) []SampleHistogramPair { if len(histograms) <= 0 || minTs < histograms[0].GetTimestamp() { return histograms } @@ -916,8 +907,10 @@ func init() { jsoniter.RegisterTypeDecoderFunc("queryrange.PrometheusResponseQueryableSamplesStatsPerStep", PrometheusResponseQueryableSamplesStatsPerStepJsoniterDecode) } +type Duration time.Duration + func (d Duration) MarshalJSON() ([]byte, error) { - return json.Marshal(time.Duration(d.Seconds*int64(time.Second) + int64(d.Nanos)).String()) + return json.Marshal(time.Duration(d).String()) } func (d *Duration) UnmarshalJSON(b []byte) error { @@ -927,29 +920,33 @@ func (d *Duration) UnmarshalJSON(b []byte) error { } switch value := v.(type) { case float64: - *d = Duration{ - Seconds: int64(value), - } + *d = Duration(time.Duration(value)) return nil case string: tmp, err := time.ParseDuration(value) if err != nil { return err } - *d = Duration{ - Seconds: int64(tmp / time.Second), - Nanos: int32(tmp % time.Second), - } + *d = Duration(tmp) return nil default: return errors.New("invalid duration") } } -func (r *PrometheusResponse) GetQueryRangeHeaders() []*PrometheusResponseHeader { - return r.Headers +func (d *Duration) Size() int { + return github_com_gogo_protobuf_types.SizeOfStdDuration(time.Duration(*d)) +} + +func (d *Duration) Unmarshal(b []byte) error { + var td time.Duration + if err := github_com_gogo_protobuf_types.StdDurationUnmarshal(&td, b); err != nil { + return err + } + *d = Duration(td) + return nil } -func (r *PrometheusInstantQueryResponse) GetQueryRangeHeaders() []*PrometheusResponseHeader { - return r.Headers +func (d *Duration) MarshalTo(b []byte) (int, error) { + return github_com_gogo_protobuf_types.StdDurationMarshalTo(time.Duration(*d), b) } diff --git a/internal/cortex/querier/queryrange/query_range_test.go b/internal/cortex/querier/queryrange/query_range_test.go index 204aa42352..121459de5e 100644 --- a/internal/cortex/querier/queryrange/query_range_test.go +++ 
b/internal/cortex/querier/queryrange/query_range_test.go @@ -10,6 +10,7 @@ import ( "net/http" "strconv" "testing" + "time" jsoniter "github.com/json-iterator/go" "github.com/prometheus/common/model" @@ -131,14 +132,14 @@ func TestResponseWithStats(t *testing.T) { body: `{"status":"success","data":{"resultType":"matrix","result":[{"metric":{"foo":"bar"},"values":[[1536673680,"137"],[1536673780,"137"]]}],"stats":{"samples":{"totalQueryableSamples":10,"totalQueryableSamplesPerStep":[[1536673680,5],[1536673780,5]]}},"analysis":null}}`, expected: &PrometheusResponse{ Status: "success", - Data: &PrometheusData{ + Data: PrometheusData{ ResultType: model.ValMatrix.String(), - Result: []*SampleStream{ + Result: []SampleStream{ { - Labels: []*cortexpb.LabelPair{ - {Name: []byte("foo"), Value: []byte("bar")}, + Labels: []cortexpb.LabelAdapter{ + {Name: "foo", Value: "bar"}, }, - Samples: []*cortexpb.Sample{ + Samples: []cortexpb.Sample{ {Value: 137, TimestampMs: 1536673680000}, {Value: 137, TimestampMs: 1536673780000}, }, @@ -160,20 +161,18 @@ func TestResponseWithStats(t *testing.T) { body: `{"status":"success","data":{"resultType":"matrix","result":[{"metric":{"foo":"bar"},"values":[[1536673680,"137"],[1536673780,"137"]]}],"stats":{"samples":{"totalQueryableSamples":10,"totalQueryableSamplesPerStep":[[1536673680,5],[1536673780,5]]}},"analysis":{"name":"[noArgFunction]","executionTime":"1s","children":null}}}`, expected: &PrometheusResponse{ Status: "success", - Data: &PrometheusData{ + Data: PrometheusData{ Analysis: &Analysis{ - Name: "[noArgFunction]", - ExecutionTime: &Duration{ - Seconds: 1, - }, + Name: "[noArgFunction]", + ExecutionTime: Duration(1 * time.Second), }, ResultType: model.ValMatrix.String(), - Result: []*SampleStream{ + Result: []SampleStream{ { - Labels: []*cortexpb.LabelPair{ - {Name: []byte("foo"), Value: []byte("bar")}, + Labels: []cortexpb.LabelAdapter{ + {Name: "foo", Value: "bar"}, }, - Samples: []*cortexpb.Sample{ + Samples: []cortexpb.Sample{ {Value: 137, TimestampMs: 1536673680000}, {Value: 137, TimestampMs: 1536673780000}, }, @@ -228,10 +227,10 @@ func TestMergeAPIResponses(t *testing.T) { input: []Response{}, expected: &PrometheusResponse{ Status: StatusSuccess, - Data: &PrometheusData{ + Data: PrometheusData{ ResultType: matrix, Analysis: (*Analysis)(nil), - Result: []*SampleStream{}, + Result: []SampleStream{}, }, }, }, @@ -240,18 +239,18 @@ func TestMergeAPIResponses(t *testing.T) { name: "A single empty response shouldn't panic.", input: []Response{ &PrometheusResponse{ - Data: &PrometheusData{ + Data: PrometheusData{ ResultType: matrix, - Result: []*SampleStream{}, + Result: []SampleStream{}, }, }, }, expected: &PrometheusResponse{ Status: StatusSuccess, - Data: &PrometheusData{ + Data: PrometheusData{ ResultType: matrix, Analysis: &Analysis{}, - Result: []*SampleStream{}, + Result: []SampleStream{}, }, }, }, @@ -260,24 +259,24 @@ func TestMergeAPIResponses(t *testing.T) { name: "Multiple empty responses shouldn't panic.", input: []Response{ &PrometheusResponse{ - Data: &PrometheusData{ + Data: PrometheusData{ ResultType: matrix, - Result: []*SampleStream{}, + Result: []SampleStream{}, }, }, &PrometheusResponse{ - Data: &PrometheusData{ + Data: PrometheusData{ ResultType: matrix, - Result: []*SampleStream{}, + Result: []SampleStream{}, }, }, }, expected: &PrometheusResponse{ Status: StatusSuccess, - Data: &PrometheusData{ + Data: PrometheusData{ ResultType: matrix, Analysis: &Analysis{}, - Result: []*SampleStream{}, + Result: []SampleStream{}, }, }, }, @@ 
-286,18 +285,16 @@ func TestMergeAPIResponses(t *testing.T) { name: "Basic merging of two responses.", input: []Response{ &PrometheusResponse{ - Data: &PrometheusData{ + Data: PrometheusData{ ResultType: matrix, Analysis: &Analysis{ - Name: "foo", - ExecutionTime: &Duration{ - Seconds: 1, - }, + Name: "foo", + ExecutionTime: Duration(1 * time.Second), }, - Result: []*SampleStream{ + Result: []SampleStream{ { - Labels: []*cortexpb.LabelPair{}, - Samples: []*cortexpb.Sample{ + Labels: []cortexpb.LabelAdapter{}, + Samples: []cortexpb.Sample{ {Value: 0, TimestampMs: 0}, {Value: 1, TimestampMs: 1}, }, @@ -306,18 +303,16 @@ func TestMergeAPIResponses(t *testing.T) { }, }, &PrometheusResponse{ - Data: &PrometheusData{ + Data: PrometheusData{ ResultType: matrix, Analysis: &Analysis{ - Name: "foo", - ExecutionTime: &Duration{ - Seconds: 1, - }, + Name: "foo", + ExecutionTime: Duration(1 * time.Second), }, - Result: []*SampleStream{ + Result: []SampleStream{ { - Labels: []*cortexpb.LabelPair{}, - Samples: []*cortexpb.Sample{ + Labels: []cortexpb.LabelAdapter{}, + Samples: []cortexpb.Sample{ {Value: 2, TimestampMs: 2}, {Value: 3, TimestampMs: 3}, }, @@ -328,18 +323,16 @@ func TestMergeAPIResponses(t *testing.T) { }, expected: &PrometheusResponse{ Status: StatusSuccess, - Data: &PrometheusData{ + Data: PrometheusData{ ResultType: matrix, Analysis: &Analysis{ - Name: "foo", - ExecutionTime: &Duration{ - Seconds: 2, - }, + Name: "foo", + ExecutionTime: Duration(2 * time.Second), }, - Result: []*SampleStream{ + Result: []SampleStream{ { - Labels: []*cortexpb.LabelPair{}, - Samples: []*cortexpb.Sample{ + Labels: []cortexpb.LabelAdapter{}, + Samples: []cortexpb.Sample{ {Value: 0, TimestampMs: 0}, {Value: 1, TimestampMs: 1}, {Value: 2, TimestampMs: 2}, @@ -355,21 +348,17 @@ func TestMergeAPIResponses(t *testing.T) { name: "Basic merging of two responses with nested analysis trees.", input: []Response{ &PrometheusResponse{ - Data: &PrometheusData{ + Data: PrometheusData{ ResultType: matrix, Analysis: &Analysis{ - Name: "foo", - Children: []*Analysis{{Name: "bar", ExecutionTime: &Duration{ - Seconds: 1, - }}}, - ExecutionTime: &Duration{ - Seconds: 1, - }, + Name: "foo", + Children: []*Analysis{{Name: "bar", ExecutionTime: Duration(1 * time.Second)}}, + ExecutionTime: Duration(1 * time.Second), }, - Result: []*SampleStream{ + Result: []SampleStream{ { - Labels: []*cortexpb.LabelPair{}, - Samples: []*cortexpb.Sample{ + Labels: []cortexpb.LabelAdapter{}, + Samples: []cortexpb.Sample{ {Value: 0, TimestampMs: 0}, {Value: 1, TimestampMs: 1}, }, @@ -378,21 +367,17 @@ func TestMergeAPIResponses(t *testing.T) { }, }, &PrometheusResponse{ - Data: &PrometheusData{ + Data: PrometheusData{ ResultType: matrix, Analysis: &Analysis{ - Name: "foo", - Children: []*Analysis{{Name: "bar", ExecutionTime: &Duration{ - Seconds: 1, - }}}, - ExecutionTime: &Duration{ - Seconds: 1, - }, + Name: "foo", + Children: []*Analysis{{Name: "bar", ExecutionTime: Duration(1 * time.Second)}}, + ExecutionTime: Duration(1 * time.Second), }, - Result: []*SampleStream{ + Result: []SampleStream{ { - Labels: []*cortexpb.LabelPair{}, - Samples: []*cortexpb.Sample{ + Labels: []cortexpb.LabelAdapter{}, + Samples: []cortexpb.Sample{ {Value: 2, TimestampMs: 2}, {Value: 3, TimestampMs: 3}, }, @@ -403,21 +388,17 @@ func TestMergeAPIResponses(t *testing.T) { }, expected: &PrometheusResponse{ Status: StatusSuccess, - Data: &PrometheusData{ + Data: PrometheusData{ ResultType: matrix, Analysis: &Analysis{ - Name: "foo", - Children: []*Analysis{{Name: "bar", 
ExecutionTime: &Duration{ - Seconds: 2, - }}}, - ExecutionTime: &Duration{ - Seconds: 2, - }, + Name: "foo", + Children: []*Analysis{{Name: "bar", ExecutionTime: Duration(2 * time.Second)}}, + ExecutionTime: Duration(2 * time.Second), }, - Result: []*SampleStream{ + Result: []SampleStream{ { - Labels: []*cortexpb.LabelPair{}, - Samples: []*cortexpb.Sample{ + Labels: []cortexpb.LabelAdapter{}, + Samples: []cortexpb.Sample{ {Value: 0, TimestampMs: 0}, {Value: 1, TimestampMs: 1}, {Value: 2, TimestampMs: 2}, @@ -437,13 +418,13 @@ func TestMergeAPIResponses(t *testing.T) { }, expected: &PrometheusResponse{ Status: StatusSuccess, - Data: &PrometheusData{ + Data: PrometheusData{ ResultType: matrix, Analysis: &Analysis{}, - Result: []*SampleStream{ + Result: []SampleStream{ { - Labels: []*cortexpb.LabelPair{{Name: []byte("a"), Value: []byte("b")}, {Name: []byte("c"), Value: []byte("d")}}, - Samples: []*cortexpb.Sample{ + Labels: []cortexpb.LabelAdapter{{Name: "a", Value: "b"}, {Name: "c", Value: "d"}}, + Samples: []cortexpb.Sample{ {Value: 0, TimestampMs: 0}, {Value: 1, TimestampMs: 1000}, {Value: 2, TimestampMs: 2000}, @@ -463,13 +444,13 @@ func TestMergeAPIResponses(t *testing.T) { }, expected: &PrometheusResponse{ Status: StatusSuccess, - Data: &PrometheusData{ + Data: PrometheusData{ ResultType: matrix, Analysis: &Analysis{}, - Result: []*SampleStream{ + Result: []SampleStream{ { - Labels: []*cortexpb.LabelPair{{Name: []byte("a"), Value: []byte("b")}, {Name: []byte("c"), Value: []byte("d")}}, - Samples: []*cortexpb.Sample{ + Labels: []cortexpb.LabelAdapter{{Name: "a", Value: "b"}, {Name: "c", Value: "d"}}, + Samples: []cortexpb.Sample{ {Value: 1, TimestampMs: 1000}, {Value: 2, TimestampMs: 2000}, {Value: 3, TimestampMs: 3000}, @@ -487,13 +468,13 @@ func TestMergeAPIResponses(t *testing.T) { }, expected: &PrometheusResponse{ Status: StatusSuccess, - Data: &PrometheusData{ + Data: PrometheusData{ Analysis: &Analysis{}, ResultType: matrix, - Result: []*SampleStream{ + Result: []SampleStream{ { - Labels: []*cortexpb.LabelPair{{Name: []byte("a"), Value: []byte("b")}, {Name: []byte("c"), Value: []byte("d")}}, - Samples: []*cortexpb.Sample{ + Labels: []cortexpb.LabelAdapter{{Name: "a", Value: "b"}, {Name: "c", Value: "d"}}, + Samples: []cortexpb.Sample{ {Value: 1, TimestampMs: 1000}, {Value: 2, TimestampMs: 2000}, {Value: 3, TimestampMs: 3000}, @@ -513,13 +494,13 @@ func TestMergeAPIResponses(t *testing.T) { }, expected: &PrometheusResponse{ Status: StatusSuccess, - Data: &PrometheusData{ + Data: PrometheusData{ ResultType: matrix, Analysis: &Analysis{}, - Result: []*SampleStream{ + Result: []SampleStream{ { - Labels: []*cortexpb.LabelPair{{Name: []byte("a"), Value: []byte("b")}, {Name: []byte("c"), Value: []byte("d")}}, - Samples: []*cortexpb.Sample{ + Labels: []cortexpb.LabelAdapter{{Name: "a", Value: "b"}, {Name: "c", Value: "d"}}, + Samples: []cortexpb.Sample{ {Value: 2, TimestampMs: 2000}, {Value: 3, TimestampMs: 3000}, {Value: 4, TimestampMs: 4000}, @@ -534,19 +515,19 @@ func TestMergeAPIResponses(t *testing.T) { name: "[stats] A single empty response shouldn't panic.", input: []Response{ &PrometheusResponse{ - Data: &PrometheusData{ + Data: PrometheusData{ ResultType: matrix, - Result: []*SampleStream{}, + Result: []SampleStream{}, Stats: &PrometheusResponseStats{Samples: &PrometheusResponseSamplesStats{}}, }, }, }, expected: &PrometheusResponse{ Status: StatusSuccess, - Data: &PrometheusData{ + Data: PrometheusData{ ResultType: matrix, Analysis: &Analysis{}, - Result: []*SampleStream{}, + 
Result: []SampleStream{}, Stats: &PrometheusResponseStats{Samples: &PrometheusResponseSamplesStats{}}, }, }, @@ -556,26 +537,26 @@ func TestMergeAPIResponses(t *testing.T) { name: "[stats] Multiple empty responses shouldn't panic.", input: []Response{ &PrometheusResponse{ - Data: &PrometheusData{ + Data: PrometheusData{ ResultType: matrix, - Result: []*SampleStream{}, + Result: []SampleStream{}, Stats: &PrometheusResponseStats{Samples: &PrometheusResponseSamplesStats{}}, }, }, &PrometheusResponse{ - Data: &PrometheusData{ + Data: PrometheusData{ ResultType: matrix, - Result: []*SampleStream{}, + Result: []SampleStream{}, Stats: &PrometheusResponseStats{Samples: &PrometheusResponseSamplesStats{}}, }, }, }, expected: &PrometheusResponse{ Status: StatusSuccess, - Data: &PrometheusData{ + Data: PrometheusData{ Analysis: &Analysis{}, ResultType: matrix, - Result: []*SampleStream{}, + Result: []SampleStream{}, Stats: &PrometheusResponseStats{Samples: &PrometheusResponseSamplesStats{}}, }, }, @@ -585,12 +566,12 @@ func TestMergeAPIResponses(t *testing.T) { name: "[stats] Basic merging of two responses.", input: []Response{ &PrometheusResponse{ - Data: &PrometheusData{ + Data: PrometheusData{ ResultType: matrix, - Result: []*SampleStream{ + Result: []SampleStream{ { - Labels: []*cortexpb.LabelPair{}, - Samples: []*cortexpb.Sample{ + Labels: []cortexpb.LabelAdapter{}, + Samples: []cortexpb.Sample{ {Value: 0, TimestampMs: 0}, {Value: 1, TimestampMs: 1}, }, @@ -606,13 +587,13 @@ func TestMergeAPIResponses(t *testing.T) { }, }, &PrometheusResponse{ - Data: &PrometheusData{ + Data: PrometheusData{ Analysis: &Analysis{}, ResultType: matrix, - Result: []*SampleStream{ + Result: []SampleStream{ { - Labels: []*cortexpb.LabelPair{}, - Samples: []*cortexpb.Sample{ + Labels: []cortexpb.LabelAdapter{}, + Samples: []cortexpb.Sample{ {Value: 2, TimestampMs: 2}, {Value: 3, TimestampMs: 3}, }, @@ -630,13 +611,13 @@ func TestMergeAPIResponses(t *testing.T) { }, expected: &PrometheusResponse{ Status: StatusSuccess, - Data: &PrometheusData{ + Data: PrometheusData{ Analysis: &Analysis{}, ResultType: matrix, - Result: []*SampleStream{ + Result: []SampleStream{ { - Labels: []*cortexpb.LabelPair{}, - Samples: []*cortexpb.Sample{ + Labels: []cortexpb.LabelAdapter{}, + Samples: []cortexpb.Sample{ {Value: 0, TimestampMs: 0}, {Value: 1, TimestampMs: 1}, {Value: 2, TimestampMs: 2}, @@ -664,13 +645,13 @@ func TestMergeAPIResponses(t *testing.T) { }, expected: &PrometheusResponse{ Status: StatusSuccess, - Data: &PrometheusData{ + Data: PrometheusData{ ResultType: matrix, Analysis: &Analysis{}, - Result: []*SampleStream{ + Result: []SampleStream{ { - Labels: []*cortexpb.LabelPair{{Name: []byte("a"), Value: []byte("b")}, {Name: []byte("c"), Value: []byte("d")}}, - Samples: []*cortexpb.Sample{ + Labels: []cortexpb.LabelAdapter{{Name: "a", Value: "b"}, {Name: "c", Value: "d"}}, + Samples: []cortexpb.Sample{ {Value: 1, TimestampMs: 1000}, {Value: 2, TimestampMs: 2000}, {Value: 3, TimestampMs: 3000}, @@ -697,13 +678,13 @@ func TestMergeAPIResponses(t *testing.T) { }, expected: &PrometheusResponse{ Status: StatusSuccess, - Data: &PrometheusData{ + Data: PrometheusData{ ResultType: matrix, Analysis: &Analysis{}, - Result: []*SampleStream{ + Result: []SampleStream{ { - Labels: []*cortexpb.LabelPair{{Name: []byte("a"), Value: []byte("b")}, {Name: []byte("c"), Value: []byte("d")}}, - Samples: []*cortexpb.Sample{ + Labels: []cortexpb.LabelAdapter{{Name: "a", Value: "b"}, {Name: "c", Value: "d"}}, + Samples: []cortexpb.Sample{ {Value: 1, 
TimestampMs: 1000}, {Value: 2, TimestampMs: 2000}, {Value: 3, TimestampMs: 3000}, @@ -737,13 +718,13 @@ func TestMergeAPIResponses(t *testing.T) { }, expected: &PrometheusResponse{ Status: StatusSuccess, - Data: &PrometheusData{ + Data: PrometheusData{ ResultType: matrix, Analysis: &Analysis{}, - Result: []*SampleStream{ + Result: []SampleStream{ { - Labels: []*cortexpb.LabelPair{{Name: []byte("a"), Value: []byte("b")}, {Name: []byte("c"), Value: []byte("d")}}, - Samples: []*cortexpb.Sample{ + Labels: []cortexpb.LabelAdapter{{Name: "a", Value: "b"}, {Name: "c", Value: "d"}}, + Samples: []cortexpb.Sample{ {Value: 1, TimestampMs: 1000}, {Value: 2, TimestampMs: 2000}, {Value: 3, TimestampMs: 3000}, @@ -773,13 +754,13 @@ func TestMergeAPIResponses(t *testing.T) { }, expected: &PrometheusResponse{ Status: StatusSuccess, - Data: &PrometheusData{ + Data: PrometheusData{ ResultType: matrix, Analysis: &Analysis{}, - Result: []*SampleStream{ + Result: []SampleStream{ { - Labels: []*cortexpb.LabelPair{{Name: []byte("a"), Value: []byte("b")}, {Name: []byte("c"), Value: []byte("d")}}, - Samples: []*cortexpb.Sample{ + Labels: []cortexpb.LabelAdapter{{Name: "a", Value: "b"}, {Name: "c", Value: "d"}}, + Samples: []cortexpb.Sample{ {Value: 2, TimestampMs: 2000}, {Value: 3, TimestampMs: 3000}, {Value: 4, TimestampMs: 4000}, diff --git a/internal/cortex/querier/queryrange/queryrange.pb.go b/internal/cortex/querier/queryrange/queryrange.pb.go index dfcf74aa52..9ce28b017a 100644 --- a/internal/cortex/querier/queryrange/queryrange.pb.go +++ b/internal/cortex/querier/queryrange/queryrange.pb.go @@ -6,19 +6,25 @@ package queryrange import ( encoding_binary "encoding/binary" fmt "fmt" - io "io" - math "math" - math_bits "math/bits" + _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" + github_com_gogo_protobuf_types "github.com/gogo/protobuf/types" types "github.com/gogo/protobuf/types" cortexpb "github.com/thanos-io/thanos/internal/cortex/cortexpb" + github_com_thanos_io_thanos_internal_cortex_cortexpb "github.com/thanos-io/thanos/internal/cortex/cortexpb" + + io "io" + math "math" + math_bits "math/bits" + time "time" ) // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal var _ = fmt.Errorf var _ = math.Inf +var _ = time.Kitchen // This is a compile-time assertion to ensure that this generated file // is compatible with the proto package it is being compiled against. 
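The regenerated queryrange.pb.go below declares Labels with the LabelAdapter customtype, and the FromLabelAdaptersToLabels/FromLabelsToLabelAdapters helpers added in compat.go earlier convert whole slices without copying: the two element types share an identical memory layout, so only the slice header is reinterpreted. A minimal, self-contained sketch of that zero-copy cast, using hypothetical Adapter and Label types (the names and the adaptersToLabels helper are illustrative, not from this patch):

package main

import (
	"fmt"
	"unsafe"
)

// Adapter and Label are hypothetical stand-ins with the exact same field
// layout; only that equivalence makes the unsafe cast below well-defined.
type Adapter struct{ Name, Value string }

type Label struct{ Name, Value string }

// adaptersToLabels reinterprets the slice header in place, mirroring the
// zero-copy cast style used by the compat.go helpers in this diff.
func adaptersToLabels(in []Adapter) []Label {
	return *(*[]Label)(unsafe.Pointer(&in))
}

func main() {
	ls := adaptersToLabels([]Adapter{{Name: "job", Value: "thanos"}})
	fmt.Println(ls[0].Name, ls[0].Value) // prints: job thanos
}

The same caveat applies as to the real helpers: the cast neither sorts nor validates anything, it only changes the slice's static type.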
@@ -27,9 +33,7 @@ var _ = math.Inf const _ = proto.GoGoProtoPackageIsVersion3 // please upgrade the proto package type PrometheusRequestHeader struct { - // @gotags: json:"-" - Name string `protobuf:"bytes,1,opt,name=Name,proto3" json:"-"` - // @gotags: json:"-" + Name string `protobuf:"bytes,1,opt,name=Name,proto3" json:"-"` Values []string `protobuf:"bytes,2,rep,name=Values,proto3" json:"-"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` @@ -84,14 +88,13 @@ func (m *PrometheusRequestHeader) GetValues() []string { } type PrometheusRequest struct { - Path string `protobuf:"bytes,1,opt,name=path,proto3" json:"path,omitempty"` - Start int64 `protobuf:"varint,2,opt,name=start,proto3" json:"start,omitempty"` - End int64 `protobuf:"varint,3,opt,name=end,proto3" json:"end,omitempty"` - Step int64 `protobuf:"varint,4,opt,name=step,proto3" json:"step,omitempty"` - Timeout *Duration `protobuf:"bytes,5,opt,name=timeout,proto3" json:"timeout,omitempty"` - Query string `protobuf:"bytes,6,opt,name=query,proto3" json:"query,omitempty"` - CachingOptions *CachingOptions `protobuf:"bytes,7,opt,name=cachingOptions,proto3" json:"cachingOptions,omitempty"` - // @gotags: json:"-" + Path string `protobuf:"bytes,1,opt,name=path,proto3" json:"path,omitempty"` + Start int64 `protobuf:"varint,2,opt,name=start,proto3" json:"start,omitempty"` + End int64 `protobuf:"varint,3,opt,name=end,proto3" json:"end,omitempty"` + Step int64 `protobuf:"varint,4,opt,name=step,proto3" json:"step,omitempty"` + Timeout time.Duration `protobuf:"bytes,5,opt,name=timeout,proto3,stdduration" json:"timeout"` + Query string `protobuf:"bytes,6,opt,name=query,proto3" json:"query,omitempty"` + CachingOptions CachingOptions `protobuf:"bytes,7,opt,name=cachingOptions,proto3" json:"cachingOptions"` Headers []*PrometheusRequestHeader `protobuf:"bytes,8,rep,name=Headers,proto3" json:"-"` Stats string `protobuf:"bytes,9,opt,name=stats,proto3" json:"stats,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` @@ -160,11 +163,11 @@ func (m *PrometheusRequest) GetStep() int64 { return 0 } -func (m *PrometheusRequest) GetTimeout() *Duration { +func (m *PrometheusRequest) GetTimeout() time.Duration { if m != nil { return m.Timeout } - return nil + return 0 } func (m *PrometheusRequest) GetQuery() string { @@ -174,11 +177,11 @@ func (m *PrometheusRequest) GetQuery() string { return "" } -func (m *PrometheusRequest) GetCachingOptions() *CachingOptions { +func (m *PrometheusRequest) GetCachingOptions() CachingOptions { if m != nil { return m.CachingOptions } - return nil + return CachingOptions{} } func (m *PrometheusRequest) GetHeaders() []*PrometheusRequestHeader { @@ -196,9 +199,7 @@ func (m *PrometheusRequest) GetStats() string { } type PrometheusResponseHeader struct { - // @gotags: json:"-" - Name string `protobuf:"bytes,1,opt,name=Name,proto3" json:"-"` - // @gotags: json:"-" + Name string `protobuf:"bytes,1,opt,name=Name,proto3" json:"-"` Values []string `protobuf:"bytes,2,rep,name=Values,proto3" json:"-"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` @@ -253,21 +254,15 @@ func (m *PrometheusResponseHeader) GetValues() []string { } type PrometheusResponse struct { - // @gotags: json:"status" - Status string `protobuf:"bytes,1,opt,name=Status,proto3" json:"status"` - // @gotags: json:"data,omitempty" - Data *PrometheusData `protobuf:"bytes,2,opt,name=Data,proto3" json:"data,omitempty"` - // @gotags: json:"errorType,omitempty" - ErrorType string `protobuf:"bytes,3,opt,name=ErrorType,proto3" 
json:"errorType,omitempty"` - // @gotags: json:"error,omitempty" - Error string `protobuf:"bytes,4,opt,name=Error,proto3" json:"error,omitempty"` - // @gotags: json:"-" - Headers []*PrometheusResponseHeader `protobuf:"bytes,5,rep,name=Headers,proto3" json:"-"` - // @gotags: json:"warnings,omitempty" - Warnings []string `protobuf:"bytes,6,rep,name=Warnings,proto3" json:"warnings,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Status string `protobuf:"bytes,1,opt,name=Status,proto3" json:"status"` + Data PrometheusData `protobuf:"bytes,2,opt,name=Data,proto3" json:"data,omitempty"` + ErrorType string `protobuf:"bytes,3,opt,name=ErrorType,proto3" json:"errorType,omitempty"` + Error string `protobuf:"bytes,4,opt,name=Error,proto3" json:"error,omitempty"` + Headers []*PrometheusResponseHeader `protobuf:"bytes,5,rep,name=Headers,proto3" json:"-"` + Warnings []string `protobuf:"bytes,6,rep,name=Warnings,proto3" json:"warnings,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *PrometheusResponse) Reset() { *m = PrometheusResponse{} } @@ -310,11 +305,11 @@ func (m *PrometheusResponse) GetStatus() string { return "" } -func (m *PrometheusResponse) GetData() *PrometheusData { +func (m *PrometheusResponse) GetData() PrometheusData { if m != nil { return m.Data } - return nil + return PrometheusData{} } func (m *PrometheusResponse) GetErrorType() string { @@ -346,17 +341,13 @@ func (m *PrometheusResponse) GetWarnings() []string { } type PrometheusData struct { - // @gotags: json:"resultType" - ResultType string `protobuf:"bytes,1,opt,name=ResultType,proto3" json:"resultType"` - // @gotags: json:"result" - Result []*SampleStream `protobuf:"bytes,2,rep,name=Result,proto3" json:"result"` - // @gotags: json:"stats,omitempty" - Stats *PrometheusResponseStats `protobuf:"bytes,3,opt,name=stats,proto3" json:"stats,omitempty"` - // @gotags: json:"analysis" - Analysis *Analysis `protobuf:"bytes,4,opt,name=analysis,proto3" json:"analysis"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + ResultType string `protobuf:"bytes,1,opt,name=ResultType,proto3" json:"resultType"` + Result []SampleStream `protobuf:"bytes,2,rep,name=Result,proto3" json:"result"` + Stats *PrometheusResponseStats `protobuf:"bytes,3,opt,name=stats,proto3" json:"stats,omitempty"` + Analysis *Analysis `protobuf:"bytes,4,opt,name=analysis,proto3" json:"analysis"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *PrometheusData) Reset() { *m = PrometheusData{} } @@ -399,7 +390,7 @@ func (m *PrometheusData) GetResultType() string { return "" } -func (m *PrometheusData) GetResult() []*SampleStream { +func (m *PrometheusData) GetResult() []SampleStream { if m != nil { return m.Result } @@ -421,21 +412,15 @@ func (m *PrometheusData) GetAnalysis() *Analysis { } type PrometheusInstantQueryResponse struct { - // @gotags: json:"status" - Status string `protobuf:"bytes,1,opt,name=Status,proto3" json:"status"` - // @gotags: json:"data,omitempty" - Data *PrometheusInstantQueryData `protobuf:"bytes,2,opt,name=Data,proto3" json:"data,omitempty"` - // @gotags: json:"errorType,omitempty" - ErrorType string `protobuf:"bytes,3,opt,name=ErrorType,proto3" json:"errorType,omitempty"` - // @gotags: json:"error,omitempty" - Error string 
`protobuf:"bytes,4,opt,name=Error,proto3" json:"error,omitempty"` - // @gotags: json:"-" - Headers []*PrometheusResponseHeader `protobuf:"bytes,5,rep,name=Headers,proto3" json:"-"` - // @gotags: json:"warnings,omitempty" - Warnings []string `protobuf:"bytes,6,rep,name=Warnings,proto3" json:"warnings,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Status string `protobuf:"bytes,1,opt,name=Status,proto3" json:"status"` + Data PrometheusInstantQueryData `protobuf:"bytes,2,opt,name=Data,proto3" json:"data,omitempty"` + ErrorType string `protobuf:"bytes,3,opt,name=ErrorType,proto3" json:"errorType,omitempty"` + Error string `protobuf:"bytes,4,opt,name=Error,proto3" json:"error,omitempty"` + Headers []*PrometheusResponseHeader `protobuf:"bytes,5,rep,name=Headers,proto3" json:"-"` + Warnings []string `protobuf:"bytes,6,rep,name=Warnings,proto3" json:"warnings,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *PrometheusInstantQueryResponse) Reset() { *m = PrometheusInstantQueryResponse{} } @@ -478,11 +463,11 @@ func (m *PrometheusInstantQueryResponse) GetStatus() string { return "" } -func (m *PrometheusInstantQueryResponse) GetData() *PrometheusInstantQueryData { +func (m *PrometheusInstantQueryResponse) GetData() PrometheusInstantQueryData { if m != nil { return m.Data } - return nil + return PrometheusInstantQueryData{} } func (m *PrometheusInstantQueryResponse) GetErrorType() string { @@ -514,17 +499,13 @@ func (m *PrometheusInstantQueryResponse) GetWarnings() []string { } type PrometheusInstantQueryData struct { - // @gotags: json:"resultType" - ResultType string `protobuf:"bytes,1,opt,name=ResultType,proto3" json:"resultType"` - // @gotags: json:"result" - Result *PrometheusInstantQueryResult `protobuf:"bytes,2,opt,name=Result,proto3" json:"result"` - // @gotags: json:"stats,omitempty" - Stats *PrometheusResponseStats `protobuf:"bytes,3,opt,name=stats,proto3" json:"stats,omitempty"` - // @gotags: json:"analysis" - Analysis *Analysis `protobuf:"bytes,4,opt,name=analysis,proto3" json:"analysis"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + ResultType string `protobuf:"bytes,1,opt,name=ResultType,proto3" json:"resultType"` + Result PrometheusInstantQueryResult `protobuf:"bytes,2,opt,name=Result,proto3" json:"result"` + Stats *PrometheusResponseStats `protobuf:"bytes,3,opt,name=stats,proto3" json:"stats,omitempty"` + Analysis *Analysis `protobuf:"bytes,4,opt,name=analysis,proto3" json:"analysis"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *PrometheusInstantQueryData) Reset() { *m = PrometheusInstantQueryData{} } @@ -567,11 +548,11 @@ func (m *PrometheusInstantQueryData) GetResultType() string { return "" } -func (m *PrometheusInstantQueryData) GetResult() *PrometheusInstantQueryResult { +func (m *PrometheusInstantQueryData) GetResult() PrometheusInstantQueryResult { if m != nil { return m.Result } - return nil + return PrometheusInstantQueryResult{} } func (m *PrometheusInstantQueryData) GetStats() *PrometheusResponseStats { @@ -798,7 +779,6 @@ func (m *Matrix) GetSampleStreams() []*SampleStream { } type PrometheusResponseStats struct { - // @gotags: json:"samples" Samples *PrometheusResponseSamplesStats `protobuf:"bytes,1,opt,name=samples,proto3" json:"samples"` XXX_NoUnkeyedLiteral 
struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` @@ -846,9 +826,7 @@ func (m *PrometheusResponseStats) GetSamples() *PrometheusResponseSamplesStats { } type PrometheusResponseSamplesStats struct { - // @gotags: json:"totalQueryableSamples" - TotalQueryableSamples int64 `protobuf:"varint,1,opt,name=totalQueryableSamples,proto3" json:"totalQueryableSamples"` - // @gotags: json:"totalQueryableSamplesPerStep" + TotalQueryableSamples int64 `protobuf:"varint,1,opt,name=totalQueryableSamples,proto3" json:"totalQueryableSamples"` TotalQueryableSamplesPerStep []*PrometheusResponseQueryableSamplesStatsPerStep `protobuf:"bytes,2,rep,name=totalQueryableSamplesPerStep,proto3" json:"totalQueryableSamplesPerStep"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` @@ -962,15 +940,12 @@ func (m *PrometheusResponseQueryableSamplesStatsPerStep) GetTimestampMs() int64 } type SampleStream struct { - // @gotags: json:"metric" - Labels []*cortexpb.LabelPair `protobuf:"bytes,1,rep,name=labels,proto3" json:"metric"` - // @gotags: json:"values" - Samples []*cortexpb.Sample `protobuf:"bytes,2,rep,name=samples,proto3" json:"values"` - // @gotags: json:"histogram" - Histograms []*SampleHistogramPair `protobuf:"bytes,3,rep,name=histograms,proto3" json:"histogram"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Labels []github_com_thanos_io_thanos_internal_cortex_cortexpb.LabelAdapter `protobuf:"bytes,1,rep,name=labels,proto3,customtype=github.com/thanos-io/thanos/internal/cortex/cortexpb.LabelAdapter" json:"metric"` + Samples []cortexpb.Sample `protobuf:"bytes,2,rep,name=samples,proto3" json:"values"` + Histograms []SampleHistogramPair `protobuf:"bytes,3,rep,name=histograms,proto3" json:"histogram"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *SampleStream) Reset() { *m = SampleStream{} } @@ -1006,21 +981,14 @@ func (m *SampleStream) XXX_DiscardUnknown() { var xxx_messageInfo_SampleStream proto.InternalMessageInfo -func (m *SampleStream) GetLabels() []*cortexpb.LabelPair { - if m != nil { - return m.Labels - } - return nil -} - -func (m *SampleStream) GetSamples() []*cortexpb.Sample { +func (m *SampleStream) GetSamples() []cortexpb.Sample { if m != nil { return m.Samples } return nil } -func (m *SampleStream) GetHistograms() []*SampleHistogramPair { +func (m *SampleStream) GetHistograms() []SampleHistogramPair { if m != nil { return m.Histograms } @@ -1028,16 +996,13 @@ func (m *SampleStream) GetHistograms() []*SampleHistogramPair { } type Sample struct { - // @gotags: json:"metric" - Labels []*cortexpb.LabelPair `protobuf:"bytes,1,rep,name=labels,proto3" json:"metric"` - // @gotags: json:"value" - SampleValue float64 `protobuf:"fixed64,2,opt,name=sampleValue,proto3" json:"value"` - Timestamp int64 `protobuf:"varint,3,opt,name=timestamp,proto3" json:"timestamp,omitempty"` - // @gotags: json:"histogram" - Histogram *SampleHistogram `protobuf:"bytes,4,opt,name=histogram,proto3" json:"histogram"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Labels []github_com_thanos_io_thanos_internal_cortex_cortexpb.LabelAdapter `protobuf:"bytes,1,rep,name=labels,proto3,customtype=github.com/thanos-io/thanos/internal/cortex/cortexpb.LabelAdapter" json:"metric"` + SampleValue float64 `protobuf:"fixed64,2,opt,name=sampleValue,proto3" json:"value"` + Timestamp int64 
`protobuf:"varint,3,opt,name=timestamp,proto3" json:"timestamp,omitempty"` + Histogram *SampleHistogram `protobuf:"bytes,4,opt,name=histogram,proto3" json:"histogram"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *Sample) Reset() { *m = Sample{} } @@ -1073,13 +1038,6 @@ func (m *Sample) XXX_DiscardUnknown() { var xxx_messageInfo_Sample proto.InternalMessageInfo -func (m *Sample) GetLabels() []*cortexpb.LabelPair { - if m != nil { - return m.Labels - } - return nil -} - func (m *Sample) GetSampleValue() float64 { if m != nil { return m.SampleValue @@ -1157,11 +1115,11 @@ func (m *StringSample) GetTimestampMs() int64 { } type SampleHistogramPair struct { - Timestamp int64 `protobuf:"varint,1,opt,name=timestamp,proto3" json:"timestamp,omitempty"` - Histogram *SampleHistogram `protobuf:"bytes,2,opt,name=histogram,proto3" json:"histogram,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Timestamp int64 `protobuf:"varint,1,opt,name=timestamp,proto3" json:"timestamp,omitempty"` + Histogram SampleHistogram `protobuf:"bytes,2,opt,name=histogram,proto3" json:"histogram"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *SampleHistogramPair) Reset() { *m = SampleHistogramPair{} } @@ -1204,11 +1162,11 @@ func (m *SampleHistogramPair) GetTimestamp() int64 { return 0 } -func (m *SampleHistogramPair) GetHistogram() *SampleHistogram { +func (m *SampleHistogramPair) GetHistogram() SampleHistogram { if m != nil { return m.Histogram } - return nil + return SampleHistogram{} } type SampleHistogram struct { @@ -1346,14 +1304,12 @@ func (m *HistogramBucket) GetCount() float64 { } type CachedResponse struct { - // @gotags: json:"key" Key string `protobuf:"bytes,1,opt,name=key,proto3" json:"key"` // List of cached responses; non-overlapping and in order. 
- // @gotags: json:"extents" - Extents []*Extent `protobuf:"bytes,2,rep,name=extents,proto3" json:"extents"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Extents []Extent `protobuf:"bytes,2,rep,name=extents,proto3" json:"extents"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *CachedResponse) Reset() { *m = CachedResponse{} } @@ -1396,7 +1352,7 @@ func (m *CachedResponse) GetKey() string { return "" } -func (m *CachedResponse) GetExtents() []*Extent { +func (m *CachedResponse) GetExtents() []Extent { if m != nil { return m.Extents } @@ -1404,13 +1360,9 @@ func (m *CachedResponse) GetExtents() []*Extent { } type Extent struct { - // @gotags: json:"start" - Start int64 `protobuf:"varint,1,opt,name=start,proto3" json:"start"` - // @gotags: json:"end" - End int64 `protobuf:"varint,2,opt,name=end,proto3" json:"end"` - // @gotags: json:"-" - TraceId string `protobuf:"bytes,4,opt,name=trace_id,json=traceId,proto3" json:"-"` - // @gotags: json:"response" + Start int64 `protobuf:"varint,1,opt,name=start,proto3" json:"start"` + End int64 `protobuf:"varint,2,opt,name=end,proto3" json:"end"` + TraceId string `protobuf:"bytes,4,opt,name=trace_id,json=traceId,proto3" json:"-"` Response *types.Any `protobuf:"bytes,5,opt,name=response,proto3" json:"response"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` @@ -1526,9 +1478,7 @@ func (m *CachingOptions) GetDisabled() bool { } type Explanation struct { - // @gotags: json:"name" - Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name"` - // @gotags: json:"children" + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name"` Children []*Explanation `protobuf:"bytes,2,rep,name=children,proto3" json:"children"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` @@ -1582,134 +1532,9 @@ func (m *Explanation) GetChildren() []*Explanation { return nil } -// A Duration represents a signed, fixed-length span of time represented -// as a count of seconds and fractions of seconds at nanosecond -// resolution. It is independent of any calendar and concepts like "day" -// or "month". It is related to Timestamp in that the difference between -// two Timestamp values is a Duration and it can be added or subtracted -// from a Timestamp. Range is approximately +-10,000 years. -// -// # Examples -// -// Example 1: Compute Duration from two Timestamps in pseudo code. -// -// Timestamp start = ...; -// Timestamp end = ...; -// Duration duration = ...; -// -// duration.seconds = end.seconds - start.seconds; -// duration.nanos = end.nanos - start.nanos; -// -// if (duration.seconds < 0 && duration.nanos > 0) { -// duration.seconds += 1; -// duration.nanos -= 1000000000; -// } else if (duration.seconds > 0 && duration.nanos < 0) { -// duration.seconds -= 1; -// duration.nanos += 1000000000; -// } -// -// Example 2: Compute Timestamp from Timestamp + Duration in pseudo code. -// -// Timestamp start = ...; -// Duration duration = ...; -// Timestamp end = ...; -// -// end.seconds = start.seconds + duration.seconds; -// end.nanos = start.nanos + duration.nanos; -// -// if (end.nanos < 0) { -// end.seconds -= 1; -// end.nanos += 1000000000; -// } else if (end.nanos >= 1000000000) { -// end.seconds += 1; -// end.nanos -= 1000000000; -// } -// -// Example 3: Compute Duration from datetime.timedelta in Python. 
-// -// td = datetime.timedelta(days=3, minutes=10) -// duration = Duration() -// duration.FromTimedelta(td) -// -// # JSON Mapping -// -// In JSON format, the Duration type is encoded as a string rather than an -// object, where the string ends in the suffix "s" (indicating seconds) and -// is preceded by the number of seconds, with nanoseconds expressed as -// fractional seconds. For example, 3 seconds with 0 nanoseconds should be -// encoded in JSON format as "3s", while 3 seconds and 1 nanosecond should -// be expressed in JSON format as "3.000000001s", and 3 seconds and 1 -// microsecond should be expressed in JSON format as "3.000001s". -type Duration struct { - // Signed seconds of the span of time. Must be from -315,576,000,000 - // to +315,576,000,000 inclusive. Note: these bounds are computed from: - // 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years - Seconds int64 `protobuf:"varint,1,opt,name=seconds,proto3" json:"seconds,omitempty"` - // Signed fractions of a second at nanosecond resolution of the span - // of time. Durations less than one second are represented with a 0 - // `seconds` field and a positive or negative `nanos` field. For durations - // of one second or more, a non-zero value for the `nanos` field must be - // of the same sign as the `seconds` field. Must be from -999,999,999 - // to +999,999,999 inclusive. - Nanos int32 `protobuf:"varint,2,opt,name=nanos,proto3" json:"nanos,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *Duration) Reset() { *m = Duration{} } -func (m *Duration) String() string { return proto.CompactTextString(m) } -func (*Duration) ProtoMessage() {} -func (*Duration) Descriptor() ([]byte, []int) { - return fileDescriptor_9af7607b46ac39b7, []int{23} -} -func (m *Duration) XXX_Unmarshal(b []byte) error { - return m.Unmarshal(b) -} -func (m *Duration) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_Duration.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalToSizedBuffer(b) - if err != nil { - return nil, err - } - return b[:n], nil - } -} -func (m *Duration) XXX_Merge(src proto.Message) { - xxx_messageInfo_Duration.Merge(m, src) -} -func (m *Duration) XXX_Size() int { - return m.Size() -} -func (m *Duration) XXX_DiscardUnknown() { - xxx_messageInfo_Duration.DiscardUnknown(m) -} - -var xxx_messageInfo_Duration proto.InternalMessageInfo - -func (m *Duration) GetSeconds() int64 { - if m != nil { - return m.Seconds - } - return 0 -} - -func (m *Duration) GetNanos() int32 { - if m != nil { - return m.Nanos - } - return 0 -} - type Analysis struct { - // @gotags: json:"name" - Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name"` - // @gotags: json:"executionTime" - ExecutionTime *Duration `protobuf:"bytes,2,opt,name=executionTime,proto3" json:"executionTime"` - // @gotags: json:"children" + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name"` + ExecutionTime Duration `protobuf:"bytes,2,opt,name=executionTime,proto3,customtype=Duration" json:"executionTime"` Children []*Analysis `protobuf:"bytes,3,rep,name=children,proto3" json:"children"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` @@ -1720,7 +1545,7 @@ func (m *Analysis) Reset() { *m = Analysis{} } func (m *Analysis) String() string { return proto.CompactTextString(m) } func (*Analysis) ProtoMessage() {} func (*Analysis) Descriptor() ([]byte, []int) { 
- return fileDescriptor_9af7607b46ac39b7, []int{24} + return fileDescriptor_9af7607b46ac39b7, []int{23} } func (m *Analysis) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1756,13 +1581,6 @@ func (m *Analysis) GetName() string { return "" } -func (m *Analysis) GetExecutionTime() *Duration { - if m != nil { - return m.ExecutionTime - } - return nil -} - func (m *Analysis) GetChildren() []*Analysis { if m != nil { return m.Children @@ -1794,7 +1612,6 @@ func init() { proto.RegisterType((*Extent)(nil), "queryrange.Extent") proto.RegisterType((*CachingOptions)(nil), "queryrange.CachingOptions") proto.RegisterType((*Explanation)(nil), "queryrange.Explanation") - proto.RegisterType((*Duration)(nil), "queryrange.Duration") proto.RegisterType((*Analysis)(nil), "queryrange.Analysis") } @@ -1803,85 +1620,102 @@ func init() { } var fileDescriptor_9af7607b46ac39b7 = []byte{ - // 1235 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xcc, 0x57, 0x4f, 0x6f, 0x1b, 0x45, - 0x14, 0xef, 0xda, 0xce, 0xda, 0x7e, 0x4e, 0xdb, 0x30, 0x2d, 0x74, 0x6b, 0xa2, 0x60, 0xb6, 0x08, - 0x59, 0xa1, 0x72, 0xab, 0x14, 0x90, 0x1a, 0x89, 0x42, 0x43, 0x03, 0x29, 0xa2, 0x10, 0x26, 0x55, - 0x90, 0xb8, 0x54, 0x63, 0x7b, 0x70, 0x56, 0x5d, 0xcf, 0x6e, 0x67, 0x66, 0x4b, 0x7c, 0xe1, 0x02, - 0x47, 0x3e, 0x08, 0x47, 0x3e, 0x03, 0x27, 0x6e, 0x70, 0xe2, 0x8c, 0x7a, 0x85, 0x03, 0x1f, 0x01, - 0xcd, 0x9f, 0xdd, 0x9d, 0x75, 0x1c, 0x47, 0xe1, 0x02, 0xa7, 0x9d, 0x37, 0xef, 0x37, 0x6f, 0x7e, - 0xf3, 0x7b, 0xf3, 0xe7, 0x2d, 0x6c, 0x8e, 0x12, 0x2e, 0xe9, 0xf1, 0xad, 0x67, 0x19, 0xe5, 0x11, - 0xe5, 0xfa, 0x3b, 0xe3, 0x84, 0x4d, 0xa8, 0xd3, 0x1c, 0xa4, 0x3c, 0x91, 0x09, 0x82, 0xb2, 0xa7, - 0xbb, 0x6e, 0xc7, 0x99, 0x4f, 0x3a, 0xb4, 0x0d, 0x83, 0xec, 0x5e, 0x9f, 0x24, 0xc9, 0x24, 0xa6, - 0xb7, 0xb4, 0x35, 0xcc, 0xbe, 0xbe, 0x45, 0xd8, 0xcc, 0xb8, 0xc2, 0x5d, 0xb8, 0xb6, 0xcf, 0x93, - 0x29, 0x95, 0x47, 0x34, 0x13, 0x98, 0x3e, 0xcb, 0xa8, 0x90, 0x7b, 0x94, 0x8c, 0x29, 0x47, 0x08, - 0x1a, 0x9f, 0x91, 0x29, 0x0d, 0xbc, 0x9e, 0xd7, 0x6f, 0x63, 0xdd, 0x46, 0xaf, 0x80, 0x7f, 0x48, - 0xe2, 0x8c, 0x8a, 0xa0, 0xd6, 0xab, 0xf7, 0xdb, 0xd8, 0x5a, 0xe1, 0xcf, 0x35, 0x78, 0xe9, 0x44, - 0x1c, 0x15, 0x21, 0x25, 0xf2, 0x28, 0x8f, 0xa0, 0xda, 0xe8, 0x2a, 0xac, 0x08, 0x49, 0xb8, 0x0c, - 0x6a, 0x3d, 0xaf, 0x5f, 0xc7, 0xc6, 0x40, 0x6b, 0x50, 0xa7, 0x6c, 0x1c, 0xd4, 0x75, 0x9f, 0x6a, - 0xaa, 0xb1, 0x42, 0xd2, 0x34, 0x68, 0xe8, 0x2e, 0xdd, 0x46, 0x03, 0x68, 0xca, 0x68, 0x4a, 0x93, - 0x4c, 0x06, 0x2b, 0x3d, 0xaf, 0xdf, 0xd9, 0xba, 0x3a, 0x70, 0x54, 0x79, 0x90, 0x71, 0x22, 0xa3, - 0x84, 0xe1, 0x1c, 0xa4, 0xe6, 0xd2, 0xfe, 0xc0, 0xd7, 0x04, 0x8c, 0x81, 0x76, 0xe0, 0xd2, 0x88, - 0x8c, 0x8e, 0x22, 0x36, 0xf9, 0x3c, 0x55, 0x78, 0x11, 0x34, 0x75, 0xb0, 0xae, 0x1b, 0xec, 0xc3, - 0x0a, 0x02, 0xcf, 0x8d, 0x40, 0xef, 0x41, 0xd3, 0xa8, 0x24, 0x82, 0x56, 0xaf, 0xde, 0xef, 0x6c, - 0xdd, 0x70, 0x07, 0x9f, 0xa2, 0x28, 0xce, 0xc7, 0x58, 0x11, 0xa4, 0x08, 0xda, 0x86, 0x98, 0x36, - 0xc2, 0x8f, 0x20, 0x70, 0x47, 0x8a, 0x34, 0x61, 0x82, 0xfe, 0x8b, 0x64, 0xfc, 0xe5, 0x01, 0x3a, - 0x19, 0x48, 0xc1, 0x0f, 0x24, 0x91, 0x99, 0xb0, 0x41, 0xac, 0x85, 0x06, 0xd0, 0x78, 0x40, 0x24, - 0xd1, 0x09, 0x99, 0x53, 0xa1, 0x8c, 0xa2, 0x10, 0x58, 0xe3, 0xd0, 0x3a, 0xb4, 0x77, 0x39, 0x4f, - 0xf8, 0xe3, 0x59, 0x4a, 0x75, 0xc6, 0xda, 0xb8, 0xec, 0x50, 0x4b, 0xd3, 0x86, 0x4e, 0x5c, 0x1b, - 0x1b, 0x03, 0xdd, 0x2b, 0xf5, 0x5a, 0xd1, 0x7a, 0xbd, 0x71, 0x9a, 0x5e, 0xee, 0xaa, 0x4b, 0xc1, - 0xba, 0xd0, 0xfa, 0x92, 0x70, 0x16, 0xb1, 0x89, 0x08, 0x7c, 0xbd, 
0xd8, 0xc2, 0x0e, 0x7f, 0xf5, - 0xe0, 0x52, 0x95, 0x28, 0xda, 0x00, 0xc0, 0x54, 0x64, 0xb1, 0xd4, 0x1c, 0xcd, 0x72, 0x9d, 0x1e, - 0x74, 0x1b, 0x7c, 0x63, 0x69, 0xe5, 0x3a, 0x5b, 0x81, 0xcb, 0xe6, 0x80, 0x4c, 0xd3, 0x98, 0x1e, - 0x48, 0x4e, 0xc9, 0x14, 0x5b, 0x1c, 0xba, 0x9b, 0x67, 0xac, 0xae, 0x55, 0xba, 0xb1, 0x9c, 0xbe, - 0x52, 0x56, 0xd8, 0xb4, 0xa2, 0xdb, 0xd0, 0x22, 0x8c, 0xc4, 0x33, 0x11, 0x09, 0x2d, 0xca, 0xdc, - 0xb6, 0xbd, 0x6f, 0x7d, 0xb8, 0x40, 0x85, 0xdf, 0xd7, 0x60, 0xa3, 0x0c, 0xfa, 0x90, 0x09, 0x49, - 0x98, 0xfc, 0x42, 0x0d, 0x39, 0x33, 0x99, 0xdb, 0x95, 0x64, 0xbe, 0xb9, 0x98, 0xa6, 0x1b, 0xf1, - 0x7f, 0x9c, 0xd8, 0x3f, 0x3d, 0xe8, 0x9e, 0x4e, 0xfa, 0xcc, 0x24, 0x7f, 0xe0, 0x24, 0x59, 0x89, - 0xd1, 0x3f, 0x5b, 0x0c, 0x83, 0xff, 0x6f, 0x92, 0xfe, 0xb7, 0x07, 0xeb, 0xcb, 0x58, 0xa1, 0x4d, - 0xf0, 0xc5, 0x88, 0xc4, 0x84, 0xeb, 0xb5, 0x76, 0xb6, 0xd6, 0x06, 0xf9, 0x6d, 0x6f, 0xb7, 0xec, - 0xde, 0x05, 0x6c, 0x11, 0xe8, 0x1e, 0xac, 0x0a, 0xc9, 0x23, 0x36, 0x31, 0x1e, 0xab, 0x40, 0x75, - 0x9b, 0x3b, 0xfe, 0xbd, 0x0b, 0xb8, 0x82, 0x47, 0x37, 0xc1, 0x7f, 0x4e, 0x47, 0x32, 0xe1, 0x76, - 0xe9, 0xc8, 0x1d, 0x79, 0xa8, 0x3d, 0x6a, 0x36, 0x83, 0x51, 0xe8, 0x29, 0x91, 0x3c, 0x3a, 0xb6, - 0x4b, 0xad, 0xa0, 0x1f, 0x69, 0x8f, 0x42, 0x1b, 0xcc, 0x4e, 0x0b, 0x7c, 0xae, 0x57, 0x14, 0xbe, - 0x0b, 0xfe, 0x61, 0x1e, 0xa1, 0x29, 0xf4, 0xcc, 0x6a, 0x3f, 0xd7, 0xe7, 0x43, 0x18, 0x52, 0x38, - 0x87, 0x84, 0x7b, 0xe0, 0x9b, 0xa8, 0xe8, 0x1e, 0x5c, 0x14, 0xce, 0x71, 0xcd, 0x47, 0x9f, 0x7e, - 0x9e, 0xab, 0xf0, 0xf0, 0x49, 0xf5, 0xf9, 0x73, 0x12, 0x89, 0x1e, 0xb8, 0x94, 0xd4, 0xaa, 0x36, - 0xcf, 0x48, 0xbf, 0x01, 0x9b, 0x5d, 0x50, 0x50, 0xfd, 0xdd, 0x73, 0x8f, 0xf2, 0x22, 0x2c, 0x7a, - 0x1b, 0x5e, 0x96, 0x89, 0x24, 0xb1, 0xce, 0x35, 0x19, 0xc6, 0xb9, 0x57, 0x4f, 0x5b, 0xc7, 0x8b, - 0x9d, 0xe8, 0x5b, 0x58, 0x5f, 0xe8, 0xd8, 0xa7, 0xfc, 0x40, 0xbd, 0x9b, 0xe6, 0x62, 0xdb, 0x5e, - 0xce, 0x79, 0x7e, 0xb0, 0x26, 0x64, 0x23, 0xe0, 0xa5, 0xf1, 0xc3, 0x08, 0xce, 0x19, 0x4f, 0x5d, - 0x20, 0xcf, 0xd5, 0x03, 0x65, 0xd7, 0x65, 0x0c, 0xf4, 0x3a, 0xac, 0xaa, 0xe7, 0x5a, 0x48, 0x32, - 0x4d, 0x9f, 0x4c, 0x85, 0x2d, 0x0b, 0x3a, 0x45, 0xdf, 0x23, 0x11, 0xfe, 0xe8, 0xc1, 0xaa, 0x9b, - 0x44, 0xf4, 0x16, 0xf8, 0x31, 0x19, 0xd2, 0x38, 0x4f, 0xf7, 0x95, 0xf2, 0x24, 0x7c, 0xaa, 0xfa, - 0xf7, 0x49, 0xc4, 0xb1, 0x85, 0xa0, 0xcd, 0x32, 0x8f, 0x46, 0x93, 0x13, 0xe7, 0xa6, 0xc8, 0x16, - 0x7a, 0x1f, 0xe0, 0x28, 0x12, 0x32, 0x99, 0x70, 0xb5, 0x97, 0xea, 0x1a, 0xfe, 0xda, 0xc9, 0xbd, - 0xb4, 0x97, 0x63, 0xf4, 0x44, 0xce, 0x90, 0xf0, 0x27, 0x0f, 0x7c, 0x7b, 0x84, 0xce, 0x45, 0xb2, - 0x07, 0x1d, 0xc3, 0x41, 0x3f, 0xe1, 0x5a, 0x04, 0x0f, 0xbb, 0x5d, 0xea, 0x72, 0x2e, 0x34, 0xb1, - 0x75, 0x52, 0xd9, 0x81, 0xee, 0x42, 0xbb, 0x60, 0x61, 0x0f, 0xe1, 0xab, 0x4b, 0x78, 0xe3, 0x12, - 0x1d, 0x7e, 0x0c, 0xab, 0xee, 0x55, 0x50, 0x4d, 0x53, 0xfb, 0x1c, 0x69, 0x62, 0x70, 0x65, 0x81, - 0x3c, 0x55, 0xe2, 0xde, 0x52, 0xe2, 0xb5, 0x73, 0x11, 0x4f, 0xe1, 0xf2, 0x9c, 0x57, 0x71, 0x1f, - 0x25, 0x19, 0x93, 0x7a, 0x1e, 0x0f, 0x1b, 0x43, 0x15, 0x97, 0x22, 0x9b, 0x5a, 0x51, 0x55, 0x13, - 0xbd, 0x03, 0xcd, 0x61, 0x36, 0x7a, 0x4a, 0x65, 0x9e, 0xe4, 0xca, 0x9c, 0x45, 0xbc, 0x1d, 0x8d, - 0xc1, 0x39, 0x36, 0x14, 0x70, 0x79, 0xce, 0xa7, 0x1e, 0xa1, 0x61, 0x92, 0xb1, 0x31, 0xe1, 0x91, - 0x3d, 0xb1, 0x2b, 0xd8, 0xe9, 0x51, 0x8c, 0xe2, 0xe4, 0x1b, 0xca, 0xed, 0xec, 0xc6, 0x50, 0xbd, - 0x59, 0x9a, 0x52, 0x73, 0xbb, 0x7a, 0xd8, 0x18, 0x25, 0xfb, 0x86, 0xc3, 0x3e, 0xdc, 0x87, 0x4b, - 0xaa, 0x18, 0xa5, 0xe3, 0xe2, 0xed, 0x5f, 0x83, 0xfa, 0x53, 0x3a, 0xb3, 0xf9, 0x51, 0x4d, 
0x75, - 0x7d, 0xd2, 0x63, 0x49, 0x99, 0xcc, 0xf7, 0x78, 0xe5, 0xfa, 0xdc, 0xd5, 0x2e, 0x9c, 0x43, 0xc2, - 0xef, 0x3c, 0xf0, 0x4d, 0x5f, 0x59, 0x8d, 0x7b, 0x0b, 0xaa, 0xf1, 0x5a, 0x59, 0x8d, 0x5f, 0x87, - 0x96, 0xe4, 0x64, 0x44, 0x9f, 0x44, 0x63, 0xfb, 0xfe, 0x37, 0xb5, 0xfd, 0x70, 0xac, 0x5e, 0x3a, - 0x6e, 0x99, 0x15, 0x55, 0xb9, 0xf9, 0xdf, 0x18, 0xe4, 0xff, 0x1b, 0x83, 0xfb, 0x6c, 0x86, 0x0b, - 0xd4, 0x27, 0x8d, 0x56, 0x7d, 0xad, 0x11, 0xde, 0x34, 0xeb, 0x72, 0x8a, 0xea, 0x2e, 0xb4, 0xc6, - 0x91, 0x50, 0xd7, 0xc7, 0x58, 0xf3, 0x69, 0xe1, 0xc2, 0x0e, 0x0f, 0xa1, 0xb3, 0x7b, 0x9c, 0xc6, - 0x84, 0xe9, 0x12, 0x5f, 0x95, 0xc3, 0xcc, 0x29, 0x87, 0x55, 0x1b, 0xdd, 0x81, 0xd6, 0xe8, 0x28, - 0x8a, 0xc7, 0x9c, 0x32, 0xab, 0xc2, 0xb5, 0xaa, 0x0a, 0xc5, 0x70, 0x5c, 0x00, 0xc3, 0x6d, 0x68, - 0xe5, 0xff, 0x0d, 0x28, 0x80, 0xa6, 0xa0, 0xa3, 0x84, 0x8d, 0xf3, 0xab, 0x37, 0x37, 0x95, 0x4c, - 0x8c, 0xb0, 0xc4, 0x6c, 0xfb, 0x15, 0x6c, 0x8c, 0xf0, 0x07, 0x0f, 0x5a, 0xf9, 0x43, 0xbe, 0x90, - 0xd1, 0x36, 0x5c, 0xa4, 0xc7, 0x74, 0x94, 0xa9, 0xe8, 0x8f, 0xa3, 0x69, 0xfe, 0x0c, 0x2f, 0xfe, - 0x6b, 0xa9, 0x42, 0x95, 0xac, 0xc5, 0x6a, 0xcc, 0x1e, 0x3d, 0xa5, 0x80, 0xc8, 0x51, 0x3b, 0xc1, - 0x2f, 0x2f, 0x36, 0xbc, 0xdf, 0x5e, 0x6c, 0x78, 0x7f, 0xbc, 0xd8, 0xf0, 0xbe, 0x72, 0xfe, 0x0e, - 0x87, 0xbe, 0x4e, 0xc4, 0x9d, 0x7f, 0x02, 0x00, 0x00, 0xff, 0xff, 0x35, 0x8c, 0x05, 0x1e, 0x5e, - 0x0e, 0x00, 0x00, + // 1505 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe4, 0x58, 0xcd, 0x6f, 0xdb, 0x46, + 0x16, 0x0f, 0x25, 0x99, 0x96, 0x9e, 0x1c, 0x3b, 0x19, 0x7b, 0x13, 0xd9, 0xeb, 0x35, 0x1d, 0x66, + 0xb1, 0xf0, 0x66, 0x13, 0x19, 0xf0, 0x22, 0x7b, 0x08, 0xb0, 0xd9, 0x35, 0x1b, 0xb7, 0x4e, 0x91, + 0x0f, 0x67, 0x1c, 0xa4, 0x40, 0x2f, 0xc1, 0x48, 0x9a, 0xca, 0x6c, 0x28, 0x92, 0x99, 0x19, 0x26, + 0xf6, 0xad, 0x7f, 0x43, 0xd1, 0x43, 0x8f, 0x2d, 0xd0, 0x63, 0xd1, 0xbf, 0x23, 0x87, 0x1e, 0x7a, + 0x2e, 0x50, 0xb6, 0xc8, 0x91, 0xa7, 0x5e, 0x7a, 0x2f, 0xe6, 0x83, 0x22, 0x29, 0xdb, 0x32, 0x8c, + 0x5e, 0x5a, 0xf4, 0x62, 0xcd, 0xbc, 0xef, 0xf7, 0x7b, 0x6f, 0x86, 0x6f, 0x0c, 0x37, 0xfa, 0x11, + 0x13, 0xf4, 0x70, 0xf3, 0x65, 0x42, 0x99, 0x4f, 0x99, 0xfa, 0x3d, 0x62, 0x24, 0x1c, 0xd2, 0xd2, + 0xb2, 0x1b, 0xb3, 0x48, 0x44, 0x08, 0x0a, 0xca, 0xca, 0xd2, 0x30, 0x1a, 0x46, 0x8a, 0xbc, 0x29, + 0x57, 0x5a, 0x62, 0x65, 0x6d, 0x18, 0x45, 0xc3, 0x80, 0x6e, 0xaa, 0x5d, 0x2f, 0xf9, 0x68, 0x73, + 0x90, 0x30, 0x22, 0xfc, 0x28, 0x34, 0xfc, 0x55, 0xe3, 0x4d, 0xff, 0xc4, 0x3d, 0xb3, 0x30, 0xdc, + 0xe5, 0x49, 0x6d, 0x12, 0x1e, 0x69, 0x96, 0xbb, 0x0f, 0x57, 0xf7, 0x58, 0x34, 0xa2, 0xe2, 0x80, + 0x26, 0x1c, 0xd3, 0x97, 0x09, 0xe5, 0x62, 0x97, 0x92, 0x01, 0x65, 0x68, 0x19, 0x1a, 0x8f, 0xc8, + 0x88, 0x76, 0xac, 0x75, 0x6b, 0xa3, 0xe5, 0xcd, 0x64, 0xa9, 0x63, 0xdd, 0xc2, 0x8a, 0x84, 0xfe, + 0x06, 0xf6, 0x33, 0x12, 0x24, 0x94, 0x77, 0x6a, 0xeb, 0xf5, 0x82, 0x69, 0x88, 0x6e, 0x5a, 0x83, + 0xcb, 0xc7, 0xac, 0x22, 0x04, 0x8d, 0x98, 0x88, 0x03, 0x6d, 0x0f, 0xab, 0x35, 0x5a, 0x82, 0x19, + 0x2e, 0x08, 0x13, 0x9d, 0xda, 0xba, 0xb5, 0x51, 0xc7, 0x7a, 0x83, 0x2e, 0x41, 0x9d, 0x86, 0x83, + 0x4e, 0x5d, 0xd1, 0xe4, 0x52, 0xea, 0x72, 0x41, 0xe3, 0x4e, 0x43, 0x91, 0xd4, 0x1a, 0xfd, 0x17, + 0x66, 0x85, 0x3f, 0xa2, 0x51, 0x22, 0x3a, 0x33, 0xeb, 0xd6, 0x46, 0x7b, 0x6b, 0xb9, 0xab, 0xf3, + 0xec, 0xe6, 0x79, 0x76, 0xef, 0x19, 0x94, 0xbc, 0xe6, 0x9b, 0xd4, 0xb9, 0xf0, 0xf9, 0x8f, 0x8e, + 0x85, 0x73, 0x1d, 0xe9, 0x5a, 0xc1, 0xde, 0xb1, 0x55, 0x3c, 0x7a, 0x83, 0x76, 0x61, 0xbe, 0x4f, + 0xfa, 0x07, 0x7e, 0x38, 0x7c, 0x1c, 0x4b, 0x4d, 
0xde, 0x99, 0x55, 0xb6, 0x57, 0xba, 0xa5, 0xaa, + 0xbd, 0x53, 0x91, 0xf0, 0x1a, 0xd2, 0x38, 0x9e, 0xd0, 0x43, 0xf7, 0x60, 0x56, 0x03, 0xc9, 0x3b, + 0xcd, 0xf5, 0xfa, 0x46, 0x7b, 0xeb, 0x7a, 0xd9, 0xc4, 0x29, 0xa0, 0xe7, 0x48, 0xe6, 0xaa, 0x06, + 0x20, 0xc1, 0x3b, 0x2d, 0x1d, 0xa5, 0xda, 0xb8, 0x4f, 0xa1, 0x53, 0x36, 0xc0, 0xe3, 0x28, 0xe4, + 0xf4, 0x37, 0x97, 0xed, 0x87, 0x1a, 0xa0, 0xe3, 0x66, 0x91, 0x0b, 0xf6, 0xbe, 0x20, 0x22, 0xe1, + 0xc6, 0x24, 0x64, 0xa9, 0x63, 0x73, 0x45, 0xc1, 0x86, 0x83, 0xde, 0x85, 0xc6, 0x3d, 0x22, 0x88, + 0x2a, 0xe3, 0x04, 0x58, 0x85, 0x45, 0x29, 0xe1, 0x5d, 0x91, 0x60, 0x65, 0xa9, 0x33, 0x3f, 0x20, + 0x82, 0xdc, 0x8c, 0x46, 0xbe, 0xa0, 0xa3, 0x58, 0x1c, 0x61, 0xa5, 0x8f, 0x6e, 0x43, 0x6b, 0x87, + 0xb1, 0x88, 0x3d, 0x3d, 0x8a, 0xa9, 0xaa, 0x7f, 0xcb, 0xbb, 0x9a, 0xa5, 0xce, 0x22, 0xcd, 0x89, + 0x25, 0x8d, 0x42, 0x12, 0xfd, 0x13, 0x66, 0xd4, 0x46, 0xf5, 0x47, 0xcb, 0x5b, 0xcc, 0x52, 0x67, + 0x41, 0xa9, 0x94, 0xc4, 0xb5, 0x04, 0xda, 0x29, 0xca, 0x32, 0xa3, 0xca, 0xf2, 0xf7, 0xd3, 0xca, + 0x52, 0x46, 0xf5, 0x58, 0x5d, 0xb6, 0xa0, 0xf9, 0x01, 0x61, 0xa1, 0x1f, 0x0e, 0x79, 0xc7, 0x56, + 0x60, 0x5e, 0xc9, 0x52, 0x07, 0xbd, 0x36, 0xb4, 0x92, 0xdf, 0xb1, 0x9c, 0xfb, 0x69, 0x0d, 0xe6, + 0xab, 0x68, 0xa0, 0x2e, 0x00, 0xa6, 0x3c, 0x09, 0x84, 0x4a, 0x58, 0xe3, 0x3b, 0x9f, 0xa5, 0x0e, + 0xb0, 0x31, 0x15, 0x97, 0x24, 0xd0, 0xff, 0xc1, 0xd6, 0x3b, 0x55, 0xc1, 0xf6, 0x56, 0xa7, 0x1c, + 0xfc, 0x3e, 0x19, 0xc5, 0x01, 0xdd, 0x17, 0x8c, 0x92, 0x91, 0x37, 0x6f, 0x70, 0xb6, 0xb5, 0x25, + 0x6c, 0xf4, 0xd0, 0xa3, 0xbc, 0xa1, 0xea, 0xaa, 0x54, 0xd7, 0xa7, 0x67, 0x2f, 0xcb, 0xcb, 0x35, + 0x9e, 0x4a, 0xab, 0x8c, 0xa7, 0x22, 0xa0, 0xbb, 0xd0, 0x24, 0x21, 0x09, 0x8e, 0xb8, 0xcf, 0x15, + 0xfa, 0xed, 0xad, 0xa5, 0xb2, 0xc9, 0x6d, 0xc3, 0xf3, 0xe6, 0xb2, 0xd4, 0x19, 0x4b, 0xe2, 0xf1, + 0xca, 0xfd, 0xa5, 0x06, 0x6b, 0x85, 0xdf, 0xfb, 0x21, 0x17, 0x24, 0x14, 0x4f, 0xa4, 0x81, 0x73, + 0x35, 0x20, 0xae, 0x34, 0xe0, 0x3f, 0x4e, 0xce, 0xaa, 0x6c, 0xfd, 0xcf, 0xde, 0x8c, 0xdf, 0xd4, + 0x60, 0xe5, 0x74, 0x64, 0xce, 0xdd, 0x98, 0x7b, 0xa5, 0xc6, 0x94, 0x15, 0xd8, 0x38, 0xbb, 0x02, + 0x5a, 0xfe, 0x0f, 0xd3, 0xa8, 0x3f, 0x5b, 0xb0, 0x3a, 0x2d, 0x11, 0x74, 0x03, 0x6c, 0xde, 0x27, + 0x01, 0x61, 0x0a, 0xae, 0xf6, 0xd6, 0xa5, 0x6e, 0xfe, 0x35, 0x36, 0x27, 0x73, 0xf7, 0x02, 0x36, + 0x12, 0xe8, 0x2e, 0xcc, 0x71, 0xc1, 0xfc, 0x70, 0xa8, 0x39, 0x06, 0xb4, 0xea, 0x69, 0x2e, 0xf1, + 0x77, 0x2f, 0xe0, 0x8a, 0x3c, 0xba, 0x09, 0xf6, 0x2b, 0xda, 0x17, 0x11, 0x33, 0xe8, 0xa0, 0xb2, + 0xe6, 0x33, 0xc5, 0x91, 0xde, 0xb4, 0x8c, 0x94, 0x1e, 0x11, 0xc1, 0xfc, 0x43, 0x93, 0x78, 0x45, + 0xfa, 0xa1, 0xe2, 0x48, 0x69, 0x2d, 0xe3, 0x35, 0xc1, 0x94, 0xc2, 0xfd, 0x0f, 0xd8, 0xcf, 0x72, + 0x0b, 0xb3, 0x5c, 0x79, 0x96, 0x67, 0xb0, 0x3e, 0x69, 0x42, 0x07, 0x85, 0x73, 0x11, 0x77, 0x17, + 0x6c, 0x6d, 0x15, 0xdd, 0x85, 0x8b, 0xbc, 0x74, 0x2b, 0xe5, 0xda, 0xa7, 0x5e, 0x5b, 0xb8, 0x2a, + 0xee, 0x06, 0xd5, 0xf1, 0xa4, 0x54, 0x6b, 0xf4, 0xa4, 0x1c, 0x92, 0xcc, 0xea, 0xc6, 0x19, 0x1d, + 0xa2, 0x85, 0x75, 0xa3, 0xb4, 0xb3, 0xd4, 0xc9, 0xd5, 0x8b, 0xb8, 0x3f, 0xab, 0xdc, 0x45, 0x27, + 0x29, 0xa2, 0xc7, 0xf0, 0x17, 0x11, 0x09, 0x12, 0xa8, 0xc2, 0x93, 0x5e, 0x90, 0x73, 0x55, 0x0c, + 0x75, 0x6f, 0x39, 0x4b, 0x9d, 0x93, 0x05, 0xf0, 0xc9, 0x64, 0xf4, 0x85, 0x05, 0xab, 0x27, 0x72, + 0xf6, 0x28, 0xdb, 0x97, 0x23, 0x8f, 0xbe, 0xe8, 0xef, 0x4c, 0x4f, 0x6e, 0x52, 0x59, 0x05, 0x6b, + 0x2c, 0x78, 0xeb, 0x59, 0xea, 0x4c, 0xf5, 0x81, 0xa7, 0x72, 0x5d, 0x1f, 0xce, 0xe9, 0x51, 0x4e, + 0x2d, 0xaf, 0xe4, 0x4c, 0xa1, 0x51, 0xc1, 0x7a, 0x83, 0xae, 0xc1, 0x9c, 
0x1c, 0xbe, 0xb8, 0x20, + 0xa3, 0xf8, 0xf9, 0x88, 0x9b, 0x99, 0xaf, 0x3d, 0xa6, 0x3d, 0xe4, 0xee, 0x97, 0x35, 0x98, 0x2b, + 0xf7, 0x03, 0xfa, 0xc4, 0x02, 0x3b, 0x20, 0x3d, 0x1a, 0xe4, 0xad, 0xb3, 0x58, 0x9c, 0xaa, 0x07, + 0x92, 0xbe, 0x47, 0x7c, 0xe6, 0xed, 0xcb, 0x3b, 0xe4, 0xfb, 0xd4, 0xd9, 0x1e, 0xfa, 0xe2, 0x20, + 0xe9, 0x75, 0xfb, 0xd1, 0x68, 0x53, 0x1c, 0x90, 0x30, 0xe2, 0xb7, 0xfc, 0xc8, 0xac, 0x36, 0xfd, + 0x50, 0x50, 0x16, 0x92, 0x60, 0x73, 0x62, 0x56, 0xd6, 0x76, 0xb6, 0x07, 0x24, 0x16, 0x94, 0xc9, + 0x8b, 0x68, 0x44, 0x05, 0xf3, 0xfb, 0xd8, 0xf8, 0x45, 0x77, 0x8a, 0x46, 0xd3, 0xb5, 0x38, 0x76, + 0xb0, 0x8b, 0x3b, 0x4c, 0x25, 0x5a, 0x74, 0x14, 0xc2, 0x00, 0x07, 0x3e, 0x17, 0xd1, 0x90, 0xc9, + 0xe6, 0xaf, 0x2b, 0x75, 0xe7, 0x78, 0xf3, 0xef, 0xe6, 0x32, 0x2a, 0x9b, 0xcb, 0xc6, 0x5a, 0x6b, + 0xac, 0x8a, 0x4b, 0x56, 0xdc, 0xaf, 0x6a, 0x60, 0x9b, 0x6b, 0xe0, 0x77, 0x80, 0xce, 0xbf, 0xa0, + 0xad, 0x93, 0x55, 0x43, 0xa4, 0xaa, 0xa9, 0xe5, 0xb5, 0xb2, 0xd4, 0xd1, 0x45, 0xc7, 0x65, 0x2e, + 0x5a, 0x85, 0xd6, 0xb8, 0xda, 0x66, 0xbc, 0x2f, 0x08, 0xe8, 0x01, 0x14, 0x19, 0x9b, 0x9b, 0xea, + 0xaf, 0x53, 0xb0, 0x52, 0x38, 0x59, 0x55, 0x9c, 0x8a, 0xa5, 0xfb, 0x1e, 0xcc, 0x95, 0xaf, 0xd0, + 0x6a, 0x4f, 0xb6, 0xce, 0xd1, 0x93, 0x02, 0x16, 0x4f, 0xa8, 0x52, 0x35, 0x17, 0x6b, 0x32, 0x97, + 0xff, 0x95, 0x73, 0xa9, 0x9d, 0x9d, 0x8b, 0x7e, 0x43, 0x94, 0xc2, 0x8f, 0x61, 0x61, 0x42, 0x46, + 0x66, 0xd0, 0x8f, 0x92, 0x50, 0x28, 0x6f, 0x16, 0xd6, 0x1b, 0xf9, 0x58, 0xe2, 0x89, 0xf6, 0x61, + 0x61, 0xb9, 0x44, 0xb7, 0x61, 0xb6, 0x97, 0xf4, 0x5f, 0x50, 0x91, 0x77, 0x5c, 0xc5, 0x73, 0xe1, + 0x53, 0xc9, 0xe0, 0x5c, 0xd6, 0xe5, 0xb0, 0x30, 0xc1, 0x43, 0x6b, 0x00, 0xbd, 0x28, 0x09, 0x07, + 0x84, 0xf9, 0xe6, 0x8a, 0x9b, 0xc1, 0x25, 0x8a, 0x8c, 0x28, 0x88, 0x5e, 0x53, 0x66, 0xbc, 0xeb, + 0x8d, 0xa4, 0x26, 0x71, 0x4c, 0xf5, 0xb7, 0xc9, 0xc2, 0x7a, 0x53, 0x44, 0xdf, 0x28, 0x45, 0xef, + 0x7e, 0x0c, 0xf3, 0xf2, 0x35, 0x45, 0x07, 0xe3, 0x69, 0x6f, 0x19, 0xea, 0x2f, 0xe8, 0x91, 0x19, + 0x39, 0x66, 0xb3, 0xd4, 0x91, 0x5b, 0x2c, 0xff, 0xc8, 0x17, 0x1f, 0x3d, 0x14, 0x34, 0x14, 0xf9, + 0x49, 0xac, 0x7c, 0x85, 0x76, 0x14, 0xcb, 0x5b, 0x30, 0xa7, 0x27, 0x17, 0xc5, 0xf9, 0xc2, 0xfd, + 0xda, 0x02, 0x5b, 0x0b, 0x21, 0x27, 0x7f, 0x77, 0xea, 0x6b, 0x5b, 0xf5, 0xab, 0x22, 0xe4, 0x4f, + 0xd0, 0x65, 0xfd, 0x04, 0x55, 0xed, 0xa0, 0xa3, 0xa0, 0xe1, 0x40, 0xbf, 0x45, 0xd7, 0xa1, 0x29, + 0x18, 0xe9, 0xd3, 0xe7, 0xfe, 0xc0, 0x8c, 0x78, 0xf9, 0x3c, 0xa6, 0xc8, 0xf7, 0x07, 0x72, 0xd4, + 0x60, 0x26, 0x1d, 0xf3, 0x34, 0x5d, 0x3a, 0xf6, 0x34, 0xdd, 0x0e, 0x8f, 0xf4, 0xa8, 0x91, 0x4b, + 0xe2, 0xf1, 0xea, 0xfd, 0x46, 0xb3, 0x7e, 0xa9, 0xe1, 0xde, 0xd4, 0xd0, 0x94, 0x9e, 0x94, 0x2b, + 0xd0, 0x1c, 0xf8, 0x5c, 0x5e, 0xba, 0x03, 0x15, 0x78, 0x13, 0x8f, 0xf7, 0x6e, 0x08, 0xed, 0x9d, + 0xc3, 0x38, 0x20, 0xa1, 0x7a, 0xf0, 0xa2, 0x55, 0x68, 0x84, 0xc5, 0x2b, 0xb0, 0x99, 0xa5, 0x8e, + 0xda, 0x63, 0xf5, 0x17, 0x6d, 0x43, 0xb3, 0x7f, 0xe0, 0x07, 0x03, 0x46, 0x43, 0x83, 0xe4, 0xd5, + 0x2a, 0x92, 0x63, 0x43, 0x3a, 0xc6, 0x5c, 0x18, 0x8f, 0x57, 0xee, 0xb7, 0x16, 0x34, 0xf3, 0x99, + 0xe9, 0x0c, 0x6f, 0x3d, 0xb8, 0x48, 0x0f, 0x69, 0x3f, 0x91, 0xf6, 0x9e, 0xfa, 0xa3, 0x7c, 0xda, + 0x99, 0xf2, 0x5c, 0xbf, 0x66, 0x6e, 0xac, 0x66, 0x4e, 0xc9, 0x52, 0xa7, 0x6a, 0x03, 0x57, 0xb7, + 0x12, 0xf2, 0x71, 0x46, 0xba, 0xe9, 0xa7, 0x4c, 0x77, 0xc7, 0xd3, 0xf1, 0x3a, 0x6f, 0xde, 0xae, + 0x59, 0xdf, 0xbd, 0x5d, 0xb3, 0x7e, 0x7a, 0xbb, 0x66, 0x7d, 0x58, 0xfa, 0x87, 0x4c, 0xcf, 0x56, + 0xe1, 0xfd, 0xfb, 0xd7, 0x00, 0x00, 0x00, 0xff, 0xff, 0xf5, 0x48, 0xa7, 0xb6, 0xd1, 0x11, 0x00, + 
0x00, } func (m *PrometheusRequestHeader) Marshal() (dAtA []byte, err error) { @@ -1972,18 +1806,16 @@ func (m *PrometheusRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { dAtA[i] = 0x42 } } - if m.CachingOptions != nil { - { - size, err := m.CachingOptions.MarshalToSizedBuffer(dAtA[:i]) - if err != nil { - return 0, err - } - i -= size - i = encodeVarintQueryrange(dAtA, i, uint64(size)) + { + size, err := m.CachingOptions.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err } - i-- - dAtA[i] = 0x3a + i -= size + i = encodeVarintQueryrange(dAtA, i, uint64(size)) } + i-- + dAtA[i] = 0x3a if len(m.Query) > 0 { i -= len(m.Query) copy(dAtA[i:], m.Query) @@ -1991,18 +1823,14 @@ func (m *PrometheusRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { i-- dAtA[i] = 0x32 } - if m.Timeout != nil { - { - size, err := m.Timeout.MarshalToSizedBuffer(dAtA[:i]) - if err != nil { - return 0, err - } - i -= size - i = encodeVarintQueryrange(dAtA, i, uint64(size)) - } - i-- - dAtA[i] = 0x2a + n2, err2 := github_com_gogo_protobuf_types.StdDurationMarshalTo(m.Timeout, dAtA[i-github_com_gogo_protobuf_types.SizeOfStdDuration(m.Timeout):]) + if err2 != nil { + return 0, err2 } + i -= n2 + i = encodeVarintQueryrange(dAtA, i, uint64(n2)) + i-- + dAtA[i] = 0x2a if m.Step != 0 { i = encodeVarintQueryrange(dAtA, i, uint64(m.Step)) i-- @@ -2132,18 +1960,16 @@ func (m *PrometheusResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) { i-- dAtA[i] = 0x1a } - if m.Data != nil { - { - size, err := m.Data.MarshalToSizedBuffer(dAtA[:i]) - if err != nil { - return 0, err - } - i -= size - i = encodeVarintQueryrange(dAtA, i, uint64(size)) + { + size, err := m.Data.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err } - i-- - dAtA[i] = 0x12 + i -= size + i = encodeVarintQueryrange(dAtA, i, uint64(size)) } + i-- + dAtA[i] = 0x12 if len(m.Status) > 0 { i -= len(m.Status) copy(dAtA[i:], m.Status) @@ -2287,18 +2113,16 @@ func (m *PrometheusInstantQueryResponse) MarshalToSizedBuffer(dAtA []byte) (int, i-- dAtA[i] = 0x1a } - if m.Data != nil { - { - size, err := m.Data.MarshalToSizedBuffer(dAtA[:i]) - if err != nil { - return 0, err - } - i -= size - i = encodeVarintQueryrange(dAtA, i, uint64(size)) + { + size, err := m.Data.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err } - i-- - dAtA[i] = 0x12 + i -= size + i = encodeVarintQueryrange(dAtA, i, uint64(size)) } + i-- + dAtA[i] = 0x12 if len(m.Status) > 0 { i -= len(m.Status) copy(dAtA[i:], m.Status) @@ -2357,18 +2181,16 @@ func (m *PrometheusInstantQueryData) MarshalToSizedBuffer(dAtA []byte) (int, err i-- dAtA[i] = 0x1a } - if m.Result != nil { - { - size, err := m.Result.MarshalToSizedBuffer(dAtA[:i]) - if err != nil { - return 0, err - } - i -= size - i = encodeVarintQueryrange(dAtA, i, uint64(size)) + { + size, err := m.Result.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err } - i-- - dAtA[i] = 0x12 + i -= size + i = encodeVarintQueryrange(dAtA, i, uint64(size)) } + i-- + dAtA[i] = 0x12 if len(m.ResultType) > 0 { i -= len(m.ResultType) copy(dAtA[i:], m.ResultType) @@ -2758,11 +2580,11 @@ func (m *SampleStream) MarshalToSizedBuffer(dAtA []byte) (int, error) { if len(m.Labels) > 0 { for iNdEx := len(m.Labels) - 1; iNdEx >= 0; iNdEx-- { { - size, err := m.Labels[iNdEx].MarshalToSizedBuffer(dAtA[:i]) - if err != nil { + size := m.Labels[iNdEx].Size() + i -= size + if _, err := m.Labels[iNdEx].MarshalTo(dAtA[i:]); err != nil { return 0, err } - i -= size i = encodeVarintQueryrange(dAtA, i, uint64(size)) } i-- @@ 
-2822,11 +2644,11 @@ func (m *Sample) MarshalToSizedBuffer(dAtA []byte) (int, error) { if len(m.Labels) > 0 { for iNdEx := len(m.Labels) - 1; iNdEx >= 0; iNdEx-- { { - size, err := m.Labels[iNdEx].MarshalToSizedBuffer(dAtA[:i]) - if err != nil { + size := m.Labels[iNdEx].Size() + i -= size + if _, err := m.Labels[iNdEx].MarshalTo(dAtA[i:]); err != nil { return 0, err } - i -= size i = encodeVarintQueryrange(dAtA, i, uint64(size)) } i-- @@ -2899,18 +2721,16 @@ func (m *SampleHistogramPair) MarshalToSizedBuffer(dAtA []byte) (int, error) { i -= len(m.XXX_unrecognized) copy(dAtA[i:], m.XXX_unrecognized) } - if m.Histogram != nil { - { - size, err := m.Histogram.MarshalToSizedBuffer(dAtA[:i]) - if err != nil { - return 0, err - } - i -= size - i = encodeVarintQueryrange(dAtA, i, uint64(size)) + { + size, err := m.Histogram.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err } - i-- - dAtA[i] = 0x12 + i -= size + i = encodeVarintQueryrange(dAtA, i, uint64(size)) } + i-- + dAtA[i] = 0x12 if m.Timestamp != 0 { i = encodeVarintQueryrange(dAtA, i, uint64(m.Timestamp)) i-- @@ -3211,43 +3031,6 @@ func (m *Explanation) MarshalToSizedBuffer(dAtA []byte) (int, error) { return len(dAtA) - i, nil } -func (m *Duration) Marshal() (dAtA []byte, err error) { - size := m.Size() - dAtA = make([]byte, size) - n, err := m.MarshalToSizedBuffer(dAtA[:size]) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *Duration) MarshalTo(dAtA []byte) (int, error) { - size := m.Size() - return m.MarshalToSizedBuffer(dAtA[:size]) -} - -func (m *Duration) MarshalToSizedBuffer(dAtA []byte) (int, error) { - i := len(dAtA) - _ = i - var l int - _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } - if m.Nanos != 0 { - i = encodeVarintQueryrange(dAtA, i, uint64(m.Nanos)) - i-- - dAtA[i] = 0x10 - } - if m.Seconds != 0 { - i = encodeVarintQueryrange(dAtA, i, uint64(m.Seconds)) - i-- - dAtA[i] = 0x8 - } - return len(dAtA) - i, nil -} - func (m *Analysis) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) @@ -3286,18 +3069,16 @@ func (m *Analysis) MarshalToSizedBuffer(dAtA []byte) (int, error) { dAtA[i] = 0x1a } } - if m.ExecutionTime != nil { - { - size, err := m.ExecutionTime.MarshalToSizedBuffer(dAtA[:i]) - if err != nil { - return 0, err - } - i -= size - i = encodeVarintQueryrange(dAtA, i, uint64(size)) + { + size := m.ExecutionTime.Size() + i -= size + if _, err := m.ExecutionTime.MarshalTo(dAtA[i:]); err != nil { + return 0, err } - i-- - dAtA[i] = 0x12 + i = encodeVarintQueryrange(dAtA, i, uint64(size)) } + i-- + dAtA[i] = 0x12 if len(m.Name) > 0 { i -= len(m.Name) copy(dAtA[i:], m.Name) @@ -3360,18 +3141,14 @@ func (m *PrometheusRequest) Size() (n int) { if m.Step != 0 { n += 1 + sovQueryrange(uint64(m.Step)) } - if m.Timeout != nil { - l = m.Timeout.Size() - n += 1 + l + sovQueryrange(uint64(l)) - } + l = github_com_gogo_protobuf_types.SizeOfStdDuration(m.Timeout) + n += 1 + l + sovQueryrange(uint64(l)) l = len(m.Query) if l > 0 { n += 1 + l + sovQueryrange(uint64(l)) } - if m.CachingOptions != nil { - l = m.CachingOptions.Size() - n += 1 + l + sovQueryrange(uint64(l)) - } + l = m.CachingOptions.Size() + n += 1 + l + sovQueryrange(uint64(l)) if len(m.Headers) > 0 { for _, e := range m.Headers { l = e.Size() @@ -3420,10 +3197,8 @@ func (m *PrometheusResponse) Size() (n int) { if l > 0 { n += 1 + l + sovQueryrange(uint64(l)) } - if m.Data != nil { - l = m.Data.Size() - n += 1 + l + 
sovQueryrange(uint64(l)) - } + l = m.Data.Size() + n += 1 + l + sovQueryrange(uint64(l)) l = len(m.ErrorType) if l > 0 { n += 1 + l + sovQueryrange(uint64(l)) @@ -3490,10 +3265,8 @@ func (m *PrometheusInstantQueryResponse) Size() (n int) { if l > 0 { n += 1 + l + sovQueryrange(uint64(l)) } - if m.Data != nil { - l = m.Data.Size() - n += 1 + l + sovQueryrange(uint64(l)) - } + l = m.Data.Size() + n += 1 + l + sovQueryrange(uint64(l)) l = len(m.ErrorType) if l > 0 { n += 1 + l + sovQueryrange(uint64(l)) @@ -3530,10 +3303,8 @@ func (m *PrometheusInstantQueryData) Size() (n int) { if l > 0 { n += 1 + l + sovQueryrange(uint64(l)) } - if m.Result != nil { - l = m.Result.Size() - n += 1 + l + sovQueryrange(uint64(l)) - } + l = m.Result.Size() + n += 1 + l + sovQueryrange(uint64(l)) if m.Stats != nil { l = m.Stats.Size() n += 1 + l + sovQueryrange(uint64(l)) @@ -3788,10 +3559,8 @@ func (m *SampleHistogramPair) Size() (n int) { if m.Timestamp != 0 { n += 1 + sovQueryrange(uint64(m.Timestamp)) } - if m.Histogram != nil { - l = m.Histogram.Size() - n += 1 + l + sovQueryrange(uint64(l)) - } + l = m.Histogram.Size() + n += 1 + l + sovQueryrange(uint64(l)) if m.XXX_unrecognized != nil { n += len(m.XXX_unrecognized) } @@ -3931,24 +3700,6 @@ func (m *Explanation) Size() (n int) { return n } -func (m *Duration) Size() (n int) { - if m == nil { - return 0 - } - var l int - _ = l - if m.Seconds != 0 { - n += 1 + sovQueryrange(uint64(m.Seconds)) - } - if m.Nanos != 0 { - n += 1 + sovQueryrange(uint64(m.Nanos)) - } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } - return n -} - func (m *Analysis) Size() (n int) { if m == nil { return 0 @@ -3959,10 +3710,8 @@ func (m *Analysis) Size() (n int) { if l > 0 { n += 1 + l + sovQueryrange(uint64(l)) } - if m.ExecutionTime != nil { - l = m.ExecutionTime.Size() - n += 1 + l + sovQueryrange(uint64(l)) - } + l = m.ExecutionTime.Size() + n += 1 + l + sovQueryrange(uint64(l)) if len(m.Children) > 0 { for _, e := range m.Children { l = e.Size() @@ -4243,10 +3992,7 @@ func (m *PrometheusRequest) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - if m.Timeout == nil { - m.Timeout = &Duration{} - } - if err := m.Timeout.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + if err := github_com_gogo_protobuf_types.StdDurationUnmarshal(&m.Timeout, dAtA[iNdEx:postIndex]); err != nil { return err } iNdEx = postIndex @@ -4311,9 +4057,6 @@ func (m *PrometheusRequest) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - if m.CachingOptions == nil { - m.CachingOptions = &CachingOptions{} - } if err := m.CachingOptions.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -4611,9 +4354,6 @@ func (m *PrometheusResponse) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - if m.Data == nil { - m.Data = &PrometheusData{} - } if err := m.Data.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -4860,7 +4600,7 @@ func (m *PrometheusData) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Result = append(m.Result, &SampleStream{}) + m.Result = append(m.Result, SampleStream{}) if err := m.Result[len(m.Result)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -5049,9 +4789,6 @@ func (m *PrometheusInstantQueryResponse) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - if m.Data == nil { - m.Data = &PrometheusInstantQueryData{} - } if err := m.Data.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return 
err } @@ -5298,9 +5035,6 @@ func (m *PrometheusInstantQueryData) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - if m.Result == nil { - m.Result = &PrometheusInstantQueryResult{} - } if err := m.Result.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -6098,7 +5832,7 @@ func (m *SampleStream) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Labels = append(m.Labels, &cortexpb.LabelPair{}) + m.Labels = append(m.Labels, github_com_thanos_io_thanos_internal_cortex_cortexpb.LabelAdapter{}) if err := m.Labels[len(m.Labels)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -6132,7 +5866,7 @@ func (m *SampleStream) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Samples = append(m.Samples, &cortexpb.Sample{}) + m.Samples = append(m.Samples, cortexpb.Sample{}) if err := m.Samples[len(m.Samples)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -6166,7 +5900,7 @@ func (m *SampleStream) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Histograms = append(m.Histograms, &SampleHistogramPair{}) + m.Histograms = append(m.Histograms, SampleHistogramPair{}) if err := m.Histograms[len(m.Histograms)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -6251,7 +5985,7 @@ func (m *Sample) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Labels = append(m.Labels, &cortexpb.LabelPair{}) + m.Labels = append(m.Labels, github_com_thanos_io_thanos_internal_cortex_cortexpb.LabelAdapter{}) if err := m.Labels[len(m.Labels)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -6523,9 +6257,6 @@ func (m *SampleHistogramPair) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - if m.Histogram == nil { - m.Histogram = &SampleHistogram{} - } if err := m.Histogram.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -6852,7 +6583,7 @@ func (m *CachedResponse) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Extents = append(m.Extents, &Extent{}) + m.Extents = append(m.Extents, Extent{}) if err := m.Extents[len(m.Extents)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -7224,95 +6955,6 @@ func (m *Explanation) Unmarshal(dAtA []byte) error { } return nil } -func (m *Duration) Unmarshal(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowQueryrange - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: Duration: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: Duration: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 0 { - return fmt.Errorf("proto: wrong wireType = %d for field Seconds", wireType) - } - m.Seconds = 0 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowQueryrange - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - m.Seconds |= int64(b&0x7F) << shift - if b < 0x80 { - break - } - } - case 2: - if wireType != 0 { - return fmt.Errorf("proto: wrong wireType = %d for field Nanos", wireType) - } - m.Nanos = 
0 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowQueryrange - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - m.Nanos |= int32(b&0x7F) << shift - if b < 0x80 { - break - } - } - default: - iNdEx = preIndex - skippy, err := skipQueryrange(dAtA[iNdEx:]) - if err != nil { - return err - } - if (skippy < 0) || (iNdEx+skippy) < 0 { - return ErrInvalidLengthQueryrange - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) - iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} func (m *Analysis) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 @@ -7403,9 +7045,6 @@ func (m *Analysis) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - if m.ExecutionTime == nil { - m.ExecutionTime = &Duration{} - } if err := m.ExecutionTime.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } diff --git a/internal/cortex/querier/queryrange/queryrange.proto b/internal/cortex/querier/queryrange/queryrange.proto index 0b956ac505..21edc5783c 100644 --- a/internal/cortex/querier/queryrange/queryrange.proto +++ b/internal/cortex/querier/queryrange/queryrange.proto @@ -7,85 +7,65 @@ package queryrange; option go_package = "queryrange"; +import "gogoproto/gogo.proto"; +import "google/protobuf/duration.proto"; import "cortex/cortexpb/cortex.proto"; import "google/protobuf/any.proto"; +option (gogoproto.marshaler_all) = true; +option (gogoproto.unmarshaler_all) = true; + message PrometheusRequestHeader { - // @gotags: json:"-" - string Name = 1; - // @gotags: json:"-" - repeated string Values = 2; + string Name = 1 [(gogoproto.jsontag) = "-"]; + repeated string Values = 2 [(gogoproto.jsontag) = "-"]; } message PrometheusRequest { string path = 1; int64 start = 2; int64 end = 3; int64 step = 4; - Duration timeout = 5; + google.protobuf.Duration timeout = 5 [(gogoproto.stdduration) = true, (gogoproto.nullable) = false]; string query = 6; - CachingOptions cachingOptions = 7; - // @gotags: json:"-" - repeated PrometheusRequestHeader Headers = 8; + CachingOptions cachingOptions = 7 [(gogoproto.nullable) = false]; + repeated PrometheusRequestHeader Headers = 8 [(gogoproto.jsontag) = "-"]; string stats = 9; } message PrometheusResponseHeader { - // @gotags: json:"-" - string Name = 1; - // @gotags: json:"-" - repeated string Values = 2; + string Name = 1 [(gogoproto.jsontag) = "-"]; + repeated string Values = 2 [(gogoproto.jsontag) = "-"]; } message PrometheusResponse { - // @gotags: json:"status" - string Status = 1; - // @gotags: json:"data,omitempty" - PrometheusData Data = 2; - // @gotags: json:"errorType,omitempty" - string ErrorType = 3; - // @gotags: json:"error,omitempty" - string Error = 4; - // @gotags: json:"-" - repeated PrometheusResponseHeader Headers = 5; - // @gotags: json:"warnings,omitempty" - repeated string Warnings = 6; + string Status = 1 [(gogoproto.jsontag) = "status"]; + PrometheusData Data = 2 [(gogoproto.nullable) = false, (gogoproto.jsontag) = "data,omitempty"]; + string ErrorType = 3 [(gogoproto.jsontag) = "errorType,omitempty"]; + string Error = 4 [(gogoproto.jsontag) = "error,omitempty"]; + repeated PrometheusResponseHeader Headers = 5 [(gogoproto.jsontag) = "-"]; + repeated string Warnings = 6 [(gogoproto.jsontag) = "warnings,omitempty"]; } message PrometheusData { - // @gotags: json:"resultType" - string ResultType = 1; - // @gotags: json:"result" - repeated 
SampleStream Result = 2; - // @gotags: json:"stats,omitempty" - PrometheusResponseStats stats = 3; - // @gotags: json:"analysis" - Analysis analysis = 4; + string ResultType = 1 [(gogoproto.jsontag) = "resultType"]; + repeated SampleStream Result = 2 [(gogoproto.nullable) = false, (gogoproto.jsontag) = "result"]; + PrometheusResponseStats stats = 3 [(gogoproto.jsontag) = "stats,omitempty"]; + Analysis analysis = 4 [(gogoproto.jsontag) = "analysis"]; } message PrometheusInstantQueryResponse { - // @gotags: json:"status" - string Status = 1; - // @gotags: json:"data,omitempty" - PrometheusInstantQueryData Data = 2; - // @gotags: json:"errorType,omitempty" - string ErrorType = 3; - // @gotags: json:"error,omitempty" - string Error = 4; - // @gotags: json:"-" - repeated PrometheusResponseHeader Headers = 5; - // @gotags: json:"warnings,omitempty" - repeated string Warnings = 6; + string Status = 1 [(gogoproto.jsontag) = "status"]; + PrometheusInstantQueryData Data = 2 [(gogoproto.nullable) = false, (gogoproto.jsontag) = "data,omitempty"]; + string ErrorType = 3 [(gogoproto.jsontag) = "errorType,omitempty"]; + string Error = 4 [(gogoproto.jsontag) = "error,omitempty"]; + repeated PrometheusResponseHeader Headers = 5 [(gogoproto.jsontag) = "-"]; + repeated string Warnings = 6 [(gogoproto.jsontag) = "warnings,omitempty"]; } message PrometheusInstantQueryData { - // @gotags: json:"resultType" - string ResultType = 1; - // @gotags: json:"result" - PrometheusInstantQueryResult Result = 2; - // @gotags: json:"stats,omitempty" - PrometheusResponseStats stats = 3; - // @gotags: json:"analysis" - Analysis analysis = 4; + string ResultType = 1 [(gogoproto.jsontag) = "resultType"]; + PrometheusInstantQueryResult Result = 2 [(gogoproto.nullable) = false, (gogoproto.jsontag) = "result"]; + PrometheusResponseStats stats = 3 [(gogoproto.jsontag) = "stats,omitempty"]; + Analysis analysis = 4 [(gogoproto.jsontag) = "analysis"]; } message PrometheusInstantQueryResult { @@ -106,15 +86,12 @@ message Matrix { } message PrometheusResponseStats { - // @gotags: json:"samples" - PrometheusResponseSamplesStats samples = 1; + PrometheusResponseSamplesStats samples = 1 [(gogoproto.jsontag) = "samples"]; } message PrometheusResponseSamplesStats { - // @gotags: json:"totalQueryableSamples" - int64 totalQueryableSamples = 1; - // @gotags: json:"totalQueryableSamplesPerStep" - repeated PrometheusResponseQueryableSamplesStatsPerStep totalQueryableSamplesPerStep = 2; + int64 totalQueryableSamples = 1 [(gogoproto.jsontag) = "totalQueryableSamples"]; + repeated PrometheusResponseQueryableSamplesStatsPerStep totalQueryableSamplesPerStep = 2 [(gogoproto.jsontag) = "totalQueryableSamplesPerStep"]; } message PrometheusResponseQueryableSamplesStatsPerStep { @@ -123,22 +100,16 @@ message PrometheusResponseQueryableSamplesStatsPerStep { } message SampleStream { - // @gotags: json:"metric" - repeated cortexpb.LabelPair labels = 1; - // @gotags: json:"values" - repeated cortexpb.Sample samples = 2; - // @gotags: json:"histogram" - repeated SampleHistogramPair histograms = 3; + repeated cortexpb.LabelPair labels = 1 [(gogoproto.nullable) = false, (gogoproto.jsontag) = "metric", (gogoproto.customtype) = "github.com/thanos-io/thanos/internal/cortex/cortexpb.LabelAdapter"]; + repeated cortexpb.Sample samples = 2 [(gogoproto.nullable) = false, (gogoproto.jsontag) = "values"]; + repeated SampleHistogramPair histograms = 3 [(gogoproto.nullable) = false, (gogoproto.jsontag) = "histogram"]; } message Sample { - // @gotags: json:"metric" - repeated 
cortexpb.LabelPair labels = 1; - // @gotags: json:"value" - double sampleValue = 2; + repeated cortexpb.LabelPair labels = 1 [(gogoproto.nullable) = false, (gogoproto.jsontag) = "metric", (gogoproto.customtype) = "github.com/thanos-io/thanos/internal/cortex/cortexpb.LabelAdapter"]; + double sampleValue = 2 [(gogoproto.jsontag) = "value"]; int64 timestamp = 3; - // @gotags: json:"histogram" - SampleHistogram histogram = 4; + SampleHistogram histogram = 4 [(gogoproto.nullable) = true, (gogoproto.jsontag) = "histogram"]; } message StringSample { @@ -148,7 +119,7 @@ message StringSample { message SampleHistogramPair { int64 timestamp = 1; - SampleHistogram histogram = 2; + SampleHistogram histogram = 2 [(gogoproto.nullable) = false]; } message SampleHistogram { @@ -165,25 +136,19 @@ message HistogramBucket { } message CachedResponse { - // @gotags: json:"key" - string key = 1; + string key = 1 [(gogoproto.jsontag) = "key"]; // List of cached responses; non-overlapping and in order. - // @gotags: json:"extents" - repeated Extent extents = 2; + repeated Extent extents = 2 [(gogoproto.nullable) = false, (gogoproto.jsontag) = "extents"]; } message Extent { - // @gotags: json:"start" - int64 start = 1; - // @gotags: json:"end" - int64 end = 2; + int64 start = 1 [(gogoproto.jsontag) = "start"]; + int64 end = 2 [(gogoproto.jsontag) = "end"]; // reserved the previous key to ensure cache transition reserved 3; - // @gotags: json:"-" - string trace_id = 4; - // @gotags: json:"response" - google.protobuf.Any response = 5; + string trace_id = 4 [(gogoproto.jsontag) = "-"]; + google.protobuf.Any response = 5 [(gogoproto.jsontag) = "response"]; } message CachingOptions { @@ -191,91 +156,12 @@ message CachingOptions { } message Explanation { - // @gotags: json:"name" - string name = 1; - // @gotags: json:"children" - repeated Explanation children = 2; -} - -// A Duration represents a signed, fixed-length span of time represented -// as a count of seconds and fractions of seconds at nanosecond -// resolution. It is independent of any calendar and concepts like "day" -// or "month". It is related to Timestamp in that the difference between -// two Timestamp values is a Duration and it can be added or subtracted -// from a Timestamp. Range is approximately +-10,000 years. -// -// # Examples -// -// Example 1: Compute Duration from two Timestamps in pseudo code. -// -// Timestamp start = ...; -// Timestamp end = ...; -// Duration duration = ...; -// -// duration.seconds = end.seconds - start.seconds; -// duration.nanos = end.nanos - start.nanos; -// -// if (duration.seconds < 0 && duration.nanos > 0) { -// duration.seconds += 1; -// duration.nanos -= 1000000000; -// } else if (duration.seconds > 0 && duration.nanos < 0) { -// duration.seconds -= 1; -// duration.nanos += 1000000000; -// } -// -// Example 2: Compute Timestamp from Timestamp + Duration in pseudo code. -// -// Timestamp start = ...; -// Duration duration = ...; -// Timestamp end = ...; -// -// end.seconds = start.seconds + duration.seconds; -// end.nanos = start.nanos + duration.nanos; -// -// if (end.nanos < 0) { -// end.seconds -= 1; -// end.nanos += 1000000000; -// } else if (end.nanos >= 1000000000) { -// end.seconds += 1; -// end.nanos -= 1000000000; -// } -// -// Example 3: Compute Duration from datetime.timedelta in Python. 
-// -// td = datetime.timedelta(days=3, minutes=10) -// duration = Duration() -// duration.FromTimedelta(td) -// -// # JSON Mapping -// -// In JSON format, the Duration type is encoded as a string rather than an -// object, where the string ends in the suffix "s" (indicating seconds) and -// is preceded by the number of seconds, with nanoseconds expressed as -// fractional seconds. For example, 3 seconds with 0 nanoseconds should be -// encoded in JSON format as "3s", while 3 seconds and 1 nanosecond should -// be expressed in JSON format as "3.000000001s", and 3 seconds and 1 -// microsecond should be expressed in JSON format as "3.000001s". -// -message Duration { - // Signed seconds of the span of time. Must be from -315,576,000,000 - // to +315,576,000,000 inclusive. Note: these bounds are computed from: - // 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years - int64 seconds = 1; - - // Signed fractions of a second at nanosecond resolution of the span - // of time. Durations less than one second are represented with a 0 - // `seconds` field and a positive or negative `nanos` field. For durations - // of one second or more, a non-zero value for the `nanos` field must be - // of the same sign as the `seconds` field. Must be from -999,999,999 - // to +999,999,999 inclusive. - int32 nanos = 2; + string name = 1 [(gogoproto.jsontag) = "name"]; + repeated Explanation children = 2 [(gogoproto.jsontag) = "children"]; } message Analysis { - // @gotags: json:"name" - string name = 1; - // @gotags: json:"executionTime" - Duration executionTime = 2; - // @gotags: json:"children" - repeated Analysis children = 3; + string name = 1 [(gogoproto.jsontag) = "name"]; + google.protobuf.Duration executionTime = 2 [(gogoproto.customtype) = "Duration", (gogoproto.nullable) = false, (gogoproto.jsontag) = "executionTime"]; + repeated Analysis children = 3 [(gogoproto.jsontag) = "children"]; } diff --git a/internal/cortex/querier/queryrange/results_cache.go b/internal/cortex/querier/queryrange/results_cache.go index 68281020d1..a3399e2551 100644 --- a/internal/cortex/querier/queryrange/results_cache.go +++ b/internal/cortex/querier/queryrange/results_cache.go @@ -7,13 +7,12 @@ import ( "context" "flag" "fmt" + "github.com/thanos-io/thanos/pkg/extpromql" "net/http" "sort" "strings" "time" - "github.com/thanos-io/thanos/pkg/extpromql" - "github.com/go-kit/log" "github.com/go-kit/log/level" "github.com/gogo/protobuf/proto" @@ -95,7 +94,7 @@ func (PrometheusResponseExtractor) Extract(start, end int64, from Response) Resp promRes := from.(*PrometheusResponse) return &PrometheusResponse{ Status: StatusSuccess, - Data: &PrometheusData{ + Data: PrometheusData{ ResultType: promRes.Data.ResultType, Result: extractMatrix(start, end, promRes.Data.Result), Stats: extractStats(start, end, promRes.Data.Stats), @@ -112,7 +111,7 @@ func (PrometheusResponseExtractor) ResponseWithoutHeaders(resp Response) Respons promRes := resp.(*PrometheusResponse) return &PrometheusResponse{ Status: StatusSuccess, - Data: &PrometheusData{ + Data: PrometheusData{ ResultType: promRes.Data.ResultType, Result: promRes.Data.Result, Stats: promRes.Data.Stats, @@ -127,7 +126,7 @@ func (PrometheusResponseExtractor) ResponseWithoutStats(resp Response) Response promRes := resp.(*PrometheusResponse) return &PrometheusResponse{ Status: StatusSuccess, - Data: &PrometheusData{ + Data: PrometheusData{ ResultType: promRes.Data.ResultType, Result: promRes.Data.Result, Analysis: promRes.Data.Analysis, @@ -243,7 +242,7 @@ func (s resultsCache) 
Do(ctx context.Context, r Request) (Response, error) { var ( key = s.splitter.GenerateCacheKey(tenant.JoinTenantIDs(tenantIDs), r) - extents []*Extent + extents []Extent response Response ) @@ -405,7 +404,7 @@ func (s resultsCache) isOffsetCachable(r Request) bool { } func getHeaderValuesWithName(r Response, headerName string) (headerValues []string) { - for _, hv := range r.GetQueryRangeHeaders() { + for _, hv := range r.GetHeaders() { if hv.GetName() != headerName { continue } @@ -416,14 +415,14 @@ func getHeaderValuesWithName(r Response, headerName string) (headerValues []stri return } -func (s resultsCache) handleMiss(ctx context.Context, r Request, maxCacheTime int64) (Response, []*Extent, error) { +func (s resultsCache) handleMiss(ctx context.Context, r Request, maxCacheTime int64) (Response, []Extent, error) { response, err := s.next.Do(ctx, r) if err != nil { return nil, nil, err } if !s.shouldCacheResponse(ctx, r, response, maxCacheTime) { - return response, []*Extent{}, nil + return response, []Extent{}, nil } extent, err := toExtent(ctx, r, s.extractor.ResponseWithoutHeaders(response)) @@ -431,13 +430,13 @@ func (s resultsCache) handleMiss(ctx context.Context, r Request, maxCacheTime in return nil, nil, err } - extents := []*Extent{ + extents := []Extent{ extent, } return response, extents, nil } -func (s resultsCache) handleHit(ctx context.Context, r Request, extents []*Extent, maxCacheTime int64) (Response, []*Extent, error) { +func (s resultsCache) handleHit(ctx context.Context, r Request, extents []Extent, maxCacheTime int64) (Response, []Extent, error) { var ( reqResps []RequestResponse err error @@ -487,7 +486,7 @@ func (s resultsCache) handleHit(ctx context.Context, r Request, extents []*Exten if err != nil { return nil, nil, err } - mergedExtents := make([]*Extent, 0, len(extents)) + mergedExtents := make([]Extent, 0, len(extents)) for i := 1; i < len(extents); i++ { if accumulator.End+r.GetStep() < extents[i].Start { @@ -530,15 +529,15 @@ func (s resultsCache) handleHit(ctx context.Context, r Request, extents []*Exten type accumulator struct { Response - *Extent + Extent } -func merge(extents []*Extent, acc *accumulator) ([]*Extent, error) { +func merge(extents []Extent, acc *accumulator) ([]Extent, error) { any, err := types.MarshalAny(acc.Response) if err != nil { return nil, err } - return append(extents, &Extent{ + return append(extents, Extent{ Start: acc.Extent.Start, End: acc.Extent.End, Response: any, @@ -546,7 +545,7 @@ func merge(extents []*Extent, acc *accumulator) ([]*Extent, error) { }), nil } -func newAccumulator(base *Extent) (*accumulator, error) { +func newAccumulator(base Extent) (*accumulator, error) { res, err := base.toResponse() if err != nil { return nil, err @@ -557,12 +556,12 @@ func newAccumulator(base *Extent) (*accumulator, error) { }, nil } -func toExtent(ctx context.Context, req Request, res Response) (*Extent, error) { +func toExtent(ctx context.Context, req Request, res Response) (Extent, error) { any, err := types.MarshalAny(res) if err != nil { - return &Extent{}, err + return Extent{}, err } - return &Extent{ + return Extent{ Start: req.GetStart(), End: req.GetEnd(), Response: any, @@ -572,7 +571,7 @@ func toExtent(ctx context.Context, req Request, res Response) (*Extent, error) { // partition calculates the required requests to satisfy req given the cached data. // extents must be in order by start time. 
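Editorial note on the results-cache hunks above: with `(gogoproto.nullable) = false` the generated `Extent` (like the other messages annotated this way) is carried by value, so the cache code now passes `[]Extent` instead of `[]*Extent`, and "no extent" becomes the zero value rather than a nil pointer. Below is a minimal, hedged sketch of the calling pattern this implies; `Extent` here is a trimmed, hypothetical stand-in with only the fields needed for illustration, not the generated type itself.

```go
package main

import "fmt"

// Extent is a trimmed, hypothetical stand-in for the generated
// queryrange.Extent; only Start/End/TraceId are kept for illustration.
type Extent struct {
	Start   int64
	End     int64
	TraceId string
}

func main() {
	// With value semantics the cache code appends Extent{...},
	// not &Extent{...} (compare the merge/toExtent hunks above).
	extents := make([]Extent, 0, 2)
	extents = append(extents, Extent{Start: 0, End: 120000})
	extents = append(extents, Extent{Start: 120000, End: 240000})

	// "No extent" is now the zero value rather than a nil pointer.
	var none Extent
	fmt.Println(len(extents), extents[1].End, none.Start)
}
```

The same pattern applies to the `partition` and `filterRecentExtents` signatures changed below: callers hand over value slices and mutate elements through the index rather than through shared pointers.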
-func (s resultsCache) partition(req Request, extents []*Extent) ([]Request, []Response, error) { +func (s resultsCache) partition(req Request, extents []Extent) ([]Request, []Response, error) { var requests []Request var cachedResponses []Response start := req.GetStart() @@ -622,7 +621,7 @@ func (s resultsCache) partition(req Request, extents []*Extent) ([]Request, []Re return requests, cachedResponses, nil } -func (s resultsCache) filterRecentExtents(req Request, maxCacheFreshness time.Duration, extents []*Extent) ([]*Extent, error) { +func (s resultsCache) filterRecentExtents(req Request, maxCacheFreshness time.Duration, extents []Extent) ([]Extent, error) { maxCacheTime := (int64(model.Now().Add(-maxCacheFreshness)) / req.GetStep()) * req.GetStep() for i := range extents { // Never cache data for the latest freshness period. @@ -643,7 +642,7 @@ func (s resultsCache) filterRecentExtents(req Request, maxCacheFreshness time.Du return extents, nil } -func (s resultsCache) get(ctx context.Context, key string) ([]*Extent, bool) { +func (s resultsCache) get(ctx context.Context, key string) ([]Extent, bool) { found, bufs, _ := s.cache.Fetch(ctx, []string{cache.HashKey(key)}) if len(found) != 1 { return nil, false @@ -675,7 +674,7 @@ func (s resultsCache) get(ctx context.Context, key string) ([]*Extent, bool) { return resp.Extents, true } -func (s resultsCache) put(ctx context.Context, key string, extents []*Extent) { +func (s resultsCache) put(ctx context.Context, key string, extents []Extent) { buf, err := proto.Marshal(&CachedResponse{ Key: key, Extents: extents, @@ -719,8 +718,8 @@ func extractStats(start, end int64, stats *PrometheusResponseStats) *PrometheusR return result } -func extractMatrix(start, end int64, matrix []*SampleStream) []*SampleStream { - result := make([]*SampleStream, 0, len(matrix)) +func extractMatrix(start, end int64, matrix []SampleStream) []SampleStream { + result := make([]SampleStream, 0, len(matrix)) for _, stream := range matrix { extracted, ok := extractSampleStream(start, end, stream) if ok { @@ -730,17 +729,17 @@ func extractMatrix(start, end int64, matrix []*SampleStream) []*SampleStream { return result } -func extractSampleStream(start, end int64, stream *SampleStream) (*SampleStream, bool) { - result := &SampleStream{ +func extractSampleStream(start, end int64, stream SampleStream) (SampleStream, bool) { + result := SampleStream{ Labels: stream.Labels, } if len(stream.Samples) > 0 { - result.Samples = make([]*cortexpb.Sample, 0, len(stream.Samples)) + result.Samples = make([]cortexpb.Sample, 0, len(stream.Samples)) } if len(stream.Histograms) > 0 { - result.Histograms = make([]*SampleHistogramPair, 0, len(stream.Histograms)) + result.Histograms = make([]SampleHistogramPair, 0, len(stream.Histograms)) } for _, sample := range stream.Samples { @@ -754,7 +753,7 @@ func extractSampleStream(start, end int64, stream *SampleStream) (*SampleStream, } } if len(result.Samples) == 0 && len(result.Histograms) == 0 { - return &SampleStream{}, false + return SampleStream{}, false } return result, true } diff --git a/internal/cortex/querier/queryrange/results_cache_test.go b/internal/cortex/querier/queryrange/results_cache_test.go index bb6bee5c2d..7527b6ad11 100644 --- a/internal/cortex/querier/queryrange/results_cache_test.go +++ b/internal/cortex/querier/queryrange/results_cache_test.go @@ -57,7 +57,7 @@ var ( End: 1536716898 * 1e3, Step: 120 * 1e3, Query: "sum(container_memory_rss) by (namespace)", - CachingOptions: &CachingOptions{Disabled: true}, + CachingOptions: 
CachingOptions{Disabled: true}, } noCacheRequestWithStats = &PrometheusRequest{ Path: "/api/v1/query_range", @@ -66,7 +66,7 @@ var ( Step: 120 * 1e3, Stats: "all", Query: "sum(container_memory_rss) by (namespace)", - CachingOptions: &CachingOptions{Disabled: true}, + CachingOptions: CachingOptions{Disabled: true}, } respHeaders = []*PrometheusResponseHeader{ { @@ -76,15 +76,15 @@ var ( } parsedResponse = &PrometheusResponse{ Status: "success", - Data: &PrometheusData{ + Data: PrometheusData{ ResultType: model.ValMatrix.String(), Analysis: (*Analysis)(nil), - Result: []*SampleStream{ + Result: []SampleStream{ { - Labels: []*cortexpb.LabelPair{ - {Name: []byte("foo"), Value: []byte("bar")}, + Labels: []cortexpb.LabelAdapter{ + {Name: "foo", Value: "bar"}, }, - Samples: []*cortexpb.Sample{ + Samples: []cortexpb.Sample{ {Value: 137, TimestampMs: 1536673680000}, {Value: 137, TimestampMs: 1536673780000}, }, @@ -95,15 +95,15 @@ var ( } parsedHistogramResponse = &PrometheusResponse{ Status: "success", - Data: &PrometheusData{ + Data: PrometheusData{ ResultType: model.ValMatrix.String(), Analysis: (*Analysis)(nil), - Result: []*SampleStream{ + Result: []SampleStream{ { - Labels: []*cortexpb.LabelPair{ - {Name: []byte("fake"), Value: []byte("histogram")}, + Labels: []cortexpb.LabelAdapter{ + {Name: "fake", Value: "histogram"}, }, - Histograms: []*SampleHistogramPair{ + Histograms: []SampleHistogramPair{ { Timestamp: 1536673680000, Histogram: genSampleHistogram(), @@ -120,13 +120,13 @@ func mkAPIResponse(start, end, step int64) *PrometheusResponse { } func mkAPIResponseWithStats(start, end, step int64, withStats bool) *PrometheusResponse { - var samples []*cortexpb.Sample + var samples []cortexpb.Sample var stats *PrometheusResponseStats if withStats { stats = &PrometheusResponseStats{Samples: &PrometheusResponseSamplesStats{}} } for i := start; i <= end; i += step { - samples = append(samples, &cortexpb.Sample{ + samples = append(samples, cortexpb.Sample{ TimestampMs: int64(i), Value: float64(i), }) @@ -143,14 +143,14 @@ func mkAPIResponseWithStats(start, end, step int64, withStats bool) *PrometheusR return &PrometheusResponse{ Status: StatusSuccess, - Data: &PrometheusData{ + Data: PrometheusData{ ResultType: matrix, Stats: stats, Analysis: &Analysis{}, - Result: []*SampleStream{ + Result: []SampleStream{ { - Labels: []*cortexpb.LabelPair{ - {Name: []byte("foo"), Value: []byte("bar")}, + Labels: []cortexpb.LabelAdapter{ + {Name: "foo", Value: "bar"}, }, Samples: samples, }, @@ -159,25 +159,25 @@ func mkAPIResponseWithStats(start, end, step int64, withStats bool) *PrometheusR } } -func mkExtentWithStats(start, end int64) *Extent { +func mkExtentWithStats(start, end int64) Extent { return mkExtentWithStepWithStats(start, end, 10, true) } -func mkExtent(start, end int64) *Extent { +func mkExtent(start, end int64) Extent { return mkExtentWithStepWithStats(start, end, 10, false) } -func mkExtentWithStep(start, end, step int64) *Extent { +func mkExtentWithStep(start, end, step int64) Extent { return mkExtentWithStepWithStats(start, end, step, false) } -func mkExtentWithStepWithStats(start, end, step int64, withStats bool) *Extent { +func mkExtentWithStepWithStats(start, end, step int64, withStats bool) Extent { res := mkAPIResponseWithStats(start, end, step, withStats) any, err := types.MarshalAny(res) if err != nil { panic(err) } - return &Extent{ + return Extent{ Start: start, End: end, Response: any, @@ -590,7 +590,7 @@ func TestPartition(t *testing.T) { for _, tc := range []struct { name string input 
Request - prevCachedResponse []*Extent + prevCachedResponse []Extent expectedRequests []Request expectedCachedResponse []Response }{ @@ -600,7 +600,7 @@ func TestPartition(t *testing.T) { Start: 0, End: 100, }, - prevCachedResponse: []*Extent{ + prevCachedResponse: []Extent{ mkExtent(0, 100), }, expectedCachedResponse: []Response{ @@ -614,7 +614,7 @@ func TestPartition(t *testing.T) { Start: 0, End: 100, }, - prevCachedResponse: []*Extent{ + prevCachedResponse: []Extent{ mkExtent(110, 210), }, expectedRequests: []Request{ @@ -629,7 +629,7 @@ func TestPartition(t *testing.T) { Start: 0, End: 100, }, - prevCachedResponse: []*Extent{ + prevCachedResponse: []Extent{ mkExtent(50, 100), }, expectedRequests: []Request{ @@ -648,7 +648,7 @@ func TestPartition(t *testing.T) { Start: 100, End: 200, }, - prevCachedResponse: []*Extent{ + prevCachedResponse: []Extent{ mkExtent(50, 120), mkExtent(160, 250), }, @@ -669,7 +669,7 @@ func TestPartition(t *testing.T) { Start: 100, End: 160, }, - prevCachedResponse: []*Extent{ + prevCachedResponse: []Extent{ mkExtent(50, 120), mkExtent(122, 130), }, @@ -689,7 +689,7 @@ func TestPartition(t *testing.T) { Start: 100, End: 100, }, - prevCachedResponse: []*Extent{ + prevCachedResponse: []Extent{ mkExtent(50, 90), }, expectedRequests: []Request{ @@ -706,7 +706,7 @@ func TestPartition(t *testing.T) { Start: 100, End: 100, }, - prevCachedResponse: []*Extent{ + prevCachedResponse: []Extent{ mkExtent(100, 100), }, expectedCachedResponse: []Response{ @@ -719,7 +719,7 @@ func TestPartition(t *testing.T) { Start: 0, End: 100, }, - prevCachedResponse: []*Extent{ + prevCachedResponse: []Extent{ mkExtentWithStats(0, 100), }, expectedCachedResponse: []Response{ @@ -733,7 +733,7 @@ func TestPartition(t *testing.T) { Start: 0, End: 100, }, - prevCachedResponse: []*Extent{ + prevCachedResponse: []Extent{ mkExtentWithStats(110, 210), }, expectedRequests: []Request{ @@ -748,7 +748,7 @@ func TestPartition(t *testing.T) { Start: 0, End: 100, }, - prevCachedResponse: []*Extent{ + prevCachedResponse: []Extent{ mkExtentWithStats(50, 100), }, expectedRequests: []Request{ @@ -767,7 +767,7 @@ func TestPartition(t *testing.T) { Start: 100, End: 200, }, - prevCachedResponse: []*Extent{ + prevCachedResponse: []Extent{ mkExtentWithStats(50, 120), mkExtentWithStats(160, 250), }, @@ -788,7 +788,7 @@ func TestPartition(t *testing.T) { Start: 100, End: 160, }, - prevCachedResponse: []*Extent{ + prevCachedResponse: []Extent{ mkExtentWithStats(50, 120), mkExtentWithStats(122, 130), }, @@ -808,7 +808,7 @@ func TestPartition(t *testing.T) { Start: 100, End: 100, }, - prevCachedResponse: []*Extent{ + prevCachedResponse: []Extent{ mkExtentWithStats(50, 90), }, expectedRequests: []Request{ @@ -825,7 +825,7 @@ func TestPartition(t *testing.T) { Start: 100, End: 100, }, - prevCachedResponse: []*Extent{ + prevCachedResponse: []Extent{ mkExtentWithStats(100, 100), }, expectedCachedResponse: []Response{ @@ -850,8 +850,8 @@ func TestHandleHit(t *testing.T) { for _, tc := range []struct { name string input Request - cachedEntry []*Extent - expectedUpdatedCachedEntry []*Extent + cachedEntry []Extent + expectedUpdatedCachedEntry []Extent }{ { name: "Should drop tiny extent that overlaps with non-tiny request only", @@ -860,14 +860,14 @@ func TestHandleHit(t *testing.T) { End: 120, Step: 5, }, - cachedEntry: []*Extent{ + cachedEntry: []Extent{ mkExtentWithStep(0, 50, 5), mkExtentWithStep(60, 65, 5), mkExtentWithStep(100, 105, 5), mkExtentWithStep(110, 150, 5), mkExtentWithStep(160, 165, 5), }, - 
expectedUpdatedCachedEntry: []*Extent{ + expectedUpdatedCachedEntry: []Extent{ mkExtentWithStep(0, 50, 5), mkExtentWithStep(60, 65, 5), mkExtentWithStep(100, 150, 5), @@ -881,7 +881,7 @@ func TestHandleHit(t *testing.T) { End: 200, Step: 5, }, - cachedEntry: []*Extent{ + cachedEntry: []Extent{ mkExtentWithStep(0, 50, 5), mkExtentWithStep(60, 65, 5), mkExtentWithStep(100, 105, 5), @@ -890,7 +890,7 @@ func TestHandleHit(t *testing.T) { mkExtentWithStep(220, 225, 5), mkExtentWithStep(240, 250, 5), }, - expectedUpdatedCachedEntry: []*Extent{ + expectedUpdatedCachedEntry: []Extent{ mkExtentWithStep(0, 50, 5), mkExtentWithStep(60, 65, 5), mkExtentWithStep(100, 200, 5), @@ -905,7 +905,7 @@ func TestHandleHit(t *testing.T) { End: 105, Step: 5, }, - cachedEntry: []*Extent{ + cachedEntry: []Extent{ mkExtentWithStep(0, 50, 5), mkExtentWithStep(60, 65, 5), mkExtentWithStep(100, 105, 5), @@ -920,7 +920,7 @@ func TestHandleHit(t *testing.T) { End: 108, Step: 2, }, - cachedEntry: []*Extent{ + cachedEntry: []Extent{ mkExtentWithStep(60, 64, 2), mkExtentWithStep(104, 110, 2), mkExtentWithStep(160, 166, 2), @@ -934,12 +934,12 @@ func TestHandleHit(t *testing.T) { End: 106, Step: 2, }, - cachedEntry: []*Extent{ + cachedEntry: []Extent{ mkExtentWithStep(60, 64, 2), mkExtentWithStep(104, 110, 2), mkExtentWithStep(160, 166, 2), }, - expectedUpdatedCachedEntry: []*Extent{ + expectedUpdatedCachedEntry: []Extent{ mkExtentWithStep(60, 64, 2), mkExtentWithStep(100, 110, 2), mkExtentWithStep(160, 166, 2), @@ -952,12 +952,12 @@ func TestHandleHit(t *testing.T) { End: 106, Step: 2, }, - cachedEntry: []*Extent{ + cachedEntry: []Extent{ mkExtentWithStep(60, 64, 2), mkExtentWithStep(98, 102, 2), mkExtentWithStep(160, 166, 2), }, - expectedUpdatedCachedEntry: []*Extent{ + expectedUpdatedCachedEntry: []Extent{ mkExtentWithStep(60, 64, 2), mkExtentWithStep(98, 106, 2), mkExtentWithStep(160, 166, 2), @@ -970,11 +970,11 @@ func TestHandleHit(t *testing.T) { End: 80, Step: 20, }, - cachedEntry: []*Extent{ + cachedEntry: []Extent{ mkExtentWithStep(0, 20, 20), mkExtentWithStep(80, 100, 20), }, - expectedUpdatedCachedEntry: []*Extent{ + expectedUpdatedCachedEntry: []Extent{ mkExtentWithStep(0, 100, 20), }, }, @@ -985,10 +985,10 @@ func TestHandleHit(t *testing.T) { End: 80, Step: 20, }, - cachedEntry: []*Extent{ + cachedEntry: []Extent{ mkExtentWithStep(60, 160, 20), }, - expectedUpdatedCachedEntry: []*Extent{ + expectedUpdatedCachedEntry: []Extent{ mkExtentWithStep(40, 160, 20), }, }, @@ -999,10 +999,10 @@ func TestHandleHit(t *testing.T) { End: 180, Step: 20, }, - cachedEntry: []*Extent{ + cachedEntry: []Extent{ mkExtentWithStep(60, 160, 20), }, - expectedUpdatedCachedEntry: []*Extent{ + expectedUpdatedCachedEntry: []Extent{ mkExtentWithStep(60, 180, 20), }, }, @@ -1015,7 +1015,7 @@ func TestHandleHit(t *testing.T) { End: 180, Step: 20, }, - cachedEntry: []*Extent{ + cachedEntry: []Extent{ { Start: 60, End: 80, @@ -1027,7 +1027,7 @@ func TestHandleHit(t *testing.T) { }, mkExtentWithStep(60, 160, 20), }, - expectedUpdatedCachedEntry: []*Extent{ + expectedUpdatedCachedEntry: []Extent{ mkExtentWithStep(60, 180, 20), }, }, @@ -1190,7 +1190,7 @@ func TestResultsCacheMaxFreshness(t *testing.T) { // fill cache key := constSplitter(day).GenerateCacheKey("1", req) - rc.(*resultsCache).put(ctx, key, []*Extent{mkExtent(int64(modelNow)-(600*1e3), int64(modelNow))}) + rc.(*resultsCache).put(ctx, key, []Extent{mkExtent(int64(modelNow)-(600*1e3), int64(modelNow))}) resp, err := rc.Do(ctx, req) require.NoError(t, err) @@ -1221,13 +1221,13 @@ 
func Test_resultsCache_MissingData(t *testing.T) { ctx := context.Background() // fill up the cache - rc.put(ctx, "empty", []*Extent{{ + rc.put(ctx, "empty", []Extent{{ Start: 100, End: 200, Response: nil, }}) - rc.put(ctx, "notempty", []*Extent{mkExtent(100, 120)}) - rc.put(ctx, "mixed", []*Extent{mkExtent(100, 120), { + rc.put(ctx, "notempty", []Extent{mkExtent(100, 120)}) + rc.put(ctx, "mixed", []Extent{mkExtent(100, 120), { Start: 120, End: 200, Response: nil, @@ -1399,8 +1399,8 @@ func (mockCacheGenNumberLoader) GetResultsCacheGenNumber(tenantIDs []string) str return "" } -func genSampleHistogram() *SampleHistogram { - return &SampleHistogram{ +func genSampleHistogram() SampleHistogram { + return SampleHistogram{ Count: 5, Sum: 18.4, Buckets: []*HistogramBucket{ diff --git a/internal/cortex/querier/queryrange/split_by_interval_test.go b/internal/cortex/querier/queryrange/split_by_interval_test.go index 25b8fcf1b3..00654a911c 100644 --- a/internal/cortex/querier/queryrange/split_by_interval_test.go +++ b/internal/cortex/querier/queryrange/split_by_interval_test.go @@ -13,8 +13,6 @@ import ( "testing" "time" - "github.com/prometheus/common/model" - "github.com/thanos-io/thanos/internal/cortex/cortexpb" "github.com/thanos-io/thanos/pkg/extpromql" "github.com/stretchr/testify/require" @@ -269,27 +267,6 @@ func TestSplitQuery(t *testing.T) { } func TestSplitByDay(t *testing.T) { - // NOTE(GiedriusS): we need to copy this because the original is modified. - parsedResponse := &PrometheusResponse{ - Status: "success", - Data: &PrometheusData{ - ResultType: model.ValMatrix.String(), - Analysis: (*Analysis)(nil), - Result: []*SampleStream{ - { - Labels: []*cortexpb.LabelPair{ - {Name: []byte("foo"), Value: []byte("bar")}, - }, - Samples: []*cortexpb.Sample{ - {Value: 137, TimestampMs: 1536673680000}, - {Value: 137, TimestampMs: 1536673780000}, - }, - }, - }, - }, - Warnings: []string{"test-warn"}, - } - mergedResponse, err := PrometheusCodec.MergeResponse(nil, parsedResponse, parsedResponse) require.NoError(t, err) diff --git a/internal/cortex/querier/stats/stats.proto b/internal/cortex/querier/stats/stats.proto index 51993f3b7b..6c462169a2 100644 --- a/internal/cortex/querier/stats/stats.proto +++ b/internal/cortex/querier/stats/stats.proto @@ -7,11 +7,15 @@ package stats; option go_package = "stats"; +import "github.com/gogo/protobuf/gogoproto/gogo.proto"; import "google/protobuf/duration.proto"; +option (gogoproto.marshaler_all) = true; +option (gogoproto.unmarshaler_all) = true; + message Stats { // The sum of all wall time spent in the querier to execute the query. 
- google.protobuf.Duration wall_time = 1; + google.protobuf.Duration wall_time = 1 [(gogoproto.stdduration) = true, (gogoproto.nullable) = false]; // The number of series fetched for the query uint64 fetched_series_count = 2; // The number of bytes of the chunks fetched for the query diff --git a/pkg/api/query/grpc.go b/pkg/api/query/grpc.go index 5d613ddadd..7d43022f53 100644 --- a/pkg/api/query/grpc.go +++ b/pkg/api/query/grpc.go @@ -121,7 +121,7 @@ func (g *GRPCAPI) Query(request *querypb.QueryRequest, server querypb.Query_Quer switch vector := result.Value.(type) { case promql.Scalar: series := &prompb.TimeSeries{ - Samples: []*prompb.Sample{{Value: vector.V, Timestamp: vector.T}}, + Samples: []prompb.Sample{{Value: vector.V, Timestamp: vector.T}}, } if err := server.Send(querypb.NewQueryResponse(series)); err != nil { return err @@ -265,7 +265,7 @@ func (g *GRPCAPI) QueryRange(request *querypb.QueryRangeRequest, srv querypb.Que } case promql.Scalar: series := &prompb.TimeSeries{ - Samples: []*prompb.Sample{{Value: value.V, Timestamp: value.T}}, + Samples: []prompb.Sample{{Value: value.V, Timestamp: value.T}}, } if err := srv.Send(querypb.NewQueryRangeResponse(series)); err != nil { return err diff --git a/pkg/api/query/querypb/query.pb.go b/pkg/api/query/querypb/query.pb.go index ac1f87a00f..ebeec71d80 100644 --- a/pkg/api/query/querypb/query.pb.go +++ b/pkg/api/query/querypb/query.pb.go @@ -10,6 +10,7 @@ import ( math "math" math_bits "math/bits" + _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" storepb "github.com/thanos-io/thanos/pkg/store/storepb" prompb "github.com/thanos-io/thanos/pkg/store/storepb/prompb" @@ -58,11 +59,8 @@ func (EngineType) EnumDescriptor() ([]byte, []int) { } type QueryStats struct { - SamplesTotal int64 `protobuf:"varint,1,opt,name=samples_total,json=samplesTotal,proto3" json:"samples_total,omitempty"` - PeakSamples int64 `protobuf:"varint,2,opt,name=peak_samples,json=peakSamples,proto3" json:"peak_samples,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + SamplesTotal int64 `protobuf:"varint,1,opt,name=samples_total,json=samplesTotal,proto3" json:"samples_total,omitempty"` + PeakSamples int64 `protobuf:"varint,2,opt,name=peak_samples,json=peakSamples,proto3" json:"peak_samples,omitempty"` } func (m *QueryStats) Reset() { *m = QueryStats{} } @@ -98,20 +96,6 @@ func (m *QueryStats) XXX_DiscardUnknown() { var xxx_messageInfo_QueryStats proto.InternalMessageInfo -func (m *QueryStats) GetSamplesTotal() int64 { - if m != nil { - return m.SamplesTotal - } - return 0 -} - -func (m *QueryStats) GetPeakSamples() int64 { - if m != nil { - return m.PeakSamples - } - return 0 -} - type QueryRequest struct { Query string `protobuf:"bytes,1,opt,name=query,proto3" json:"query,omitempty"` QueryPlan *QueryPlan `protobuf:"bytes,14,opt,name=queryPlan,proto3" json:"queryPlan,omitempty"` @@ -119,16 +103,13 @@ type QueryRequest struct { TimeoutSeconds int64 `protobuf:"varint,3,opt,name=timeout_seconds,json=timeoutSeconds,proto3" json:"timeout_seconds,omitempty"` MaxResolutionSeconds int64 `protobuf:"varint,4,opt,name=max_resolution_seconds,json=maxResolutionSeconds,proto3" json:"max_resolution_seconds,omitempty"` ReplicaLabels []string `protobuf:"bytes,5,rep,name=replica_labels,json=replicaLabels,proto3" json:"replica_labels,omitempty"` - StoreMatchers []*StoreMatchers `protobuf:"bytes,6,rep,name=storeMatchers,proto3" json:"storeMatchers,omitempty"` + StoreMatchers 
[]StoreMatchers `protobuf:"bytes,6,rep,name=storeMatchers,proto3" json:"storeMatchers"` EnableDedup bool `protobuf:"varint,7,opt,name=enableDedup,proto3" json:"enableDedup,omitempty"` EnablePartialResponse bool `protobuf:"varint,8,opt,name=enablePartialResponse,proto3" json:"enablePartialResponse,omitempty"` SkipChunks bool `protobuf:"varint,10,opt,name=skipChunks,proto3" json:"skipChunks,omitempty"` ShardInfo *storepb.ShardInfo `protobuf:"bytes,11,opt,name=shard_info,json=shardInfo,proto3" json:"shard_info,omitempty"` LookbackDeltaSeconds int64 `protobuf:"varint,12,opt,name=lookback_delta_seconds,json=lookbackDeltaSeconds,proto3" json:"lookback_delta_seconds,omitempty"` Engine EngineType `protobuf:"varint,13,opt,name=engine,proto3,enum=thanos.EngineType" json:"engine,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` } func (m *QueryRequest) Reset() { *m = QueryRequest{} } @@ -164,102 +145,8 @@ func (m *QueryRequest) XXX_DiscardUnknown() { var xxx_messageInfo_QueryRequest proto.InternalMessageInfo -func (m *QueryRequest) GetQuery() string { - if m != nil { - return m.Query - } - return "" -} - -func (m *QueryRequest) GetQueryPlan() *QueryPlan { - if m != nil { - return m.QueryPlan - } - return nil -} - -func (m *QueryRequest) GetTimeSeconds() int64 { - if m != nil { - return m.TimeSeconds - } - return 0 -} - -func (m *QueryRequest) GetTimeoutSeconds() int64 { - if m != nil { - return m.TimeoutSeconds - } - return 0 -} - -func (m *QueryRequest) GetMaxResolutionSeconds() int64 { - if m != nil { - return m.MaxResolutionSeconds - } - return 0 -} - -func (m *QueryRequest) GetReplicaLabels() []string { - if m != nil { - return m.ReplicaLabels - } - return nil -} - -func (m *QueryRequest) GetStoreMatchers() []*StoreMatchers { - if m != nil { - return m.StoreMatchers - } - return nil -} - -func (m *QueryRequest) GetEnableDedup() bool { - if m != nil { - return m.EnableDedup - } - return false -} - -func (m *QueryRequest) GetEnablePartialResponse() bool { - if m != nil { - return m.EnablePartialResponse - } - return false -} - -func (m *QueryRequest) GetSkipChunks() bool { - if m != nil { - return m.SkipChunks - } - return false -} - -func (m *QueryRequest) GetShardInfo() *storepb.ShardInfo { - if m != nil { - return m.ShardInfo - } - return nil -} - -func (m *QueryRequest) GetLookbackDeltaSeconds() int64 { - if m != nil { - return m.LookbackDeltaSeconds - } - return 0 -} - -func (m *QueryRequest) GetEngine() EngineType { - if m != nil { - return m.Engine - } - return EngineType_default -} - type StoreMatchers struct { - LabelMatchers []*storepb.LabelMatcher `protobuf:"bytes,1,rep,name=labelMatchers,proto3" json:"labelMatchers,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + LabelMatchers []storepb.LabelMatcher `protobuf:"bytes,1,rep,name=labelMatchers,proto3" json:"labelMatchers"` } func (m *StoreMatchers) Reset() { *m = StoreMatchers{} } @@ -295,22 +182,12 @@ func (m *StoreMatchers) XXX_DiscardUnknown() { var xxx_messageInfo_StoreMatchers proto.InternalMessageInfo -func (m *StoreMatchers) GetLabelMatchers() []*storepb.LabelMatcher { - if m != nil { - return m.LabelMatchers - } - return nil -} - type QueryResponse struct { // Types that are valid to be assigned to Result: // *QueryResponse_Warnings // *QueryResponse_Timeseries // *QueryResponse_Stats - Result isQueryResponse_Result `protobuf_oneof:"result"` - XXX_NoUnkeyedLiteral struct{} 
`json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Result isQueryResponse_Result `protobuf_oneof:"result"` } func (m *QueryResponse) Reset() { *m = QueryResponse{} } @@ -406,10 +283,7 @@ func (*QueryResponse) XXX_OneofWrappers() []interface{} { type QueryPlan struct { // Types that are valid to be assigned to Encoding: // *QueryPlan_Json - Encoding isQueryPlan_Encoding `protobuf_oneof:"encoding"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Encoding isQueryPlan_Encoding `protobuf_oneof:"encoding"` } func (m *QueryPlan) Reset() { *m = QueryPlan{} } @@ -487,16 +361,13 @@ type QueryRangeRequest struct { TimeoutSeconds int64 `protobuf:"varint,5,opt,name=timeout_seconds,json=timeoutSeconds,proto3" json:"timeout_seconds,omitempty"` MaxResolutionSeconds int64 `protobuf:"varint,6,opt,name=max_resolution_seconds,json=maxResolutionSeconds,proto3" json:"max_resolution_seconds,omitempty"` ReplicaLabels []string `protobuf:"bytes,7,rep,name=replica_labels,json=replicaLabels,proto3" json:"replica_labels,omitempty"` - StoreMatchers []*StoreMatchers `protobuf:"bytes,8,rep,name=storeMatchers,proto3" json:"storeMatchers,omitempty"` + StoreMatchers []StoreMatchers `protobuf:"bytes,8,rep,name=storeMatchers,proto3" json:"storeMatchers"` EnableDedup bool `protobuf:"varint,9,opt,name=enableDedup,proto3" json:"enableDedup,omitempty"` EnablePartialResponse bool `protobuf:"varint,10,opt,name=enablePartialResponse,proto3" json:"enablePartialResponse,omitempty"` SkipChunks bool `protobuf:"varint,12,opt,name=skipChunks,proto3" json:"skipChunks,omitempty"` ShardInfo *storepb.ShardInfo `protobuf:"bytes,13,opt,name=shard_info,json=shardInfo,proto3" json:"shard_info,omitempty"` LookbackDeltaSeconds int64 `protobuf:"varint,14,opt,name=lookback_delta_seconds,json=lookbackDeltaSeconds,proto3" json:"lookback_delta_seconds,omitempty"` Engine EngineType `protobuf:"varint,15,opt,name=engine,proto3,enum=thanos.EngineType" json:"engine,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` } func (m *QueryRangeRequest) Reset() { *m = QueryRangeRequest{} } @@ -532,120 +403,12 @@ func (m *QueryRangeRequest) XXX_DiscardUnknown() { var xxx_messageInfo_QueryRangeRequest proto.InternalMessageInfo -func (m *QueryRangeRequest) GetQuery() string { - if m != nil { - return m.Query - } - return "" -} - -func (m *QueryRangeRequest) GetQueryPlan() *QueryPlan { - if m != nil { - return m.QueryPlan - } - return nil -} - -func (m *QueryRangeRequest) GetStartTimeSeconds() int64 { - if m != nil { - return m.StartTimeSeconds - } - return 0 -} - -func (m *QueryRangeRequest) GetEndTimeSeconds() int64 { - if m != nil { - return m.EndTimeSeconds - } - return 0 -} - -func (m *QueryRangeRequest) GetIntervalSeconds() int64 { - if m != nil { - return m.IntervalSeconds - } - return 0 -} - -func (m *QueryRangeRequest) GetTimeoutSeconds() int64 { - if m != nil { - return m.TimeoutSeconds - } - return 0 -} - -func (m *QueryRangeRequest) GetMaxResolutionSeconds() int64 { - if m != nil { - return m.MaxResolutionSeconds - } - return 0 -} - -func (m *QueryRangeRequest) GetReplicaLabels() []string { - if m != nil { - return m.ReplicaLabels - } - return nil -} - -func (m *QueryRangeRequest) GetStoreMatchers() []*StoreMatchers { - if m != nil { - return m.StoreMatchers - } - return nil -} - -func (m *QueryRangeRequest) GetEnableDedup() bool { - if m != nil { - return m.EnableDedup - } - return false 
-} - -func (m *QueryRangeRequest) GetEnablePartialResponse() bool { - if m != nil { - return m.EnablePartialResponse - } - return false -} - -func (m *QueryRangeRequest) GetSkipChunks() bool { - if m != nil { - return m.SkipChunks - } - return false -} - -func (m *QueryRangeRequest) GetShardInfo() *storepb.ShardInfo { - if m != nil { - return m.ShardInfo - } - return nil -} - -func (m *QueryRangeRequest) GetLookbackDeltaSeconds() int64 { - if m != nil { - return m.LookbackDeltaSeconds - } - return 0 -} - -func (m *QueryRangeRequest) GetEngine() EngineType { - if m != nil { - return m.Engine - } - return EngineType_default -} - type QueryRangeResponse struct { // Types that are valid to be assigned to Result: // *QueryRangeResponse_Warnings // *QueryRangeResponse_Timeseries // *QueryRangeResponse_Stats - Result isQueryRangeResponse_Result `protobuf_oneof:"result"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Result isQueryRangeResponse_Result `protobuf_oneof:"result"` } func (m *QueryRangeResponse) Reset() { *m = QueryRangeResponse{} } @@ -752,58 +515,60 @@ func init() { func init() { proto.RegisterFile("api/query/querypb/query.proto", fileDescriptor_4b2aba43925d729f) } var fileDescriptor_4b2aba43925d729f = []byte{ - // 810 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xcc, 0x96, 0xdf, 0x6e, 0xe3, 0x44, - 0x14, 0xc6, 0xe3, 0x6d, 0x93, 0x26, 0xc7, 0x71, 0xea, 0x1d, 0xa5, 0xe0, 0x0d, 0x50, 0x99, 0x20, - 0x44, 0xa8, 0xa0, 0x5d, 0x85, 0x85, 0x0b, 0x10, 0x37, 0x65, 0x57, 0x0a, 0xb0, 0x48, 0xcb, 0x24, - 0x57, 0xdc, 0x58, 0x93, 0xf8, 0x34, 0x31, 0x71, 0x66, 0xbc, 0x9e, 0x31, 0x6c, 0x5e, 0x80, 0x07, - 0xe1, 0x86, 0x7b, 0x9e, 0x82, 0x1b, 0x24, 0x1e, 0x01, 0xf5, 0x49, 0x90, 0xc7, 0x7f, 0x62, 0x57, - 0x15, 0x24, 0x70, 0xc3, 0x8d, 0xeb, 0xf9, 0xbe, 0xcf, 0x33, 0x3d, 0x27, 0xe7, 0x27, 0x1b, 0xde, - 0x62, 0x51, 0x70, 0xf5, 0x32, 0xc1, 0x78, 0x9b, 0x5d, 0xa3, 0x79, 0xf6, 0xf7, 0x32, 0x8a, 0x85, - 0x12, 0xa4, 0xa5, 0x56, 0x8c, 0x0b, 0x39, 0x78, 0x24, 0x95, 0x88, 0xf1, 0x4a, 0x5f, 0xa3, 0xf9, - 0x95, 0xda, 0x46, 0x28, 0xb3, 0xc8, 0xe0, 0xf5, 0xba, 0x15, 0x47, 0x8b, 0xdc, 0x70, 0xeb, 0x46, - 0x14, 0x8b, 0x4d, 0xfd, 0xd1, 0xe1, 0x0c, 0xe0, 0xdb, 0xf4, 0xb0, 0xa9, 0x62, 0x4a, 0x92, 0x77, - 0xc0, 0x92, 0x6c, 0x13, 0x85, 0x28, 0x3d, 0x25, 0x14, 0x0b, 0x1d, 0xc3, 0x35, 0x46, 0x47, 0xb4, - 0x9b, 0x8b, 0xb3, 0x54, 0x23, 0x6f, 0x43, 0x37, 0x42, 0xb6, 0xf6, 0x72, 0xd1, 0x79, 0xa0, 0x33, - 0x66, 0xaa, 0x4d, 0x33, 0x69, 0xf8, 0xfb, 0x31, 0x74, 0xf5, 0xb6, 0x14, 0x5f, 0x26, 0x28, 0x15, - 0xe9, 0x43, 0x53, 0xd7, 0xa4, 0x37, 0xec, 0xd0, 0x6c, 0x41, 0xae, 0xa0, 0xa3, 0x6f, 0x5e, 0x84, - 0x8c, 0x3b, 0x3d, 0xd7, 0x18, 0x99, 0xe3, 0x87, 0x97, 0x59, 0xb9, 0x97, 0xa5, 0x41, 0x77, 0x99, - 0xf4, 0x68, 0x15, 0x6c, 0xd0, 0x93, 0xb8, 0x10, 0xdc, 0x2f, 0x8f, 0x4e, 0xb5, 0x69, 0x26, 0x91, - 0xf7, 0xe0, 0x34, 0x5d, 0x8a, 0x44, 0x95, 0xa9, 0x23, 0x9d, 0xea, 0xe5, 0x72, 0x11, 0x7c, 0x02, - 0xaf, 0x6d, 0xd8, 0x2b, 0x2f, 0x46, 0x29, 0xc2, 0x44, 0x05, 0x82, 0x97, 0xf9, 0x63, 0x9d, 0xef, - 0x6f, 0xd8, 0x2b, 0x5a, 0x9a, 0xc5, 0x53, 0xef, 0x42, 0x2f, 0xc6, 0x28, 0x0c, 0x16, 0xcc, 0x0b, - 0xd9, 0x1c, 0x43, 0xe9, 0x34, 0xdd, 0xa3, 0x51, 0x87, 0x5a, 0xb9, 0xfa, 0x5c, 0x8b, 0xe4, 0x33, - 0xb0, 0x74, 0xd3, 0xbf, 0x61, 0x6a, 0xb1, 0xc2, 0x58, 0x3a, 0x2d, 0xf7, 0x68, 0x64, 0x8e, 0xcf, - 0x8a, 0xea, 0xa6, 0x55, 0x93, 0xd6, 0xb3, 0xc4, 0x05, 0x13, 0x39, 0x9b, 0x87, 0xf8, 0x14, 0xfd, - 0x24, 0x72, 0x4e, 0x5c, 0x63, 0xd4, 0xa6, 
0x55, 0x89, 0x3c, 0x81, 0xb3, 0x6c, 0xf9, 0x82, 0xc5, - 0x2a, 0x60, 0x21, 0x45, 0x19, 0x09, 0x2e, 0xd1, 0x69, 0xeb, 0xec, 0xfd, 0x26, 0x39, 0x07, 0x90, - 0xeb, 0x20, 0xfa, 0x62, 0x95, 0xf0, 0xb5, 0x74, 0x40, 0x47, 0x2b, 0x0a, 0x79, 0x0c, 0x20, 0x57, - 0x2c, 0xf6, 0xbd, 0x80, 0xdf, 0x08, 0xc7, 0xac, 0xff, 0x1e, 0xd3, 0xd4, 0xf9, 0x92, 0xdf, 0x08, - 0xda, 0x91, 0xc5, 0x6d, 0xda, 0xc3, 0x50, 0x88, 0xf5, 0x9c, 0x2d, 0xd6, 0x9e, 0x8f, 0xa1, 0x62, - 0x65, 0x0f, 0xbb, 0x59, 0x0f, 0x0b, 0xf7, 0x69, 0x6a, 0x16, 0x3d, 0xbc, 0x80, 0x16, 0xf2, 0x65, - 0xc0, 0xd1, 0xb1, 0x5c, 0x63, 0xd4, 0x1b, 0x93, 0xe2, 0x8c, 0x67, 0x5a, 0x9d, 0x6d, 0x23, 0xa4, - 0x79, 0xe2, 0xab, 0xe3, 0x76, 0xc7, 0x86, 0xe1, 0xd7, 0x60, 0xd5, 0x3a, 0x46, 0x3e, 0x05, 0x4b, - 0xb7, 0xbf, 0xec, 0xaf, 0xa1, 0xfb, 0xdb, 0x2f, 0x76, 0x7a, 0x5e, 0x31, 0x69, 0x3d, 0x3a, 0xfc, - 0xd9, 0x00, 0x2b, 0x1f, 0xce, 0xbc, 0x31, 0x6f, 0x42, 0xfb, 0x47, 0x16, 0xf3, 0x80, 0x2f, 0x65, - 0x36, 0xa0, 0x93, 0x06, 0x2d, 0x15, 0xf2, 0x39, 0x40, 0x3a, 0x3a, 0x12, 0xe3, 0x20, 0x9f, 0x76, - 0x73, 0xfc, 0x46, 0x8a, 0xcf, 0x06, 0xd5, 0x0a, 0x13, 0xe9, 0x2d, 0x44, 0xb4, 0xbd, 0x9c, 0xe9, - 0x19, 0x4c, 0x23, 0x93, 0x06, 0xad, 0x3c, 0x40, 0x2e, 0xa0, 0x29, 0x53, 0xb8, 0xf4, 0x18, 0x9a, - 0xbb, 0x62, 0x77, 0xd8, 0x4d, 0x1a, 0x34, 0x8b, 0x5c, 0xb7, 0xa1, 0x15, 0xa3, 0x4c, 0x42, 0x35, - 0xfc, 0xb0, 0x82, 0x06, 0xe9, 0xc3, 0xf1, 0xf7, 0x52, 0x70, 0xfd, 0xbf, 0x75, 0x27, 0x0d, 0xaa, - 0x57, 0xd7, 0x00, 0x6d, 0xe4, 0x0b, 0xe1, 0x07, 0x7c, 0x39, 0xfc, 0xb5, 0x09, 0x0f, 0xb3, 0x9a, - 0x18, 0x5f, 0xe2, 0x01, 0xd4, 0xd9, 0x7b, 0x50, 0xf7, 0x01, 0x10, 0xa9, 0x58, 0xac, 0xbc, 0x7b, - 0xd8, 0xb3, 0xb5, 0x33, 0xab, 0x00, 0x38, 0x02, 0x1b, 0xb9, 0x5f, 0xcf, 0xe6, 0x04, 0x22, 0xf7, - 0xab, 0xc9, 0xf7, 0xc1, 0x0e, 0xb8, 0xc2, 0xf8, 0x07, 0x16, 0xde, 0x61, 0xef, 0xb4, 0xd0, 0xff, - 0x86, 0xea, 0xe6, 0x81, 0x54, 0xb7, 0x0e, 0xa2, 0xfa, 0x64, 0x2f, 0xaa, 0xdb, 0xff, 0x9e, 0xea, - 0xce, 0x01, 0x54, 0xc3, 0xfe, 0x54, 0x77, 0xff, 0x81, 0x6a, 0xeb, 0x3f, 0x51, 0xdd, 0xdb, 0x8b, - 0xea, 0xd3, 0x3d, 0xa8, 0x36, 0xed, 0xee, 0xf0, 0x17, 0x03, 0x48, 0x75, 0x68, 0xff, 0xb7, 0x34, - 0x5e, 0x7c, 0x0c, 0xb0, 0xab, 0x82, 0x98, 0x70, 0xe2, 0xe3, 0x0d, 0x4b, 0x42, 0x65, 0x37, 0x48, - 0x0f, 0x60, 0x77, 0xb8, 0x6d, 0x10, 0x80, 0xfc, 0x85, 0x6d, 0x3f, 0x18, 0xff, 0x64, 0x40, 0x53, - 0x6f, 0x4c, 0x3e, 0x29, 0x6e, 0xfa, 0xb5, 0x03, 0x73, 0x50, 0x07, 0x67, 0x77, 0xd4, 0xac, 0x13, - 0x8f, 0x0d, 0xf2, 0x2c, 0x7f, 0x3d, 0xeb, 0x0e, 0x91, 0x47, 0xf5, 0x58, 0x05, 0xf5, 0xc1, 0xe0, - 0x3e, 0xab, 0xd8, 0xe6, 0xfa, 0xec, 0xb7, 0xdb, 0x73, 0xe3, 0x8f, 0xdb, 0x73, 0xe3, 0xcf, 0xdb, - 0x73, 0xe3, 0xbb, 0x93, 0xfc, 0x33, 0x63, 0xde, 0xd2, 0xdf, 0x00, 0x1f, 0xfd, 0x15, 0x00, 0x00, - 0xff, 0xff, 0xb4, 0x96, 0x88, 0xc8, 0x82, 0x08, 0x00, 0x00, + // 848 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xcc, 0x56, 0xcf, 0x8e, 0xdb, 0x44, + 0x1c, 0xb6, 0xbb, 0x49, 0x36, 0xf9, 0x39, 0xc9, 0xba, 0xa3, 0x2c, 0xb8, 0x01, 0x82, 0x09, 0xaa, + 0x08, 0x2b, 0xd8, 0x54, 0xa1, 0x70, 0x43, 0x82, 0xa5, 0x95, 0x16, 0x54, 0xa4, 0xd6, 0xc9, 0x89, + 0x8b, 0x35, 0x89, 0x7f, 0x9b, 0x98, 0x38, 0x33, 0xae, 0x67, 0x0c, 0xdd, 0x17, 0xe0, 0xcc, 0x33, + 0x70, 0xe1, 0x15, 0x78, 0x84, 0x3d, 0x56, 0xe2, 0xc2, 0x09, 0xc1, 0xee, 0x8b, 0x20, 0x8f, 0xff, + 0xc4, 0xae, 0x56, 0x90, 0xa8, 0x17, 0x2e, 0xf6, 0xcc, 0xf7, 0x7d, 0x33, 0xe3, 0xdf, 0xe7, 0xdf, + 0x27, 0x1b, 0xde, 0xa1, 0xa1, 0x3f, 0x7e, 0x1e, 0x63, 0x74, 0x99, 0x5e, 0xc3, 0x79, 0x7a, 0x3f, + 0x0d, 0x23, 0x2e, 0x39, 0x69, 0xc8, 0x15, 0x65, 0x5c, 
0xf4, 0x7b, 0x4b, 0xbe, 0xe4, 0x0a, 0x1a, + 0x27, 0xa3, 0x94, 0xed, 0xdf, 0x13, 0x92, 0x47, 0x38, 0x56, 0xd7, 0x70, 0x3e, 0x96, 0x97, 0x21, + 0x8a, 0x8c, 0x7a, 0xb3, 0x4a, 0x45, 0xe1, 0x22, 0x23, 0xec, 0x2a, 0x11, 0x46, 0x7c, 0x53, 0x5d, + 0x3a, 0x9c, 0x01, 0x3c, 0x4b, 0x1e, 0x61, 0x2a, 0xa9, 0x14, 0xe4, 0x7d, 0xe8, 0x08, 0xba, 0x09, + 0x03, 0x14, 0xae, 0xe4, 0x92, 0x06, 0x96, 0x6e, 0xeb, 0xa3, 0x03, 0xa7, 0x9d, 0x81, 0xb3, 0x04, + 0x23, 0xef, 0x41, 0x3b, 0x44, 0xba, 0x76, 0x33, 0xd0, 0xba, 0xa3, 0x34, 0x46, 0x82, 0x4d, 0x53, + 0x68, 0xf8, 0x7b, 0x0d, 0xda, 0x6a, 0x5b, 0x07, 0x9f, 0xc7, 0x28, 0x24, 0xe9, 0x41, 0x5d, 0x55, + 0xaa, 0x36, 0x6c, 0x39, 0xe9, 0x84, 0x8c, 0xa1, 0xa5, 0x06, 0x4f, 0x03, 0xca, 0xac, 0xae, 0xad, + 0x8f, 0x8c, 0xc9, 0xdd, 0xd3, 0xd4, 0x84, 0xd3, 0x82, 0x70, 0xb6, 0x9a, 0xe4, 0x68, 0xe9, 0x6f, + 0xd0, 0x15, 0xb8, 0xe0, 0xcc, 0x2b, 0x8e, 0x4e, 0xb0, 0x69, 0x0a, 0x91, 0x0f, 0xe0, 0x28, 0x99, + 0xf2, 0x58, 0x16, 0xaa, 0x03, 0xa5, 0xea, 0x66, 0x70, 0x2e, 0x7c, 0x08, 0x6f, 0x6c, 0xe8, 0x0b, + 0x37, 0x42, 0xc1, 0x83, 0x58, 0xfa, 0x9c, 0x15, 0xfa, 0x9a, 0xd2, 0xf7, 0x36, 0xf4, 0x85, 0x53, + 0x90, 0xf9, 0xaa, 0xfb, 0xd0, 0x8d, 0x30, 0x0c, 0xfc, 0x05, 0x75, 0x03, 0x3a, 0xc7, 0x40, 0x58, + 0x75, 0xfb, 0x60, 0xd4, 0x72, 0x3a, 0x19, 0xfa, 0x44, 0x81, 0xe4, 0x4b, 0xe8, 0x28, 0xd3, 0xbf, + 0xa5, 0x72, 0xb1, 0xc2, 0x48, 0x58, 0x0d, 0xfb, 0x60, 0x64, 0x4c, 0x8e, 0xf3, 0xea, 0xa6, 0x65, + 0xf2, 0xac, 0x76, 0xf5, 0xe7, 0xbb, 0x9a, 0x53, 0x5d, 0x41, 0x6c, 0x30, 0x90, 0xd1, 0x79, 0x80, + 0x8f, 0xd0, 0x8b, 0x43, 0xeb, 0xd0, 0xd6, 0x47, 0x4d, 0xa7, 0x0c, 0x91, 0x87, 0x70, 0x9c, 0x4e, + 0x9f, 0xd2, 0x48, 0xfa, 0x34, 0x70, 0x50, 0x84, 0x9c, 0x09, 0xb4, 0x9a, 0x4a, 0x7b, 0x3b, 0x49, + 0x06, 0x00, 0x62, 0xed, 0x87, 0x5f, 0xad, 0x62, 0xb6, 0x16, 0x16, 0x28, 0x69, 0x09, 0x21, 0x0f, + 0x00, 0xc4, 0x8a, 0x46, 0x9e, 0xeb, 0xb3, 0x0b, 0x6e, 0x19, 0xd5, 0xb7, 0x32, 0x4d, 0x98, 0xaf, + 0xd9, 0x05, 0x77, 0x5a, 0x22, 0x1f, 0x26, 0x4e, 0x06, 0x9c, 0xaf, 0xe7, 0x74, 0xb1, 0x76, 0x3d, + 0x0c, 0x24, 0x2d, 0x9c, 0x6c, 0xa7, 0x4e, 0xe6, 0xec, 0xa3, 0x84, 0xcc, 0x9d, 0x3c, 0x81, 0x06, + 0xb2, 0xa5, 0xcf, 0xd0, 0xea, 0xd8, 0xfa, 0xa8, 0x3b, 0x21, 0xf9, 0x19, 0x8f, 0x15, 0x3a, 0xbb, + 0x0c, 0xd1, 0xc9, 0x14, 0xdf, 0xd4, 0x9a, 0x2d, 0x13, 0x86, 0xcf, 0xa0, 0x53, 0xf1, 0x8d, 0x7c, + 0x01, 0x1d, 0xf5, 0x12, 0x0a, 0x97, 0x75, 0xe5, 0x72, 0x2f, 0xdf, 0xe9, 0x49, 0x89, 0xcc, 0x4d, + 0xae, 0x2c, 0x18, 0xfe, 0xa2, 0x43, 0x27, 0x6b, 0xd4, 0xcc, 0x9e, 0xb7, 0xa1, 0xf9, 0x23, 0x8d, + 0x98, 0xcf, 0x96, 0x22, 0x6d, 0xd6, 0x73, 0xcd, 0x29, 0x10, 0xf2, 0x39, 0x40, 0xd2, 0x46, 0x02, + 0x23, 0x3f, 0xeb, 0x7c, 0x63, 0xf2, 0x56, 0x12, 0xa5, 0x0d, 0xca, 0x15, 0xc6, 0xc2, 0x5d, 0xf0, + 0xf0, 0xf2, 0x74, 0xa6, 0xfa, 0x31, 0x91, 0x9c, 0x6b, 0x4e, 0x69, 0x01, 0x39, 0x81, 0xba, 0x48, + 0x82, 0xa6, 0x5a, 0xd2, 0xd8, 0x96, 0xbc, 0x8d, 0xe0, 0xb9, 0xe6, 0xa4, 0x92, 0xb3, 0x26, 0x34, + 0x22, 0x14, 0x71, 0x20, 0x87, 0x1f, 0x97, 0x62, 0x42, 0x7a, 0x50, 0xfb, 0x5e, 0x70, 0xa6, 0x9e, + 0xad, 0x7d, 0xae, 0x39, 0x6a, 0x76, 0x06, 0xd0, 0x44, 0xb6, 0xe0, 0x9e, 0xcf, 0x96, 0xc3, 0xdf, + 0xea, 0x70, 0x37, 0xad, 0x89, 0xb2, 0x25, 0xee, 0x91, 0x40, 0x73, 0x87, 0x04, 0x7e, 0x04, 0x44, + 0x48, 0x1a, 0x49, 0xf7, 0x96, 0x1c, 0x9a, 0x8a, 0x99, 0x95, 0xc2, 0x38, 0x02, 0x13, 0x99, 0x57, + 0xd5, 0x66, 0x69, 0x44, 0xe6, 0x95, 0x95, 0x1f, 0x82, 0xe9, 0x33, 0x89, 0xd1, 0x0f, 0x34, 0x78, + 0x25, 0x87, 0x47, 0x39, 0xfe, 0x2f, 0x09, 0xaf, 0xef, 0x99, 0xf0, 0xc6, 0x5e, 0x09, 0x3f, 0xdc, + 0x29, 0xe1, 0xcd, 0xd7, 0x4d, 0x78, 0x6b, 0x8f, 0x84, 0xc3, 0xee, 0x09, 0x6f, 
0xff, 0x47, 0xc2, + 0x3b, 0xaf, 0x95, 0xf0, 0xee, 0x4e, 0x09, 0x3f, 0xda, 0x21, 0xe1, 0x86, 0xd9, 0x1e, 0xfe, 0xaa, + 0x03, 0x29, 0xb7, 0xee, 0xff, 0x36, 0x93, 0x27, 0x9f, 0x02, 0x6c, 0xab, 0x20, 0x06, 0x1c, 0x7a, + 0x78, 0x41, 0xe3, 0x40, 0x9a, 0x1a, 0xe9, 0x02, 0x6c, 0x0f, 0x37, 0x75, 0x02, 0x90, 0x7d, 0xd8, + 0xcd, 0x3b, 0x93, 0x9f, 0x74, 0xa8, 0xab, 0x8d, 0xc9, 0x67, 0xf9, 0xa0, 0x57, 0x39, 0x30, 0x8b, + 0x6b, 0xff, 0xf8, 0x15, 0x34, 0x75, 0xe2, 0x81, 0x4e, 0x1e, 0x67, 0x1f, 0x6c, 0xe5, 0x10, 0xb9, + 0x57, 0x95, 0x95, 0x02, 0xdf, 0xef, 0xdf, 0x46, 0xe5, 0xdb, 0x9c, 0xdd, 0xbf, 0xfa, 0x7b, 0xa0, + 0x5d, 0x5d, 0x0f, 0xf4, 0x97, 0xd7, 0x03, 0xfd, 0xaf, 0xeb, 0x81, 0xfe, 0xf3, 0xcd, 0x40, 0x7b, + 0x79, 0x33, 0xd0, 0xfe, 0xb8, 0x19, 0x68, 0xdf, 0x1d, 0x66, 0xbf, 0x27, 0xf3, 0x86, 0xfa, 0x4b, + 0xf8, 0xe4, 0x9f, 0x00, 0x00, 0x00, 0xff, 0xff, 0x3b, 0x44, 0xb0, 0xce, 0xba, 0x08, 0x00, 0x00, } // Reference imports to suppress errors if they are not otherwise used. @@ -996,10 +761,6 @@ func (m *QueryStats) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if m.PeakSamples != 0 { i = encodeVarintQuery(dAtA, i, uint64(m.PeakSamples)) i-- @@ -1033,10 +794,6 @@ func (m *QueryRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if m.QueryPlan != nil { { size, err := m.QueryPlan.MarshalToSizedBuffer(dAtA[:i]) @@ -1169,10 +926,6 @@ func (m *StoreMatchers) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if len(m.LabelMatchers) > 0 { for iNdEx := len(m.LabelMatchers) - 1; iNdEx >= 0; iNdEx-- { { @@ -1210,10 +963,6 @@ func (m *QueryResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if m.Result != nil { { size := m.Result.Size() @@ -1302,10 +1051,6 @@ func (m *QueryPlan) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if m.Encoding != nil { { size := m.Encoding.Size() @@ -1354,10 +1099,6 @@ func (m *QueryRangeRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if m.QueryPlan != nil { { size, err := m.QueryPlan.MarshalToSizedBuffer(dAtA[:i]) @@ -1502,10 +1243,6 @@ func (m *QueryRangeResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if m.Result != nil { { size := m.Result.Size() @@ -1597,9 +1334,6 @@ func (m *QueryStats) Size() (n int) { if m.PeakSamples != 0 { n += 1 + sovQuery(uint64(m.PeakSamples)) } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -1657,9 +1391,6 @@ func (m *QueryRequest) Size() (n int) { l = m.QueryPlan.Size() n += 1 + l + sovQuery(uint64(l)) } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -1675,9 +1406,6 @@ func (m *StoreMatchers) Size() (n int) { n += 1 + l + sovQuery(uint64(l)) } } - if m.XXX_unrecognized != nil { - 
n += len(m.XXX_unrecognized) - } return n } @@ -1690,9 +1418,6 @@ func (m *QueryResponse) Size() (n int) { if m.Result != nil { n += m.Result.Size() } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -1739,9 +1464,6 @@ func (m *QueryPlan) Size() (n int) { if m.Encoding != nil { n += m.Encoding.Size() } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -1817,9 +1539,6 @@ func (m *QueryRangeRequest) Size() (n int) { l = m.QueryPlan.Size() n += 2 + l + sovQuery(uint64(l)) } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -1832,9 +1551,6 @@ func (m *QueryRangeResponse) Size() (n int) { if m.Result != nil { n += m.Result.Size() } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -1958,7 +1674,6 @@ func (m *QueryStats) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } @@ -2147,7 +1862,7 @@ func (m *QueryRequest) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.StoreMatchers = append(m.StoreMatchers, &StoreMatchers{}) + m.StoreMatchers = append(m.StoreMatchers, StoreMatchers{}) if err := m.StoreMatchers[len(m.StoreMatchers)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -2334,7 +2049,6 @@ func (m *QueryRequest) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } @@ -2402,7 +2116,7 @@ func (m *StoreMatchers) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.LabelMatchers = append(m.LabelMatchers, &storepb.LabelMatcher{}) + m.LabelMatchers = append(m.LabelMatchers, storepb.LabelMatcher{}) if err := m.LabelMatchers[len(m.LabelMatchers)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -2419,7 +2133,6 @@ func (m *StoreMatchers) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } @@ -2572,7 +2285,6 @@ func (m *QueryResponse) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } @@ -2656,7 +2368,6 @@ func (m *QueryPlan) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } @@ -2883,7 +2594,7 @@ func (m *QueryRangeRequest) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.StoreMatchers = append(m.StoreMatchers, &StoreMatchers{}) + m.StoreMatchers = append(m.StoreMatchers, StoreMatchers{}) if err := m.StoreMatchers[len(m.StoreMatchers)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -3070,7 +2781,6 @@ func (m *QueryRangeRequest) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } @@ -3223,7 +2933,6 @@ func (m *QueryRangeResponse) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) 
iNdEx += skippy } } diff --git a/pkg/api/query/querypb/query.proto b/pkg/api/query/querypb/query.proto index a612adb4f0..6a55365a1b 100644 --- a/pkg/api/query/querypb/query.proto +++ b/pkg/api/query/querypb/query.proto @@ -6,10 +6,22 @@ package thanos; option go_package = "querypb"; +import "gogoproto/gogo.proto"; import "store/storepb/types.proto"; import "store/storepb/rpc.proto"; import "store/storepb/prompb/types.proto"; +option (gogoproto.sizer_all) = true; +option (gogoproto.marshaler_all) = true; +option (gogoproto.unmarshaler_all) = true; +option (gogoproto.goproto_getters_all) = false; + +// Do not generate XXX fields to reduce memory footprint and to open the door +// for zero-copy casts to/from prometheus data types. +option (gogoproto.goproto_unkeyed_all) = false; +option (gogoproto.goproto_unrecognized_all) = false; +option (gogoproto.goproto_sizecache_all) = false; + enum EngineType { default = 0; prometheus = 1; @@ -31,7 +43,7 @@ message QueryRequest { repeated string replica_labels = 5; - repeated StoreMatchers storeMatchers = 6; + repeated StoreMatchers storeMatchers = 6 [(gogoproto.nullable) = false]; bool enableDedup = 7; bool enablePartialResponse = 8; @@ -46,7 +58,7 @@ message QueryRequest { } message StoreMatchers { - repeated LabelMatcher labelMatchers = 1; + repeated LabelMatcher labelMatchers = 1 [(gogoproto.nullable) = false]; } message QueryResponse { @@ -81,7 +93,7 @@ message QueryRangeRequest { repeated string replica_labels = 7; - repeated StoreMatchers storeMatchers = 8; + repeated StoreMatchers storeMatchers = 8 [(gogoproto.nullable) = false]; bool enableDedup = 9; bool enablePartialResponse = 10; diff --git a/pkg/api/query/querypb/store_matchers.go b/pkg/api/query/querypb/store_matchers.go index 1593295806..41b8f223cb 100644 --- a/pkg/api/query/querypb/store_matchers.go +++ b/pkg/api/query/querypb/store_matchers.go @@ -8,7 +8,7 @@ import ( "github.com/thanos-io/thanos/pkg/store/storepb" ) -func StoreMatchersToLabelMatchers(matchers []*StoreMatchers) ([][]*labels.Matcher, error) { +func StoreMatchersToLabelMatchers(matchers []StoreMatchers) ([][]*labels.Matcher, error) { if len(matchers) == 0 { return nil, nil } diff --git a/pkg/api/query/v1.go b/pkg/api/query/v1.go index 8d3969cb0e..7d6d85e28d 100644 --- a/pkg/api/query/v1.go +++ b/pkg/api/query/v1.go @@ -1618,7 +1618,7 @@ func NewMetricMetadataHandler(client metadata.UnaryClient, enablePartialResponse defer span.Finish() var ( - t map[string][]*metadatapb.Meta + t map[string][]metadatapb.Meta warnings annotations.Annotations err error ) diff --git a/pkg/api/query/v1_test.go b/pkg/api/query/v1_test.go index 655a195e9c..bc94d4d35b 100644 --- a/pkg/api/query/v1_test.go +++ b/pkg/api/query/v1_test.go @@ -1782,48 +1782,48 @@ func TestParseLimitParam(t *testing.T) { } func TestRulesHandler(t *testing.T) { - twoHAgo := rulespb.TimeToTimestamp(time.Now().Add(-2 * time.Hour)) + twoHAgo := time.Now().Add(-2 * time.Hour) all := []*rulespb.Rule{ rulespb.NewRecordingRule(&rulespb.RecordingRule{ Name: "1", - LastEvaluation: rulespb.TimeToTimestamp(time.Time{}.Add(1 * time.Minute)), + LastEvaluation: time.Time{}.Add(1 * time.Minute), EvaluationDurationSeconds: 12, Health: "x", Query: "sum(up)", - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{{Name: "some", Value: "label"}}}, + Labels: labelpb.LabelSet{Labels: []labelpb.Label{{Name: "some", Value: "label"}}}, LastError: "err1", }), rulespb.NewRecordingRule(&rulespb.RecordingRule{ Name: "2", - LastEvaluation: rulespb.TimeToTimestamp(time.Time{}.Add(2 * time.Minute)), + 
LastEvaluation: time.Time{}.Add(2 * time.Minute), EvaluationDurationSeconds: 12, Health: "x", Query: "sum(up1)", - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{{Name: "some", Value: "label2"}}}, + Labels: labelpb.LabelSet{Labels: []labelpb.Label{{Name: "some", Value: "label2"}}}, }), rulespb.NewAlertingRule(&rulespb.Alert{ Name: "3", - LastEvaluation: rulespb.TimeToTimestamp(time.Time{}.Add(3 * time.Minute)), + LastEvaluation: time.Time{}.Add(3 * time.Minute), EvaluationDurationSeconds: 12, Health: "x", Query: "sum(up2) == 2", DurationSeconds: 101, KeepFiringForSeconds: 102, - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{{Name: "some", Value: "label3"}}}, - Annotations: &labelpb.LabelSet{Labels: []*labelpb.Label{{Name: "ann", Value: "a1"}}}, + Labels: labelpb.LabelSet{Labels: []labelpb.Label{{Name: "some", Value: "label3"}}}, + Annotations: labelpb.LabelSet{Labels: []labelpb.Label{{Name: "ann", Value: "a1"}}}, Alerts: []*rulespb.AlertInstance{ { - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{{Name: "inside", Value: "1"}}}, - Annotations: &labelpb.LabelSet{Labels: []*labelpb.Label{{Name: "insideann", Value: "2"}}}, + Labels: labelpb.LabelSet{Labels: []labelpb.Label{{Name: "inside", Value: "1"}}}, + Annotations: labelpb.LabelSet{Labels: []labelpb.Label{{Name: "insideann", Value: "2"}}}, State: rulespb.AlertState_FIRING, - ActiveAt: twoHAgo, + ActiveAt: &twoHAgo, Value: "1", // This is unlikely if groups is warn, but test nevertheless. PartialResponseStrategy: storepb.PartialResponseStrategy_ABORT, }, { - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{{Name: "inside", Value: "3"}}}, - Annotations: &labelpb.LabelSet{Labels: []*labelpb.Label{{Name: "insideann", Value: "4"}}}, + Labels: labelpb.LabelSet{Labels: []labelpb.Label{{Name: "inside", Value: "3"}}}, + Annotations: labelpb.LabelSet{Labels: []labelpb.Label{{Name: "insideann", Value: "4"}}}, State: rulespb.AlertState_PENDING, ActiveAt: nil, Value: "2", @@ -1835,24 +1835,24 @@ func TestRulesHandler(t *testing.T) { }), rulespb.NewAlertingRule(&rulespb.Alert{ Name: "4", - LastEvaluation: rulespb.TimeToTimestamp(time.Time{}.Add(4 * time.Minute)), + LastEvaluation: time.Time{}.Add(4 * time.Minute), EvaluationDurationSeconds: 122, Health: "x", DurationSeconds: 102, KeepFiringForSeconds: 103, Query: "sum(up3) == 3", - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{{Name: "some", Value: "label4"}}}, + Labels: labelpb.LabelSet{Labels: []labelpb.Label{{Name: "some", Value: "label4"}}}, State: rulespb.AlertState_INACTIVE, }), rulespb.NewAlertingRule(&rulespb.Alert{ Name: "5", - LastEvaluation: rulespb.TimeToTimestamp(time.Time{}.Add(4 * time.Minute)), + LastEvaluation: time.Time{}.Add(4 * time.Minute), EvaluationDurationSeconds: 122, Health: "x", DurationSeconds: 61, KeepFiringForSeconds: 62, Query: "sum(up4) == 4", - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{{Name: "some", Value: "label5"}}}, + Labels: labelpb.LabelSet{Labels: []labelpb.Label{{Name: "some", Value: "label5"}}}, State: rulespb.AlertState_INACTIVE, }), } @@ -1866,7 +1866,7 @@ func TestRulesHandler(t *testing.T) { Rules: all, Interval: 1, EvaluationDurationSeconds: 214, - LastEvaluation: rulespb.TimeToTimestamp(time.Time{}.Add(10 * time.Minute)), + LastEvaluation: time.Time{}.Add(10 * time.Minute), PartialResponseStrategy: storepb.PartialResponseStrategy_WARN, }, { @@ -1875,7 +1875,7 @@ func TestRulesHandler(t *testing.T) { Rules: all[3:], Interval: 10, EvaluationDurationSeconds: 2142, - LastEvaluation: rulespb.TimeToTimestamp(time.Time{}.Add(100 * 
time.Minute)), + LastEvaluation: time.Time{}.Add(100 * time.Minute), PartialResponseStrategy: storepb.PartialResponseStrategy_ABORT, }, }, @@ -1886,7 +1886,7 @@ func TestRulesHandler(t *testing.T) { Rules: all[:2], Interval: 1, EvaluationDurationSeconds: 214, - LastEvaluation: rulespb.TimeToTimestamp(time.Time{}.Add(20 * time.Minute)), + LastEvaluation: time.Time{}.Add(20 * time.Minute), PartialResponseStrategy: storepb.PartialResponseStrategy_WARN, }, }, @@ -1897,7 +1897,7 @@ func TestRulesHandler(t *testing.T) { Rules: all[2:], Interval: 1, EvaluationDurationSeconds: 214, - LastEvaluation: rulespb.TimeToTimestamp(time.Time{}.Add(30 * time.Minute)), + LastEvaluation: time.Time{}.Add(30 * time.Minute), PartialResponseStrategy: storepb.PartialResponseStrategy_WARN, }, }, @@ -1916,7 +1916,7 @@ func TestRulesHandler(t *testing.T) { Labels: labelpb.LabelpbLabelsToPromLabels(all[0].GetRecording().Labels.Labels), Health: rules.RuleHealth(all[0].GetRecording().Health), LastError: all[0].GetRecording().LastError, - LastEvaluation: rulespb.TimestampToTime(all[0].GetRecording().LastEvaluation), + LastEvaluation: all[0].GetRecording().LastEvaluation, EvaluationTime: all[0].GetRecording().EvaluationDurationSeconds, Type: "recording", }, @@ -1926,7 +1926,7 @@ func TestRulesHandler(t *testing.T) { Labels: labelpb.LabelpbLabelsToPromLabels(all[1].GetRecording().Labels.Labels), Health: rules.RuleHealth(all[1].GetRecording().Health), LastError: all[1].GetRecording().LastError, - LastEvaluation: rulespb.TimestampToTime(all[1].GetRecording().LastEvaluation), + LastEvaluation: all[1].GetRecording().LastEvaluation, EvaluationTime: all[1].GetRecording().EvaluationDurationSeconds, Type: "recording", }, @@ -1937,7 +1937,7 @@ func TestRulesHandler(t *testing.T) { Labels: labelpb.LabelpbLabelsToPromLabels(all[2].GetAlert().Labels.Labels), Health: rules.RuleHealth(all[2].GetAlert().Health), LastError: all[2].GetAlert().LastError, - LastEvaluation: rulespb.TimestampToTime(all[2].GetAlert().LastEvaluation), + LastEvaluation: all[2].GetAlert().LastEvaluation, EvaluationTime: all[2].GetAlert().EvaluationDurationSeconds, Duration: all[2].GetAlert().DurationSeconds, KeepFiringFor: all[2].GetAlert().KeepFiringForSeconds, @@ -1947,7 +1947,7 @@ func TestRulesHandler(t *testing.T) { Labels: labelpb.LabelpbLabelsToPromLabels(all[2].GetAlert().Alerts[0].Labels.Labels), Annotations: labelpb.LabelpbLabelsToPromLabels(all[2].GetAlert().Alerts[0].Annotations.Labels), State: strings.ToLower(all[2].GetAlert().Alerts[0].State.String()), - ActiveAt: rulespb.TimestampToTime(all[2].GetAlert().Alerts[0].ActiveAt), + ActiveAt: all[2].GetAlert().Alerts[0].ActiveAt, Value: all[2].GetAlert().Alerts[0].Value, PartialResponseStrategy: all[2].GetAlert().Alerts[0].PartialResponseStrategy.String(), }, @@ -1955,7 +1955,7 @@ func TestRulesHandler(t *testing.T) { Labels: labelpb.LabelpbLabelsToPromLabels(all[2].GetAlert().Alerts[1].Labels.Labels), Annotations: labelpb.LabelpbLabelsToPromLabels(all[2].GetAlert().Alerts[1].Annotations.Labels), State: strings.ToLower(all[2].GetAlert().Alerts[1].State.String()), - ActiveAt: rulespb.TimestampToTime(all[2].GetAlert().Alerts[1].ActiveAt), + ActiveAt: all[2].GetAlert().Alerts[1].ActiveAt, Value: all[2].GetAlert().Alerts[1].Value, PartialResponseStrategy: all[2].GetAlert().Alerts[1].PartialResponseStrategy.String(), }, @@ -1969,11 +1969,11 @@ func TestRulesHandler(t *testing.T) { Labels: labelpb.LabelpbLabelsToPromLabels(all[3].GetAlert().Labels.Labels), Health: rules.RuleHealth(all[2].GetAlert().Health), 
LastError: all[3].GetAlert().LastError, - LastEvaluation: rulespb.TimestampToTime(all[3].GetAlert().LastEvaluation), + LastEvaluation: all[3].GetAlert().LastEvaluation, EvaluationTime: all[3].GetAlert().EvaluationDurationSeconds, Duration: all[3].GetAlert().DurationSeconds, KeepFiringFor: all[3].GetAlert().KeepFiringForSeconds, - Annotations: labels.EmptyLabels(), + Annotations: nil, Alerts: []*testpromcompatibility.Alert{}, Type: "alerting", }, @@ -1984,11 +1984,11 @@ func TestRulesHandler(t *testing.T) { Labels: labelpb.LabelpbLabelsToPromLabels(all[4].GetAlert().Labels.Labels), Health: rules.RuleHealth(all[2].GetAlert().Health), LastError: all[4].GetAlert().LastError, - LastEvaluation: rulespb.TimestampToTime(all[4].GetAlert().LastEvaluation), + LastEvaluation: all[4].GetAlert().LastEvaluation, EvaluationTime: all[4].GetAlert().EvaluationDurationSeconds, Duration: all[4].GetAlert().DurationSeconds, KeepFiringFor: all[4].GetAlert().KeepFiringForSeconds, - Annotations: labels.EmptyLabels(), + Annotations: nil, Alerts: []*testpromcompatibility.Alert{}, Type: "alerting", }, @@ -2051,9 +2051,6 @@ func TestRulesHandler(t *testing.T) { }, }, } { - if test.query.Encode() != "" { - continue - } t.Run(fmt.Sprintf("endpoint=%s/method=%s/query=%q", "rules", http.MethodGet, test.query.Encode()), func(t *testing.T) { // Build a context with the correct request params. ctx := context.Background() diff --git a/pkg/dedup/iter.go b/pkg/dedup/iter.go index c0f30d7d0a..e3b3fe8f0f 100644 --- a/pkg/dedup/iter.go +++ b/pkg/dedup/iter.go @@ -48,7 +48,7 @@ type overlapSplitSet struct { currLabels labels.Labels currI int - replicas [][]*storepb.AggrChunk + replicas [][]storepb.AggrChunk } func (o *overlapSplitSet) Next() bool { @@ -70,7 +70,7 @@ func (o *overlapSplitSet) Next() bool { return false } - var chunks []*storepb.AggrChunk + var chunks []storepb.AggrChunk o.currLabels, chunks = o.set.At() if len(chunks) == 0 { return true @@ -87,12 +87,12 @@ chunksLoop: continue chunksLoop } } - o.replicas = append(o.replicas, []*storepb.AggrChunk{chunks[i]}) // Not found, add to a new "fake" series. + o.replicas = append(o.replicas, []storepb.AggrChunk{chunks[i]}) // Not found, add to a new "fake" series. 
} return true } -func (o *overlapSplitSet) At() (labels.Labels, []*storepb.AggrChunk) { +func (o *overlapSplitSet) At() (labels.Labels, []storepb.AggrChunk) { return o.currLabels, o.replicas[o.currI] } diff --git a/pkg/dedup/iter_test.go b/pkg/dedup/iter_test.go index f92d0ae920..36a5a36375 100644 --- a/pkg/dedup/iter_test.go +++ b/pkg/dedup/iter_test.go @@ -153,7 +153,7 @@ var expectedRealSeriesWithStaleMarkerDeduplicatedForRate = []sample{ type chunkedSeries struct { lset labels.Labels - chunks []*storepb.AggrChunk + chunks []storepb.AggrChunk } type chunkedSeriesSet struct { @@ -177,7 +177,7 @@ func (*chunkedSeriesSet) Err() error { return nil } -func (s *chunkedSeriesSet) At() (labels.Labels, []*storepb.AggrChunk) { +func (s *chunkedSeriesSet) At() (labels.Labels, []storepb.AggrChunk) { return s.series[s.i].lset, s.series[s.i].chunks } @@ -200,23 +200,23 @@ func TestOverlapSplitSet(t *testing.T) { }, { lset: labels.FromStrings("a", "2_nonoverlap"), - chunks: []*storepb.AggrChunk{{MinTime: 0, MaxTime: 20}, {MinTime: 21, MaxTime: 100}, {MinTime: 110, MaxTime: 300}}, + chunks: []storepb.AggrChunk{{MinTime: 0, MaxTime: 20}, {MinTime: 21, MaxTime: 100}, {MinTime: 110, MaxTime: 300}}, }, { lset: labels.FromStrings("a", "3_tworeplicas"), - chunks: []*storepb.AggrChunk{{MinTime: 0, MaxTime: 20}, {MinTime: 0, MaxTime: 30}, {MinTime: 21, MaxTime: 50}, {MinTime: 31, MaxTime: 60}, {MinTime: 100, MaxTime: 160}}, + chunks: []storepb.AggrChunk{{MinTime: 0, MaxTime: 20}, {MinTime: 0, MaxTime: 30}, {MinTime: 21, MaxTime: 50}, {MinTime: 31, MaxTime: 60}, {MinTime: 100, MaxTime: 160}}, }, { lset: labels.FromStrings("a", "4_nonoverlap"), - chunks: []*storepb.AggrChunk{{MinTime: 50, MaxTime: 55}, {MinTime: 56, MaxTime: 100}}, + chunks: []storepb.AggrChunk{{MinTime: 50, MaxTime: 55}, {MinTime: 56, MaxTime: 100}}, }, { lset: labels.FromStrings("a", "5_minimaloverlap"), - chunks: []*storepb.AggrChunk{{MinTime: 50, MaxTime: 55}, {MinTime: 55, MaxTime: 100}}, + chunks: []storepb.AggrChunk{{MinTime: 50, MaxTime: 55}, {MinTime: 55, MaxTime: 100}}, }, { lset: labels.FromStrings("a", "6_fourreplica"), - chunks: []*storepb.AggrChunk{{MinTime: 0, MaxTime: 20}, {MinTime: 0, MaxTime: 30}, {MinTime: 1, MaxTime: 15}, {MinTime: 2, MaxTime: 36}, {MinTime: 16, MaxTime: 200}, + chunks: []storepb.AggrChunk{{MinTime: 0, MaxTime: 20}, {MinTime: 0, MaxTime: 30}, {MinTime: 1, MaxTime: 15}, {MinTime: 2, MaxTime: 36}, {MinTime: 16, MaxTime: 200}, {MinTime: 21, MaxTime: 50}, {MinTime: 31, MaxTime: 60}, {MinTime: 100, MaxTime: 160}}, }, } @@ -226,43 +226,43 @@ func TestOverlapSplitSet(t *testing.T) { }, { lset: labels.FromStrings("a", "2_nonoverlap"), - chunks: []*storepb.AggrChunk{{MinTime: 0, MaxTime: 20}, {MinTime: 21, MaxTime: 100}, {MinTime: 110, MaxTime: 300}}, + chunks: []storepb.AggrChunk{{MinTime: 0, MaxTime: 20}, {MinTime: 21, MaxTime: 100}, {MinTime: 110, MaxTime: 300}}, }, { lset: labels.FromStrings("a", "3_tworeplicas"), - chunks: []*storepb.AggrChunk{{MinTime: 0, MaxTime: 20}, {MinTime: 21, MaxTime: 50}, {MinTime: 100, MaxTime: 160}}, + chunks: []storepb.AggrChunk{{MinTime: 0, MaxTime: 20}, {MinTime: 21, MaxTime: 50}, {MinTime: 100, MaxTime: 160}}, }, { lset: labels.FromStrings("a", "3_tworeplicas"), - chunks: []*storepb.AggrChunk{{MinTime: 0, MaxTime: 30}, {MinTime: 31, MaxTime: 60}}, + chunks: []storepb.AggrChunk{{MinTime: 0, MaxTime: 30}, {MinTime: 31, MaxTime: 60}}, }, { lset: labels.FromStrings("a", "4_nonoverlap"), - chunks: []*storepb.AggrChunk{{MinTime: 50, MaxTime: 55}, {MinTime: 56, MaxTime: 100}}, + chunks: 
[]storepb.AggrChunk{{MinTime: 50, MaxTime: 55}, {MinTime: 56, MaxTime: 100}}, }, { lset: labels.FromStrings("a", "5_minimaloverlap"), - chunks: []*storepb.AggrChunk{{MinTime: 50, MaxTime: 55}}, + chunks: []storepb.AggrChunk{{MinTime: 50, MaxTime: 55}}, }, { lset: labels.FromStrings("a", "5_minimaloverlap"), - chunks: []*storepb.AggrChunk{{MinTime: 55, MaxTime: 100}}, + chunks: []storepb.AggrChunk{{MinTime: 55, MaxTime: 100}}, }, { lset: labels.FromStrings("a", "6_fourreplica"), - chunks: []*storepb.AggrChunk{{MinTime: 0, MaxTime: 20}, {MinTime: 21, MaxTime: 50}, {MinTime: 100, MaxTime: 160}}, + chunks: []storepb.AggrChunk{{MinTime: 0, MaxTime: 20}, {MinTime: 21, MaxTime: 50}, {MinTime: 100, MaxTime: 160}}, }, { lset: labels.FromStrings("a", "6_fourreplica"), - chunks: []*storepb.AggrChunk{{MinTime: 0, MaxTime: 30}, {MinTime: 31, MaxTime: 60}}, + chunks: []storepb.AggrChunk{{MinTime: 0, MaxTime: 30}, {MinTime: 31, MaxTime: 60}}, }, { lset: labels.FromStrings("a", "6_fourreplica"), - chunks: []*storepb.AggrChunk{{MinTime: 1, MaxTime: 15}, {MinTime: 16, MaxTime: 200}}, + chunks: []storepb.AggrChunk{{MinTime: 1, MaxTime: 15}, {MinTime: 16, MaxTime: 200}}, }, { lset: labels.FromStrings("a", "6_fourreplica"), - chunks: []*storepb.AggrChunk{{MinTime: 2, MaxTime: 36}}, + chunks: []storepb.AggrChunk{{MinTime: 2, MaxTime: 36}}, }, } diff --git a/pkg/exemplars/exemplars.go b/pkg/exemplars/exemplars.go index 5851b20a68..3e16fa2a7b 100644 --- a/pkg/exemplars/exemplars.go +++ b/pkg/exemplars/exemplars.go @@ -155,11 +155,11 @@ func dedupExemplars(exemplars []*exemplarspb.Exemplar) []*exemplarspb.Exemplar { return exemplars[:i+1] } -func removeReplicaLabels(labels []*labelpb.Label, replicaLabels map[string]struct{}) []*labelpb.Label { +func removeReplicaLabels(labels []labelpb.Label, replicaLabels map[string]struct{}) []labelpb.Label { if len(replicaLabels) == 0 { return labels } - newLabels := make([]*labelpb.Label, 0, len(labels)) + newLabels := make([]labelpb.Label, 0, len(labels)) for _, l := range labels { if _, ok := replicaLabels[l.Name]; !ok { newLabels = append(newLabels, l) diff --git a/pkg/exemplars/exemplars_test.go b/pkg/exemplars/exemplars_test.go index 2d620b41c5..ef9535ca02 100644 --- a/pkg/exemplars/exemplars_test.go +++ b/pkg/exemplars/exemplars_test.go @@ -36,7 +36,7 @@ func TestDedupExemplarsResponse(t *testing.T) { name: "empty exemplars data", exemplars: []*exemplarspb.ExemplarData{ { - SeriesLabels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + SeriesLabels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "__name__", Value: "test_exemplar_metric_total"}, {Name: "instance", Value: "localhost:8090"}, {Name: "job", Value: "prometheus"}, @@ -51,7 +51,7 @@ func TestDedupExemplarsResponse(t *testing.T) { replicaLabels: []string{"replica"}, exemplars: []*exemplarspb.ExemplarData{ { - SeriesLabels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + SeriesLabels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "__name__", Value: "test_exemplar_metric_total"}, {Name: "instance", Value: "localhost:8090"}, {Name: "job", Value: "prometheus"}, @@ -60,14 +60,14 @@ func TestDedupExemplarsResponse(t *testing.T) { }}, Exemplars: []*exemplarspb.Exemplar{ { - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "traceID", Value: "EpTxMJ40fUus7aGY"}, }}, Value: 19, Ts: 1600096955479, }, { - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "traceID", Value: "EpTxMJ40fUus7aGY"}, }}, Value: 
19, @@ -76,7 +76,7 @@ func TestDedupExemplarsResponse(t *testing.T) { }, }, { - SeriesLabels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + SeriesLabels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "__name__", Value: "test_exemplar_metric_total"}, {Name: "instance", Value: "localhost:8090"}, {Name: "job", Value: "prometheus"}, @@ -85,7 +85,7 @@ func TestDedupExemplarsResponse(t *testing.T) { }}, Exemplars: []*exemplarspb.Exemplar{ { - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "traceID", Value: "EpTxMJ40fUus7aGY"}, }}, Value: 19, @@ -96,7 +96,7 @@ func TestDedupExemplarsResponse(t *testing.T) { }, want: []*exemplarspb.ExemplarData{ { - SeriesLabels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + SeriesLabels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "__name__", Value: "test_exemplar_metric_total"}, {Name: "instance", Value: "localhost:8090"}, {Name: "job", Value: "prometheus"}, @@ -104,7 +104,7 @@ func TestDedupExemplarsResponse(t *testing.T) { }}, Exemplars: []*exemplarspb.Exemplar{ { - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "traceID", Value: "EpTxMJ40fUus7aGY"}, }}, Value: 19, @@ -119,7 +119,7 @@ func TestDedupExemplarsResponse(t *testing.T) { replicaLabels: []string{"replica"}, exemplars: []*exemplarspb.ExemplarData{ { - SeriesLabels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + SeriesLabels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "__name__", Value: "test_exemplar_metric_total"}, {Name: "instance", Value: "localhost:8090"}, {Name: "job", Value: "prometheus"}, @@ -128,14 +128,14 @@ func TestDedupExemplarsResponse(t *testing.T) { }}, Exemplars: []*exemplarspb.Exemplar{ { - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "traceID", Value: "EpTxMJ40fUus7aGY"}, }}, Value: 19, Ts: 1600096955479, }, { - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "traceID", Value: "foo"}, }}, Value: 19, @@ -144,7 +144,7 @@ func TestDedupExemplarsResponse(t *testing.T) { }, }, { - SeriesLabels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + SeriesLabels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "__name__", Value: "test_exemplar_metric_total"}, {Name: "instance", Value: "localhost:8090"}, {Name: "job", Value: "prometheus"}, @@ -153,14 +153,14 @@ func TestDedupExemplarsResponse(t *testing.T) { }}, Exemplars: []*exemplarspb.Exemplar{ { - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "traceID", Value: "bar"}, }}, Value: 19, Ts: 1600096955579, }, { - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "traceID", Value: "EpTxMJ40fUus7aGY"}, }}, Value: 19, @@ -168,7 +168,7 @@ func TestDedupExemplarsResponse(t *testing.T) { }, // Same ts but different labels, cannot dedup. 
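The test cases above pin the dedup rule for exemplars: an entry is dropped only when both its label set and its timestamp match an already-kept one, and exemplars that share a timestamp but carry different labels all survive (see the "cannot dedup" comment). A rough, self-contained sketch of that rule, with a simplified exemplar type standing in for exemplarspb.Exemplar and flattened label strings instead of labelpb.LabelSet:

```go
// Sketch only: exemplar is a simplified stand-in for exemplarspb.Exemplar,
// with the label set flattened to a string for easy comparison.
package main

import (
	"fmt"
	"sort"
)

type exemplar struct {
	labels string
	value  float64
	ts     int64
}

// dedup keeps one exemplar per (labels, ts) pair after sorting; entries
// sharing a timestamp but carrying different labels are all kept.
func dedup(exs []exemplar) []exemplar {
	sort.Slice(exs, func(i, j int) bool {
		if exs[i].ts != exs[j].ts {
			return exs[i].ts < exs[j].ts
		}
		return exs[i].labels < exs[j].labels
	})
	out := make([]exemplar, 0, len(exs))
	for _, e := range exs {
		if n := len(out); n > 0 && out[n-1].labels == e.labels && out[n-1].ts == e.ts {
			continue // exact duplicate, drop it
		}
		out = append(out, e)
	}
	return out
}

func main() {
	deduped := dedup([]exemplar{
		{labels: `traceID="EpTxMJ40fUus7aGY"`, value: 19, ts: 1600096955479},
		{labels: `traceID="EpTxMJ40fUus7aGY"`, value: 19, ts: 1600096955479}, // duplicate: dropped
		{labels: `traceID="test"`, value: 19, ts: 1600096955479},             // same ts, different labels: kept
	})
	fmt.Println(len(deduped)) // 2
}
```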
{ - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "traceID", Value: "test"}, }}, Value: 19, @@ -179,7 +179,7 @@ func TestDedupExemplarsResponse(t *testing.T) { }, want: []*exemplarspb.ExemplarData{ { - SeriesLabels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + SeriesLabels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "__name__", Value: "test_exemplar_metric_total"}, {Name: "instance", Value: "localhost:8090"}, {Name: "job", Value: "prometheus"}, @@ -187,28 +187,28 @@ func TestDedupExemplarsResponse(t *testing.T) { }}, Exemplars: []*exemplarspb.Exemplar{ { - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "traceID", Value: "foo"}, }}, Value: 19, Ts: 1600096955470, }, { - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "traceID", Value: "EpTxMJ40fUus7aGY"}, }}, Value: 19, Ts: 1600096955479, }, { - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "traceID", Value: "test"}, }}, Value: 19, Ts: 1600096955479, }, { - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "traceID", Value: "bar"}, }}, Value: 19, diff --git a/pkg/exemplars/exemplarspb/custom.go b/pkg/exemplars/exemplarspb/custom.go index 17e5248931..14d8dd6a12 100644 --- a/pkg/exemplars/exemplarspb/custom.go +++ b/pkg/exemplars/exemplarspb/custom.go @@ -31,7 +31,7 @@ func (m *Exemplar) UnmarshalJSON(b []byte) error { return err } - m.Labels = &v.Labels + m.Labels = v.Labels m.Ts = int64(v.TimeStamp) m.Value = float64(v.Value) @@ -74,10 +74,10 @@ func (s1 *ExemplarData) Compare(s2 *ExemplarData) int { } func (s *ExemplarData) SetSeriesLabels(ls labels.Labels) { - var result *labelpb.LabelSet + var result labelpb.LabelSet if !ls.IsEmpty() { - result = &labelpb.LabelSet{Labels: labelpb.PromLabelsToLabelpbLabels(ls)} + result = labelpb.LabelSet{Labels: labelpb.PromLabelsToLabelpbLabels(ls)} } s.SeriesLabels = result @@ -102,7 +102,7 @@ func ExemplarsFromPromExemplars(exemplars []exemplar.Exemplar) []*Exemplar { ex := make([]*Exemplar, 0, len(exemplars)) for _, e := range exemplars { ex = append(ex, &Exemplar{ - Labels: &labelpb.LabelSet{Labels: labelpb.PromLabelsToLabelpbLabels(e.Labels)}, + Labels: labelpb.LabelSet{Labels: labelpb.PromLabelsToLabelpbLabels(e.Labels)}, Value: e.Value, Ts: e.Ts, }) diff --git a/pkg/exemplars/exemplarspb/rpc.pb.go b/pkg/exemplars/exemplarspb/rpc.pb.go index 42ef88cbb3..784c966649 100644 --- a/pkg/exemplars/exemplarspb/rpc.pb.go +++ b/pkg/exemplars/exemplarspb/rpc.pb.go @@ -11,6 +11,7 @@ import ( math "math" math_bits "math/bits" + _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" labelpb "github.com/thanos-io/thanos/pkg/store/labelpb" storepb "github.com/thanos-io/thanos/pkg/store/storepb" @@ -35,9 +36,6 @@ type ExemplarsRequest struct { Start int64 `protobuf:"varint,2,opt,name=start,proto3" json:"start,omitempty"` End int64 `protobuf:"varint,3,opt,name=end,proto3" json:"end,omitempty"` PartialResponseStrategy storepb.PartialResponseStrategy `protobuf:"varint,4,opt,name=partial_response_strategy,json=partialResponseStrategy,proto3,enum=thanos.PartialResponseStrategy" json:"partial_response_strategy,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` } func (m *ExemplarsRequest) Reset() { *m = 
ExemplarsRequest{} } @@ -73,42 +71,11 @@ func (m *ExemplarsRequest) XXX_DiscardUnknown() { var xxx_messageInfo_ExemplarsRequest proto.InternalMessageInfo -func (m *ExemplarsRequest) GetQuery() string { - if m != nil { - return m.Query - } - return "" -} - -func (m *ExemplarsRequest) GetStart() int64 { - if m != nil { - return m.Start - } - return 0 -} - -func (m *ExemplarsRequest) GetEnd() int64 { - if m != nil { - return m.End - } - return 0 -} - -func (m *ExemplarsRequest) GetPartialResponseStrategy() storepb.PartialResponseStrategy { - if m != nil { - return m.PartialResponseStrategy - } - return storepb.PartialResponseStrategy_WARN -} - type ExemplarsResponse struct { // Types that are valid to be assigned to Result: // *ExemplarsResponse_Data // *ExemplarsResponse_Warning - Result isExemplarsResponse_Result `protobuf_oneof:"result"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Result isExemplarsResponse_Result `protobuf_oneof:"result"` } func (m *ExemplarsResponse) Reset() { *m = ExemplarsResponse{} } @@ -190,13 +157,8 @@ func (*ExemplarsResponse) XXX_OneofWrappers() []interface{} { } type ExemplarData struct { - // @gotags: json:"seriesLabels" - SeriesLabels *labelpb.LabelSet `protobuf:"bytes,1,opt,name=seriesLabels,proto3" json:"seriesLabels"` - // @gotags: json:"exemplars" - Exemplars []*Exemplar `protobuf:"bytes,2,rep,name=exemplars,proto3" json:"exemplars"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + SeriesLabels labelpb.LabelSet `protobuf:"bytes,1,opt,name=seriesLabels,proto3" json:"seriesLabels"` + Exemplars []*Exemplar `protobuf:"bytes,2,rep,name=exemplars,proto3" json:"exemplars"` } func (m *ExemplarData) Reset() { *m = ExemplarData{} } @@ -232,30 +194,10 @@ func (m *ExemplarData) XXX_DiscardUnknown() { var xxx_messageInfo_ExemplarData proto.InternalMessageInfo -func (m *ExemplarData) GetSeriesLabels() *labelpb.LabelSet { - if m != nil { - return m.SeriesLabels - } - return nil -} - -func (m *ExemplarData) GetExemplars() []*Exemplar { - if m != nil { - return m.Exemplars - } - return nil -} - type Exemplar struct { - // @gotags: json:"labels" - Labels *labelpb.LabelSet `protobuf:"bytes,1,opt,name=labels,proto3" json:"labels"` - // @gotags: json:"value" - Value float64 `protobuf:"fixed64,2,opt,name=value,proto3" json:"value"` - // @gotags: json:"timestamp" - Ts int64 `protobuf:"varint,3,opt,name=ts,proto3" json:"timestamp"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Labels labelpb.LabelSet `protobuf:"bytes,1,opt,name=labels,proto3" json:"labels"` + Value float64 `protobuf:"fixed64,2,opt,name=value,proto3" json:"value"` + Ts int64 `protobuf:"varint,3,opt,name=ts,proto3" json:"timestamp"` } func (m *Exemplar) Reset() { *m = Exemplar{} } @@ -291,27 +233,6 @@ func (m *Exemplar) XXX_DiscardUnknown() { var xxx_messageInfo_Exemplar proto.InternalMessageInfo -func (m *Exemplar) GetLabels() *labelpb.LabelSet { - if m != nil { - return m.Labels - } - return nil -} - -func (m *Exemplar) GetValue() float64 { - if m != nil { - return m.Value - } - return 0 -} - -func (m *Exemplar) GetTs() int64 { - if m != nil { - return m.Ts - } - return 0 -} - func init() { proto.RegisterType((*ExemplarsRequest)(nil), "thanos.ExemplarsRequest") proto.RegisterType((*ExemplarsResponse)(nil), "thanos.ExemplarsResponse") @@ -322,32 +243,36 @@ func init() { func init() { 
proto.RegisterFile("exemplars/exemplarspb/rpc.proto", fileDescriptor_fd9ad2a40bac3cc9) } var fileDescriptor_fd9ad2a40bac3cc9 = []byte{ - // 386 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x52, 0xc1, 0x6e, 0xd4, 0x30, - 0x10, 0xad, 0x93, 0x12, 0x9a, 0x69, 0x55, 0x2d, 0x56, 0x25, 0x92, 0x3d, 0x6c, 0xa3, 0x9c, 0x22, - 0x0e, 0x09, 0x5a, 0xf8, 0x82, 0x15, 0x48, 0x3d, 0x20, 0x81, 0xdc, 0x5b, 0x39, 0x54, 0x0e, 0x1d, - 0x95, 0x95, 0x42, 0xe2, 0xb5, 0x27, 0xc0, 0x7e, 0x14, 0xff, 0xc1, 0x91, 0x4f, 0x40, 0xfb, 0x25, - 0x28, 0x76, 0xbc, 0x64, 0xd1, 0x8a, 0x8b, 0xe5, 0x79, 0xef, 0x79, 0xe6, 0x79, 0x66, 0xe0, 0x1a, - 0xbf, 0xe3, 0x17, 0xd5, 0x48, 0x6d, 0xaa, 0xfd, 0x4d, 0xd5, 0x95, 0x56, 0x9f, 0x4a, 0xa5, 0x3b, - 0xea, 0x78, 0x44, 0x9f, 0x65, 0xdb, 0x99, 0x79, 0x6a, 0xa8, 0xd3, 0x58, 0xd9, 0x53, 0xd5, 0x15, - 0x6d, 0x15, 0x1a, 0x27, 0xf1, 0x54, 0x23, 0x6b, 0x6c, 0x0e, 0xa9, 0xfc, 0x07, 0x83, 0xd9, 0x5b, - 0x9f, 0x57, 0xe0, 0xa6, 0x47, 0x43, 0xfc, 0x0a, 0x9e, 0x6c, 0x7a, 0xd4, 0xdb, 0x84, 0x65, 0xac, - 0x88, 0x85, 0x0b, 0x06, 0xd4, 0x90, 0xd4, 0x94, 0x04, 0x19, 0x2b, 0x42, 0xe1, 0x02, 0x3e, 0x83, - 0x10, 0xdb, 0x87, 0x24, 0xb4, 0xd8, 0x70, 0xe5, 0x1f, 0x21, 0x55, 0x52, 0xd3, 0x5a, 0x36, 0xf7, - 0x1a, 0x8d, 0xea, 0x5a, 0x83, 0xf7, 0x86, 0xb4, 0x24, 0x7c, 0xdc, 0x26, 0xa7, 0x19, 0x2b, 0x2e, - 0x97, 0xd7, 0xa5, 0x33, 0x5d, 0x7e, 0x70, 0x42, 0x31, 0xea, 0x6e, 0x47, 0x99, 0x78, 0xae, 0x8e, - 0x13, 0x39, 0xc2, 0xb3, 0x89, 0x5d, 0x47, 0xf2, 0x17, 0x70, 0xfa, 0x20, 0x49, 0x5a, 0xbb, 0xe7, - 0xcb, 0x2b, 0x9f, 0xdc, 0x0b, 0xdf, 0x48, 0x92, 0x37, 0x27, 0xc2, 0x6a, 0xf8, 0x1c, 0x9e, 0x7e, - 0x93, 0xba, 0x5d, 0xb7, 0x8f, 0xf6, 0x1f, 0xf1, 0xcd, 0x89, 0xf0, 0xc0, 0xea, 0x0c, 0x22, 0x8d, - 0xa6, 0x6f, 0x28, 0x27, 0xb8, 0x98, 0xbe, 0xe6, 0xaf, 0xe1, 0xc2, 0xa0, 0x5e, 0xa3, 0x79, 0x37, - 0xf4, 0xd0, 0x8c, 0x95, 0x66, 0xbe, 0x92, 0x45, 0x6f, 0x91, 0xc4, 0x81, 0x8a, 0x97, 0x10, 0xef, - 0x67, 0x96, 0x04, 0x59, 0x38, 0x7d, 0xe2, 0xd3, 0x8b, 0xbf, 0x92, 0xfc, 0x0e, 0xce, 0x3c, 0xcc, - 0x0b, 0x88, 0x9a, 0xff, 0xd7, 0x1a, 0xf9, 0x61, 0x2e, 0x5f, 0x65, 0xd3, 0xa3, 0xfd, 0x0f, 0x13, - 0x2e, 0xe0, 0x97, 0x10, 0x90, 0x19, 0xc7, 0x12, 0x90, 0x59, 0xbe, 0x87, 0x78, 0xdf, 0x38, 0xbe, - 0x9a, 0x06, 0xc9, 0xbf, 0x96, 0xfc, 0x1e, 0xcc, 0xd3, 0x23, 0x8c, 0x6b, 0xf9, 0x4b, 0xb6, 0x4a, - 0x7f, 0xee, 0x16, 0xec, 0xd7, 0x6e, 0xc1, 0x7e, 0xef, 0x16, 0xec, 0xee, 0x7c, 0xb2, 0x9c, 0x75, - 0x64, 0x77, 0xeb, 0xd5, 0x9f, 0x00, 0x00, 0x00, 0xff, 0xff, 0x23, 0x94, 0xcb, 0x12, 0xbc, 0x02, - 0x00, 0x00, + // 462 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x93, 0xc1, 0x6e, 0xd3, 0x40, + 0x10, 0x86, 0xbd, 0x49, 0x1b, 0xea, 0x69, 0xa9, 0xc2, 0x2a, 0x12, 0x4e, 0x24, 0xec, 0x28, 0xa7, + 0xc0, 0x21, 0x46, 0xe1, 0xc2, 0x85, 0x8b, 0x05, 0x52, 0x85, 0x90, 0x40, 0xdb, 0x1b, 0x1c, 0xaa, + 0x0d, 0x1d, 0x85, 0x48, 0x8e, 0xbd, 0xdd, 0x9d, 0x00, 0xb9, 0xf3, 0x00, 0x5c, 0x79, 0x08, 0xde, + 0x23, 0xc7, 0x1e, 0x39, 0x45, 0x90, 0xdc, 0xf2, 0x14, 0x28, 0x5e, 0x6f, 0xea, 0x56, 0x51, 0x2f, + 0xde, 0x99, 0x7f, 0x3e, 0xef, 0xcc, 0xec, 0xec, 0x42, 0x84, 0xdf, 0x71, 0xaa, 0x52, 0xa9, 0x4d, + 0xbc, 0xb3, 0xd4, 0x28, 0xd6, 0xea, 0xf3, 0x40, 0xe9, 0x9c, 0x72, 0xde, 0xa0, 0x2f, 0x32, 0xcb, + 0x4d, 0xa7, 0x6d, 0x28, 0xd7, 0x18, 0x17, 0x5f, 0x35, 0x8a, 0x69, 0xae, 0xd0, 0x58, 0xc4, 0x85, + 0x52, 0x39, 0xc2, 0xf4, 0x4e, 0xa8, 0x35, 0xce, 0xc7, 0x79, 0x61, 0xc6, 0x5b, 0xcb, 0xaa, 0xbd, + 0xdf, 0x0c, 0x9a, 0x6f, 0x5c, 0x36, 0x81, 0x57, 0x33, 0x34, 0xc4, 0x5b, 
0x70, 0x78, 0x35, 0x43, + 0x3d, 0x0f, 0x58, 0x97, 0xf5, 0x7d, 0x61, 0x9d, 0xad, 0x6a, 0x48, 0x6a, 0x0a, 0x6a, 0x5d, 0xd6, + 0xaf, 0x0b, 0xeb, 0xf0, 0x26, 0xd4, 0x31, 0xbb, 0x0c, 0xea, 0x85, 0xb6, 0x35, 0xf9, 0x27, 0x68, + 0x2b, 0xa9, 0x69, 0x22, 0xd3, 0x0b, 0x8d, 0x46, 0xe5, 0x99, 0xc1, 0x0b, 0x43, 0x5a, 0x12, 0x8e, + 0xe7, 0xc1, 0x41, 0x97, 0xf5, 0x4f, 0x87, 0xd1, 0xc0, 0xb6, 0x32, 0xf8, 0x60, 0x41, 0x51, 0x72, + 0xe7, 0x25, 0x26, 0x1e, 0xab, 0xfd, 0x81, 0x1e, 0xc2, 0xa3, 0x4a, 0xb9, 0x36, 0xc8, 0x9f, 0xc1, + 0xc1, 0xa5, 0x24, 0x59, 0x94, 0x7b, 0x3c, 0x6c, 0xb9, 0xcd, 0x1d, 0xf8, 0x5a, 0x92, 0x3c, 0xf3, + 0x44, 0xc1, 0xf0, 0x0e, 0x3c, 0xf8, 0x26, 0x75, 0x36, 0xc9, 0xc6, 0x45, 0x1f, 0xfe, 0x99, 0x27, + 0x9c, 0x90, 0x1c, 0x41, 0x43, 0xa3, 0x99, 0xa5, 0xd4, 0xfb, 0xc5, 0xe0, 0xa4, 0xfa, 0x3b, 0x7f, + 0x0b, 0x27, 0x06, 0xf5, 0x04, 0xcd, 0xbb, 0xed, 0xd1, 0x9a, 0x32, 0x55, 0xd3, 0xa5, 0x2a, 0xd4, + 0x73, 0xa4, 0xa4, 0xb5, 0x58, 0x46, 0xde, 0x66, 0x19, 0xdd, 0xa2, 0xc5, 0x2d, 0x8f, 0xbf, 0x02, + 0x7f, 0x37, 0xe0, 0xa0, 0xd6, 0xad, 0x57, 0x37, 0x72, 0x49, 0x93, 0x87, 0x9b, 0x65, 0x74, 0x83, + 0x89, 0x1b, 0xb3, 0xf7, 0x83, 0xc1, 0x91, 0xc3, 0xf8, 0x4b, 0x68, 0xa4, 0xf7, 0x57, 0x74, 0x5a, + 0x56, 0x54, 0x72, 0xa2, 0x5c, 0x79, 0x04, 0x87, 0x5f, 0x65, 0x3a, 0xc3, 0xe2, 0x18, 0x58, 0xe2, + 0x6f, 0x96, 0x91, 0x15, 0x84, 0x5d, 0xf8, 0x13, 0xa8, 0x91, 0xb1, 0x83, 0xb5, 0xd5, 0xd0, 0x64, + 0x8a, 0x86, 0xe4, 0x54, 0x89, 0x1a, 0x99, 0xe1, 0x7b, 0xf0, 0x77, 0x93, 0xe0, 0x49, 0xd5, 0x09, + 0xee, 0x36, 0xe3, 0x2e, 0x56, 0xa7, 0xbd, 0x27, 0x62, 0x67, 0xf8, 0x9c, 0x25, 0x4f, 0x17, 0xff, + 0x42, 0x6f, 0xb1, 0x0a, 0xd9, 0xf5, 0x2a, 0x64, 0x7f, 0x57, 0x21, 0xfb, 0xb9, 0x0e, 0xbd, 0xeb, + 0x75, 0xe8, 0xfd, 0x59, 0x87, 0xde, 0xc7, 0xe3, 0xca, 0x9b, 0x18, 0x35, 0x8a, 0xcb, 0xfb, 0xe2, + 0x7f, 0x00, 0x00, 0x00, 0xff, 0xff, 0x4b, 0xab, 0x57, 0xa1, 0x33, 0x03, 0x00, 0x00, } // Reference imports to suppress errors if they are not otherwise used. 
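For callers, the regenerated structs above mean SeriesLabels and Labels are now non-nullable labelpb.LabelSet values and the generated getters (GetSeriesLabels and friends) are gone, so code reads the fields directly and no longer nil-checks them. A small illustration, assuming the post-change field layout shown in these hunks and only buildable against this branch of the module:

```go
// Illustration of the new field access; compiles only against this branch
// of github.com/thanos-io/thanos because it uses the regenerated types.
package main

import (
	"fmt"

	"github.com/thanos-io/thanos/pkg/exemplars/exemplarspb"
	"github.com/thanos-io/thanos/pkg/store/labelpb"
)

func main() {
	d := &exemplarspb.ExemplarData{
		// Previously &labelpb.LabelSet{...}; now a value, so it can never be
		// nil and needs no nil check before use.
		SeriesLabels: labelpb.LabelSet{Labels: []labelpb.Label{
			{Name: "__name__", Value: "test_exemplar_metric_total"},
		}},
	}

	// Previously reached through the generated GetSeriesLabels() getter,
	// which the diff above removes.
	for _, l := range d.SeriesLabels.Labels {
		fmt.Println(l.Name, "=", l.Value)
	}
}
```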
@@ -481,10 +406,6 @@ func (m *ExemplarsRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if m.PartialResponseStrategy != 0 { i = encodeVarintRpc(dAtA, i, uint64(m.PartialResponseStrategy)) i-- @@ -530,10 +451,6 @@ func (m *ExemplarsResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if m.Result != nil { { size := m.Result.Size() @@ -601,10 +518,6 @@ func (m *ExemplarData) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if len(m.Exemplars) > 0 { for iNdEx := len(m.Exemplars) - 1; iNdEx >= 0; iNdEx-- { { @@ -619,18 +532,16 @@ func (m *ExemplarData) MarshalToSizedBuffer(dAtA []byte) (int, error) { dAtA[i] = 0x12 } } - if m.SeriesLabels != nil { - { - size, err := m.SeriesLabels.MarshalToSizedBuffer(dAtA[:i]) - if err != nil { - return 0, err - } - i -= size - i = encodeVarintRpc(dAtA, i, uint64(size)) + { + size, err := m.SeriesLabels.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err } - i-- - dAtA[i] = 0xa + i -= size + i = encodeVarintRpc(dAtA, i, uint64(size)) } + i-- + dAtA[i] = 0xa return len(dAtA) - i, nil } @@ -654,10 +565,6 @@ func (m *Exemplar) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if m.Ts != 0 { i = encodeVarintRpc(dAtA, i, uint64(m.Ts)) i-- @@ -669,18 +576,16 @@ func (m *Exemplar) MarshalToSizedBuffer(dAtA []byte) (int, error) { i-- dAtA[i] = 0x11 } - if m.Labels != nil { - { - size, err := m.Labels.MarshalToSizedBuffer(dAtA[:i]) - if err != nil { - return 0, err - } - i -= size - i = encodeVarintRpc(dAtA, i, uint64(size)) + { + size, err := m.Labels.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err } - i-- - dAtA[i] = 0xa + i -= size + i = encodeVarintRpc(dAtA, i, uint64(size)) } + i-- + dAtA[i] = 0xa return len(dAtA) - i, nil } @@ -714,9 +619,6 @@ func (m *ExemplarsRequest) Size() (n int) { if m.PartialResponseStrategy != 0 { n += 1 + sovRpc(uint64(m.PartialResponseStrategy)) } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -729,9 +631,6 @@ func (m *ExemplarsResponse) Size() (n int) { if m.Result != nil { n += m.Result.Size() } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -763,19 +662,14 @@ func (m *ExemplarData) Size() (n int) { } var l int _ = l - if m.SeriesLabels != nil { - l = m.SeriesLabels.Size() - n += 1 + l + sovRpc(uint64(l)) - } + l = m.SeriesLabels.Size() + n += 1 + l + sovRpc(uint64(l)) if len(m.Exemplars) > 0 { for _, e := range m.Exemplars { l = e.Size() n += 1 + l + sovRpc(uint64(l)) } } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -785,19 +679,14 @@ func (m *Exemplar) Size() (n int) { } var l int _ = l - if m.Labels != nil { - l = m.Labels.Size() - n += 1 + l + sovRpc(uint64(l)) - } + l = m.Labels.Size() + n += 1 + l + sovRpc(uint64(l)) if m.Value != 0 { n += 9 } if m.Ts != 0 { n += 1 + sovRpc(uint64(m.Ts)) } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -937,7 +826,6 @@ func (m *ExemplarsRequest) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return 
io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } @@ -1055,7 +943,6 @@ func (m *ExemplarsResponse) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } @@ -1123,9 +1010,6 @@ func (m *ExemplarData) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - if m.SeriesLabels == nil { - m.SeriesLabels = &labelpb.LabelSet{} - } if err := m.SeriesLabels.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -1176,7 +1060,6 @@ func (m *ExemplarData) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } @@ -1244,9 +1127,6 @@ func (m *Exemplar) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - if m.Labels == nil { - m.Labels = &labelpb.LabelSet{} - } if err := m.Labels.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -1293,7 +1173,6 @@ func (m *Exemplar) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } diff --git a/pkg/exemplars/exemplarspb/rpc.proto b/pkg/exemplars/exemplarspb/rpc.proto index e97a93a839..b02e642485 100644 --- a/pkg/exemplars/exemplarspb/rpc.proto +++ b/pkg/exemplars/exemplarspb/rpc.proto @@ -6,9 +6,21 @@ package thanos; import "store/storepb/types.proto"; import "store/labelpb/types.proto"; +import "gogoproto/gogo.proto"; option go_package = "exemplarspb"; +option (gogoproto.sizer_all) = true; +option (gogoproto.marshaler_all) = true; +option (gogoproto.unmarshaler_all) = true; +option (gogoproto.goproto_getters_all) = false; + +// Do not generate XXX fields to reduce memory footprint and opening a door +// for zero-copy casts to/from prometheus data types. +option (gogoproto.goproto_unkeyed_all) = false; +option (gogoproto.goproto_unrecognized_all) = false; +option (gogoproto.goproto_sizecache_all) = false; + /// Exemplars represents API that is responsible for gathering exemplars and their states. service Exemplars { /// Exemplars has info for all exemplars. 
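The regenerated Exemplar struct earlier in this file keeps its JSON names (json:"timestamp" on Ts, json:"value" on Value) but no longer needs @gotags comments, because the .proto above carries those names through (gogoproto.jsontag) field options in the following hunk. The observable effect on encoding/json output can be shown with a stand-in struct; this is not the generated type itself:

```go
// Stand-in struct showing the effect of the json struct tags; the real
// generated Exemplar type carries the same tags plus protobuf ones.
package main

import (
	"encoding/json"
	"fmt"
)

type exemplar struct {
	Value float64 `json:"value"`
	Ts    int64   `json:"timestamp"` // Go field Ts, JSON field "timestamp"
}

func main() {
	b, _ := json.Marshal(exemplar{Value: 19, Ts: 1600096955479})
	fmt.Println(string(b)) // {"value":19,"timestamp":1600096955479}
}
```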
@@ -31,17 +43,12 @@ message ExemplarsResponse { } message ExemplarData { - // @gotags: json:"seriesLabels" - LabelSet seriesLabels = 1; - // @gotags: json:"exemplars" - repeated Exemplar exemplars = 2; + LabelSet seriesLabels = 1 [(gogoproto.jsontag) = "seriesLabels", (gogoproto.nullable) = false]; + repeated Exemplar exemplars = 2 [(gogoproto.jsontag) = "exemplars"]; } message Exemplar { - // @gotags: json:"labels" - LabelSet labels = 1; - // @gotags: json:"value" - double value = 2; - // @gotags: json:"timestamp" - int64 ts = 3; + LabelSet labels = 1 [(gogoproto.jsontag) = "labels", (gogoproto.nullable) = false]; + double value = 2 [(gogoproto.jsontag) = "value"]; + int64 ts = 3 [(gogoproto.jsontag) = "timestamp"]; } diff --git a/pkg/exemplars/proxy_test.go b/pkg/exemplars/proxy_test.go index e49f8e5fbb..581d97abde 100644 --- a/pkg/exemplars/proxy_test.go +++ b/pkg/exemplars/proxy_test.go @@ -130,7 +130,7 @@ func TestProxy(t *testing.T) { { ExemplarsClient: &testExemplarClient{ response: exemplarspb.NewExemplarsResponse(&exemplarspb.ExemplarData{ - SeriesLabels: &labelpb.LabelSet{Labels: labelpb.PromLabelsToLabelpbLabels(labels.FromMap(map[string]string{"__name__": "http_request_duration_bucket"}))}, + SeriesLabels: labelpb.LabelSet{Labels: labelpb.PromLabelsToLabelpbLabels(labels.FromMap(map[string]string{"__name__": "http_request_duration_bucket"}))}, Exemplars: []*exemplarspb.Exemplar{{Value: 1}}, }), }, @@ -143,7 +143,7 @@ func TestProxy(t *testing.T) { server: &testExemplarServer{}, wantResponses: []*exemplarspb.ExemplarsResponse{ exemplarspb.NewExemplarsResponse(&exemplarspb.ExemplarData{ - SeriesLabels: &labelpb.LabelSet{Labels: labelpb.PromLabelsToLabelpbLabels(labels.FromMap(map[string]string{"__name__": "http_request_duration_bucket"}))}, + SeriesLabels: labelpb.LabelSet{Labels: labelpb.PromLabelsToLabelpbLabels(labels.FromMap(map[string]string{"__name__": "http_request_duration_bucket"}))}, Exemplars: []*exemplarspb.Exemplar{{Value: 1}}, }), }, @@ -158,7 +158,7 @@ func TestProxy(t *testing.T) { { ExemplarsClient: &testExemplarClient{ response: exemplarspb.NewExemplarsResponse(&exemplarspb.ExemplarData{ - SeriesLabels: &labelpb.LabelSet{Labels: labelpb.PromLabelsToLabelpbLabels(labels.FromMap(map[string]string{"__name__": "http_request_duration_bucket"}))}, + SeriesLabels: labelpb.LabelSet{Labels: labelpb.PromLabelsToLabelpbLabels(labels.FromMap(map[string]string{"__name__": "http_request_duration_bucket"}))}, Exemplars: []*exemplarspb.Exemplar{{Value: 1}}, }), }, @@ -171,7 +171,7 @@ func TestProxy(t *testing.T) { server: &testExemplarServer{}, wantResponses: []*exemplarspb.ExemplarsResponse{ exemplarspb.NewExemplarsResponse(&exemplarspb.ExemplarData{ - SeriesLabels: &labelpb.LabelSet{Labels: labelpb.PromLabelsToLabelpbLabels(labels.FromMap(map[string]string{"__name__": "http_request_duration_bucket"}))}, + SeriesLabels: labelpb.LabelSet{Labels: labelpb.PromLabelsToLabelpbLabels(labels.FromMap(map[string]string{"__name__": "http_request_duration_bucket"}))}, Exemplars: []*exemplarspb.Exemplar{{Value: 1}}, }), }, @@ -206,7 +206,7 @@ func TestProxy(t *testing.T) { { ExemplarsClient: &testExemplarClient{ response: exemplarspb.NewExemplarsResponse(&exemplarspb.ExemplarData{ - SeriesLabels: &labelpb.LabelSet{Labels: labelpb.PromLabelsToLabelpbLabels(labels.FromMap(map[string]string{"__name__": "http_request_duration_bucket"}))}, + SeriesLabels: labelpb.LabelSet{Labels: labelpb.PromLabelsToLabelpbLabels(labels.FromMap(map[string]string{"__name__": "http_request_duration_bucket"}))}, 
Exemplars: []*exemplarspb.Exemplar{{Value: 1}}, }), }, @@ -228,7 +228,7 @@ func TestProxy(t *testing.T) { { ExemplarsClient: &testExemplarClient{ response: exemplarspb.NewExemplarsResponse(&exemplarspb.ExemplarData{ - SeriesLabels: &labelpb.LabelSet{Labels: labelpb.PromLabelsToLabelpbLabels(labels.FromMap(map[string]string{"__name__": "http_request_duration_bucket"}))}, + SeriesLabels: labelpb.LabelSet{Labels: labelpb.PromLabelsToLabelpbLabels(labels.FromMap(map[string]string{"__name__": "http_request_duration_bucket"}))}, Exemplars: []*exemplarspb.Exemplar{{Value: 1}}, }), }, @@ -239,7 +239,7 @@ func TestProxy(t *testing.T) { server: &testExemplarServer{}, wantResponses: []*exemplarspb.ExemplarsResponse{ exemplarspb.NewExemplarsResponse(&exemplarspb.ExemplarData{ - SeriesLabels: &labelpb.LabelSet{Labels: labelpb.PromLabelsToLabelpbLabels(labels.FromMap(map[string]string{"__name__": "http_request_duration_bucket"}))}, + SeriesLabels: labelpb.LabelSet{Labels: labelpb.PromLabelsToLabelpbLabels(labels.FromMap(map[string]string{"__name__": "http_request_duration_bucket"}))}, Exemplars: []*exemplarspb.Exemplar{{Value: 1}}, }), }, @@ -254,7 +254,7 @@ func TestProxy(t *testing.T) { { ExemplarsClient: &testExemplarClient{ response: exemplarspb.NewExemplarsResponse(&exemplarspb.ExemplarData{ - SeriesLabels: &labelpb.LabelSet{Labels: labelpb.PromLabelsToLabelpbLabels(labels.FromMap(map[string]string{"__name__": "http_request_duration_bucket"}))}, + SeriesLabels: labelpb.LabelSet{Labels: labelpb.PromLabelsToLabelpbLabels(labels.FromMap(map[string]string{"__name__": "http_request_duration_bucket"}))}, Exemplars: []*exemplarspb.Exemplar{{Value: 1}}, }), }, @@ -264,7 +264,7 @@ func TestProxy(t *testing.T) { server: &testExemplarServer{}, wantResponses: []*exemplarspb.ExemplarsResponse{ exemplarspb.NewExemplarsResponse(&exemplarspb.ExemplarData{ - SeriesLabels: &labelpb.LabelSet{Labels: labelpb.PromLabelsToLabelpbLabels(labels.FromMap(map[string]string{"__name__": "http_request_duration_bucket"}))}, + SeriesLabels: labelpb.LabelSet{Labels: labelpb.PromLabelsToLabelpbLabels(labels.FromMap(map[string]string{"__name__": "http_request_duration_bucket"}))}, Exemplars: []*exemplarspb.Exemplar{{Value: 1}}, }), }, @@ -279,7 +279,7 @@ func TestProxy(t *testing.T) { { ExemplarsClient: &testExemplarClient{ response: exemplarspb.NewExemplarsResponse(&exemplarspb.ExemplarData{ - SeriesLabels: &labelpb.LabelSet{Labels: labelpb.PromLabelsToLabelpbLabels(labels.FromMap(map[string]string{"foo": "bar"}))}, + SeriesLabels: labelpb.LabelSet{Labels: labelpb.PromLabelsToLabelpbLabels(labels.FromMap(map[string]string{"foo": "bar"}))}, Exemplars: []*exemplarspb.Exemplar{{Value: 1}}, }), }, @@ -288,7 +288,7 @@ func TestProxy(t *testing.T) { { ExemplarsClient: &testExemplarClient{ response: exemplarspb.NewExemplarsResponse(&exemplarspb.ExemplarData{ - SeriesLabels: &labelpb.LabelSet{Labels: labelpb.PromLabelsToLabelpbLabels(labels.FromMap(map[string]string{"foo": "baz"}))}, + SeriesLabels: labelpb.LabelSet{Labels: labelpb.PromLabelsToLabelpbLabels(labels.FromMap(map[string]string{"foo": "baz"}))}, Exemplars: []*exemplarspb.Exemplar{{Value: 2}}, }), }, @@ -298,7 +298,7 @@ func TestProxy(t *testing.T) { server: &testExemplarServer{}, wantResponses: []*exemplarspb.ExemplarsResponse{ exemplarspb.NewExemplarsResponse(&exemplarspb.ExemplarData{ - SeriesLabels: &labelpb.LabelSet{Labels: labelpb.PromLabelsToLabelpbLabels(labels.FromMap(map[string]string{"foo": "bar"}))}, + SeriesLabels: labelpb.LabelSet{Labels: 
labelpb.PromLabelsToLabelpbLabels(labels.FromMap(map[string]string{"foo": "bar"}))}, Exemplars: []*exemplarspb.Exemplar{{Value: 1}}, }), }, @@ -313,7 +313,7 @@ func TestProxy(t *testing.T) { { ExemplarsClient: &testExemplarClient{ response: exemplarspb.NewExemplarsResponse(&exemplarspb.ExemplarData{ - SeriesLabels: &labelpb.LabelSet{Labels: labelpb.PromLabelsToLabelpbLabels(labels.FromMap(map[string]string{"foo": "bar"}))}, + SeriesLabels: labelpb.LabelSet{Labels: labelpb.PromLabelsToLabelpbLabels(labels.FromMap(map[string]string{"foo": "bar"}))}, Exemplars: []*exemplarspb.Exemplar{{Value: 1}}, }), }, @@ -322,7 +322,7 @@ func TestProxy(t *testing.T) { { ExemplarsClient: &testExemplarClient{ response: exemplarspb.NewExemplarsResponse(&exemplarspb.ExemplarData{ - SeriesLabels: &labelpb.LabelSet{Labels: labelpb.PromLabelsToLabelpbLabels(labels.FromMap(map[string]string{"foo": "baz"}))}, + SeriesLabels: labelpb.LabelSet{Labels: labelpb.PromLabelsToLabelpbLabels(labels.FromMap(map[string]string{"foo": "baz"}))}, Exemplars: []*exemplarspb.Exemplar{{Value: 2}}, }), }, @@ -332,11 +332,11 @@ func TestProxy(t *testing.T) { server: &testExemplarServer{}, wantResponses: []*exemplarspb.ExemplarsResponse{ exemplarspb.NewExemplarsResponse(&exemplarspb.ExemplarData{ - SeriesLabels: &labelpb.LabelSet{Labels: labelpb.PromLabelsToLabelpbLabels(labels.FromMap(map[string]string{"foo": "bar"}))}, + SeriesLabels: labelpb.LabelSet{Labels: labelpb.PromLabelsToLabelpbLabels(labels.FromMap(map[string]string{"foo": "bar"}))}, Exemplars: []*exemplarspb.Exemplar{{Value: 1}}, }), exemplarspb.NewExemplarsResponse(&exemplarspb.ExemplarData{ - SeriesLabels: &labelpb.LabelSet{Labels: labelpb.PromLabelsToLabelpbLabels(labels.FromMap(map[string]string{"foo": "baz"}))}, + SeriesLabels: labelpb.LabelSet{Labels: labelpb.PromLabelsToLabelpbLabels(labels.FromMap(map[string]string{"foo": "baz"}))}, Exemplars: []*exemplarspb.Exemplar{{Value: 2}}, }), }, diff --git a/pkg/exemplars/tsdb.go b/pkg/exemplars/tsdb.go index a382e25f08..ca569372bc 100644 --- a/pkg/exemplars/tsdb.go +++ b/pkg/exemplars/tsdb.go @@ -70,7 +70,7 @@ func (t *TSDB) Exemplars(matchers [][]*labels.Matcher, start, end int64, s exemp for _, e := range exemplars { exd := exemplarspb.ExemplarData{ - SeriesLabels: &labelpb.LabelSet{ + SeriesLabels: labelpb.LabelSet{ Labels: labelpb.PromLabelsToLabelpbLabels(labelpb.ExtendSortedLabels(e.SeriesLabels, t.getExtLabels())), }, Exemplars: exemplarspb.ExemplarsFromPromExemplars(e.Exemplars), diff --git a/pkg/info/info.go b/pkg/info/info.go index 71a75e1453..7ee65a43f8 100644 --- a/pkg/info/info.go +++ b/pkg/info/info.go @@ -20,7 +20,7 @@ type InfoServer struct { component string - getLabelSet func() []*labelpb.LabelSet + getLabelSet func() []labelpb.LabelSet getStoreInfo func() (*infopb.StoreInfo, error) getExemplarsInfo func() *infopb.ExemplarsInfo getRulesInfo func() *infopb.RulesInfo @@ -38,7 +38,7 @@ func NewInfoServer( srv := &InfoServer{ component: component, // By default, do not return info for any API. - getLabelSet: func() []*labelpb.LabelSet { return nil }, + getLabelSet: func() []labelpb.LabelSet { return nil }, getStoreInfo: func() (*infopb.StoreInfo, error) { return nil, nil }, getExemplarsInfo: func() *infopb.ExemplarsInfo { return nil }, getRulesInfo: func() *infopb.RulesInfo { return nil }, @@ -60,10 +60,10 @@ type ServerOptionFunc func(*InfoServer) // WithLabelSetFunc determines the function that should be executed to obtain // the label set information. 
If no function is provided, the default empty // label set is returned. Only the first function from the list is considered. -func WithLabelSetFunc(getLabelSet ...func() []*labelpb.LabelSet) ServerOptionFunc { +func WithLabelSetFunc(getLabelSet ...func() []labelpb.LabelSet) ServerOptionFunc { if len(getLabelSet) == 0 { return func(s *InfoServer) { - s.getLabelSet = func() []*labelpb.LabelSet { return []*labelpb.LabelSet{} } + s.getLabelSet = func() []labelpb.LabelSet { return []labelpb.LabelSet{} } } } diff --git a/pkg/info/infopb/custom.pb.go b/pkg/info/infopb/custom.pb.go index c04d692fb4..f4432d8b20 100644 --- a/pkg/info/infopb/custom.pb.go +++ b/pkg/info/infopb/custom.pb.go @@ -8,9 +8,9 @@ import ( "github.com/thanos-io/thanos/pkg/store/labelpb" ) -func NewTSDBInfo(mint, maxt int64, lbls []*labelpb.Label) *TSDBInfo { - return &TSDBInfo{ - Labels: &labelpb.LabelSet{ +func NewTSDBInfo(mint, maxt int64, lbls []labelpb.Label) TSDBInfo { + return TSDBInfo{ + Labels: labelpb.LabelSet{ Labels: lbls, }, MinTime: mint, @@ -18,7 +18,7 @@ func NewTSDBInfo(mint, maxt int64, lbls []*labelpb.Label) *TSDBInfo { } } -type TSDBInfos []*TSDBInfo +type TSDBInfos []TSDBInfo func (infos TSDBInfos) MaxT() int64 { var maxt int64 = math.MinInt64 diff --git a/pkg/info/infopb/rpc.pb.go b/pkg/info/infopb/rpc.pb.go index 188818dd83..9bdce73435 100644 --- a/pkg/info/infopb/rpc.pb.go +++ b/pkg/info/infopb/rpc.pb.go @@ -10,6 +10,7 @@ import ( math "math" math_bits "math/bits" + _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" labelpb "github.com/thanos-io/thanos/pkg/store/labelpb" grpc "google.golang.org/grpc" @@ -29,9 +30,6 @@ var _ = math.Inf const _ = proto.GoGoProtoPackageIsVersion3 // please upgrade the proto package type InfoRequest struct { - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` } func (m *InfoRequest) Reset() { *m = InfoRequest{} } @@ -68,8 +66,8 @@ func (m *InfoRequest) XXX_DiscardUnknown() { var xxx_messageInfo_InfoRequest proto.InternalMessageInfo type InfoResponse struct { - LabelSets []*labelpb.LabelSet `protobuf:"bytes,1,rep,name=label_sets,json=labelSets,proto3" json:"label_sets,omitempty"` - ComponentType string `protobuf:"bytes,2,opt,name=ComponentType,proto3" json:"ComponentType,omitempty"` + LabelSets []labelpb.LabelSet `protobuf:"bytes,1,rep,name=label_sets,json=labelSets,proto3" json:"label_sets"` + ComponentType string `protobuf:"bytes,2,opt,name=ComponentType,proto3" json:"ComponentType,omitempty"` // StoreInfo holds the metadata related to Store API if exposed by the component otherwise it will be null. Store *StoreInfo `protobuf:"bytes,3,opt,name=store,proto3" json:"store,omitempty"` // RulesInfo holds the metadata related to Rules API if exposed by the component otherwise it will be null. @@ -81,10 +79,7 @@ type InfoResponse struct { // ExemplarsInfo holds the metadata related to Exemplars API if exposed by the component otherwise it will be null. Exemplars *ExemplarsInfo `protobuf:"bytes,7,opt,name=exemplars,proto3" json:"exemplars,omitempty"` // QueryAPIInfo holds the metadata related to Query API if exposed by the component, otherwise it will be null. 
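The WithLabelSetFunc change at the top of this group follows the variadic functional-option shape described in its doc comment: with no argument it installs a default returning an empty, non-nil slice of value LabelSets, and only the first supplied function is used. A compact sketch of that pattern; LabelSet, server, and newServer here are stand-ins rather than the real pkg/info types:

```go
// Sketch only: LabelSet, server, and the constructor are stand-ins for the
// real pkg/info types; the option wiring mirrors WithLabelSetFunc above.
package main

import "fmt"

type LabelSet struct{ Labels map[string]string }

type server struct {
	getLabelSet func() []LabelSet
}

type option func(*server)

// WithLabelSetFunc installs the first provided function, or a default that
// returns an empty (non-nil) slice when called with no arguments.
func WithLabelSetFunc(fns ...func() []LabelSet) option {
	if len(fns) == 0 {
		return func(s *server) {
			s.getLabelSet = func() []LabelSet { return []LabelSet{} }
		}
	}
	return func(s *server) { s.getLabelSet = fns[0] }
}

func newServer(opts ...option) *server {
	// By default, report no label sets at all (nil), as the real constructor does.
	s := &server{getLabelSet: func() []LabelSet { return nil }}
	for _, o := range opts {
		o(s)
	}
	return s
}

func main() {
	s := newServer(WithLabelSetFunc())
	fmt.Println(s.getLabelSet() == nil, len(s.getLabelSet())) // false 0
}
```

Returning an empty slice rather than nil spares callers from distinguishing "option not set" from "no label sets".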
- Query *QueryAPIInfo `protobuf:"bytes,8,opt,name=query,proto3" json:"query,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Query *QueryAPIInfo `protobuf:"bytes,8,opt,name=query,proto3" json:"query,omitempty"` } func (m *InfoResponse) Reset() { *m = InfoResponse{} } @@ -120,62 +115,6 @@ func (m *InfoResponse) XXX_DiscardUnknown() { var xxx_messageInfo_InfoResponse proto.InternalMessageInfo -func (m *InfoResponse) GetLabelSets() []*labelpb.LabelSet { - if m != nil { - return m.LabelSets - } - return nil -} - -func (m *InfoResponse) GetComponentType() string { - if m != nil { - return m.ComponentType - } - return "" -} - -func (m *InfoResponse) GetStore() *StoreInfo { - if m != nil { - return m.Store - } - return nil -} - -func (m *InfoResponse) GetRules() *RulesInfo { - if m != nil { - return m.Rules - } - return nil -} - -func (m *InfoResponse) GetMetricMetadata() *MetricMetadataInfo { - if m != nil { - return m.MetricMetadata - } - return nil -} - -func (m *InfoResponse) GetTargets() *TargetsInfo { - if m != nil { - return m.Targets - } - return nil -} - -func (m *InfoResponse) GetExemplars() *ExemplarsInfo { - if m != nil { - return m.Exemplars - } - return nil -} - -func (m *InfoResponse) GetQuery() *QueryAPIInfo { - if m != nil { - return m.Query - } - return nil -} - // StoreInfo holds the metadata related to Store API exposed by the component. type StoreInfo struct { MinTime int64 `protobuf:"varint,1,opt,name=min_time,json=minTime,proto3" json:"min_time,omitempty"` @@ -184,10 +123,7 @@ type StoreInfo struct { // replica_aware means this store supports without_replica_labels of StoreAPI.Series. SupportsWithoutReplicaLabels bool `protobuf:"varint,5,opt,name=supports_without_replica_labels,json=supportsWithoutReplicaLabels,proto3" json:"supports_without_replica_labels,omitempty"` // TSDBInfos holds metadata for all TSDBs exposed by the store. - TsdbInfos []*TSDBInfo `protobuf:"bytes,6,rep,name=tsdb_infos,json=tsdbInfos,proto3" json:"tsdb_infos,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + TsdbInfos []TSDBInfo `protobuf:"bytes,6,rep,name=tsdb_infos,json=tsdbInfos,proto3" json:"tsdb_infos"` } func (m *StoreInfo) Reset() { *m = StoreInfo{} } @@ -223,46 +159,8 @@ func (m *StoreInfo) XXX_DiscardUnknown() { var xxx_messageInfo_StoreInfo proto.InternalMessageInfo -func (m *StoreInfo) GetMinTime() int64 { - if m != nil { - return m.MinTime - } - return 0 -} - -func (m *StoreInfo) GetMaxTime() int64 { - if m != nil { - return m.MaxTime - } - return 0 -} - -func (m *StoreInfo) GetSupportsSharding() bool { - if m != nil { - return m.SupportsSharding - } - return false -} - -func (m *StoreInfo) GetSupportsWithoutReplicaLabels() bool { - if m != nil { - return m.SupportsWithoutReplicaLabels - } - return false -} - -func (m *StoreInfo) GetTsdbInfos() []*TSDBInfo { - if m != nil { - return m.TsdbInfos - } - return nil -} - // RulesInfo holds the metadata related to Rules API exposed by the component. type RulesInfo struct { - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` } func (m *RulesInfo) Reset() { *m = RulesInfo{} } @@ -300,9 +198,6 @@ var xxx_messageInfo_RulesInfo proto.InternalMessageInfo // MetricMetadataInfo holds the metadata related to Metadata API exposed by the component. 
type MetricMetadataInfo struct { - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` } func (m *MetricMetadataInfo) Reset() { *m = MetricMetadataInfo{} } @@ -340,9 +235,6 @@ var xxx_messageInfo_MetricMetadataInfo proto.InternalMessageInfo // TargetsInfo holds the metadata related to Targets API exposed by the component. type TargetsInfo struct { - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` } func (m *TargetsInfo) Reset() { *m = TargetsInfo{} } @@ -380,11 +272,8 @@ var xxx_messageInfo_TargetsInfo proto.InternalMessageInfo // ExemplarsInfo holds the metadata related to Exemplars API exposed by the component. type ExemplarsInfo struct { - MinTime int64 `protobuf:"varint,1,opt,name=min_time,json=minTime,proto3" json:"min_time,omitempty"` - MaxTime int64 `protobuf:"varint,2,opt,name=max_time,json=maxTime,proto3" json:"max_time,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + MinTime int64 `protobuf:"varint,1,opt,name=min_time,json=minTime,proto3" json:"min_time,omitempty"` + MaxTime int64 `protobuf:"varint,2,opt,name=max_time,json=maxTime,proto3" json:"max_time,omitempty"` } func (m *ExemplarsInfo) Reset() { *m = ExemplarsInfo{} } @@ -420,25 +309,8 @@ func (m *ExemplarsInfo) XXX_DiscardUnknown() { var xxx_messageInfo_ExemplarsInfo proto.InternalMessageInfo -func (m *ExemplarsInfo) GetMinTime() int64 { - if m != nil { - return m.MinTime - } - return 0 -} - -func (m *ExemplarsInfo) GetMaxTime() int64 { - if m != nil { - return m.MaxTime - } - return 0 -} - // QueryInfo holds the metadata related to Query API exposed by the component. type QueryAPIInfo struct { - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` } func (m *QueryAPIInfo) Reset() { *m = QueryAPIInfo{} } @@ -475,12 +347,9 @@ func (m *QueryAPIInfo) XXX_DiscardUnknown() { var xxx_messageInfo_QueryAPIInfo proto.InternalMessageInfo type TSDBInfo struct { - Labels *labelpb.LabelSet `protobuf:"bytes,1,opt,name=labels,proto3" json:"labels,omitempty"` - MinTime int64 `protobuf:"varint,2,opt,name=min_time,json=minTime,proto3" json:"min_time,omitempty"` - MaxTime int64 `protobuf:"varint,3,opt,name=max_time,json=maxTime,proto3" json:"max_time,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Labels labelpb.LabelSet `protobuf:"bytes,1,opt,name=labels,proto3" json:"labels"` + MinTime int64 `protobuf:"varint,2,opt,name=min_time,json=minTime,proto3" json:"min_time,omitempty"` + MaxTime int64 `protobuf:"varint,3,opt,name=max_time,json=maxTime,proto3" json:"max_time,omitempty"` } func (m *TSDBInfo) Reset() { *m = TSDBInfo{} } @@ -516,27 +385,6 @@ func (m *TSDBInfo) XXX_DiscardUnknown() { var xxx_messageInfo_TSDBInfo proto.InternalMessageInfo -func (m *TSDBInfo) GetLabels() *labelpb.LabelSet { - if m != nil { - return m.Labels - } - return nil -} - -func (m *TSDBInfo) GetMinTime() int64 { - if m != nil { - return m.MinTime - } - return 0 -} - -func (m *TSDBInfo) GetMaxTime() int64 { - if m != nil { - return m.MaxTime - } - return 0 -} - func init() { proto.RegisterType((*InfoRequest)(nil), "thanos.info.InfoRequest") proto.RegisterType((*InfoResponse)(nil), "thanos.info.InfoResponse") @@ -552,42 +400,44 @@ func init() { func init() { proto.RegisterFile("info/infopb/rpc.proto", fileDescriptor_a1214ec45d2bf952) } 
var fileDescriptor_a1214ec45d2bf952 = []byte{ - // 547 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x94, 0xdd, 0x8a, 0xd3, 0x40, - 0x14, 0xc7, 0x49, 0x3f, 0xd3, 0xd3, 0xed, 0x5a, 0x87, 0x5d, 0x49, 0x8b, 0x74, 0x4b, 0xf0, 0xa2, - 0xa0, 0x34, 0x50, 0xbd, 0x10, 0xbc, 0x72, 0xd7, 0x82, 0x2b, 0x2e, 0x68, 0x5a, 0x10, 0xbc, 0x09, - 0x69, 0x3b, 0xbb, 0x8d, 0x24, 0x99, 0xd9, 0x99, 0x29, 0xb6, 0xaf, 0xe4, 0x93, 0x78, 0xe9, 0x23, - 0x48, 0xdf, 0xc0, 0x37, 0x90, 0x39, 0x93, 0xd6, 0x0e, 0xbb, 0xde, 0xec, 0x4d, 0x9b, 0xcc, 0xff, - 0xf7, 0x9f, 0x39, 0x5f, 0x13, 0x38, 0x4d, 0xf2, 0x6b, 0x16, 0xe8, 0x1f, 0x3e, 0x0b, 0x04, 0x9f, - 0x0f, 0xb9, 0x60, 0x8a, 0x91, 0xa6, 0x5a, 0xc6, 0x39, 0x93, 0x43, 0x2d, 0x74, 0x3b, 0x52, 0x31, - 0x41, 0x83, 0x34, 0x9e, 0xd1, 0x94, 0xcf, 0x02, 0xb5, 0xe1, 0x54, 0x1a, 0xce, 0x6f, 0x41, 0xf3, - 0x32, 0xbf, 0x66, 0x21, 0xbd, 0x5d, 0x51, 0xa9, 0xfc, 0x1f, 0x65, 0x38, 0x32, 0xef, 0x92, 0xb3, - 0x5c, 0x52, 0x12, 0x00, 0xa0, 0x2d, 0x92, 0x54, 0x49, 0xcf, 0xe9, 0x97, 0x07, 0xcd, 0x51, 0x7b, - 0x58, 0x6c, 0xfe, 0x51, 0x2b, 0x13, 0xaa, 0xc2, 0x46, 0x5a, 0x3c, 0x49, 0xf2, 0x0c, 0x5a, 0x17, - 0x2c, 0xe3, 0x2c, 0xa7, 0xb9, 0x9a, 0x6e, 0x38, 0xf5, 0x4a, 0x7d, 0x67, 0xd0, 0x08, 0xed, 0x45, - 0xf2, 0x02, 0xaa, 0x18, 0x93, 0x57, 0xee, 0x3b, 0x83, 0xe6, 0xe8, 0xc9, 0xf0, 0x20, 0xdc, 0xe1, - 0x44, 0x2b, 0x18, 0x85, 0x81, 0x34, 0x2d, 0x56, 0x29, 0x95, 0x5e, 0xe5, 0x1e, 0x3a, 0xd4, 0x8a, - 0xa1, 0x11, 0x22, 0xef, 0xe1, 0x51, 0x46, 0x95, 0x48, 0xe6, 0x51, 0x46, 0x55, 0xbc, 0x88, 0x55, - 0xec, 0x55, 0xd1, 0x77, 0x66, 0xf9, 0xae, 0x90, 0xb9, 0x2a, 0x10, 0xdc, 0xe0, 0x38, 0xb3, 0xd6, - 0xc8, 0x08, 0xea, 0x2a, 0x16, 0x37, 0x3a, 0xf3, 0x1a, 0xee, 0xe0, 0x59, 0x3b, 0x4c, 0x8d, 0x86, - 0xd6, 0x1d, 0x48, 0x5e, 0x43, 0x83, 0xae, 0x69, 0xc6, 0xd3, 0x58, 0x48, 0xaf, 0x8e, 0xae, 0xae, - 0xe5, 0x1a, 0xef, 0x54, 0xf4, 0xfd, 0x83, 0x49, 0x00, 0xd5, 0xdb, 0x15, 0x15, 0x1b, 0xcf, 0x45, - 0x57, 0xc7, 0x72, 0x7d, 0xd6, 0xca, 0xdb, 0x4f, 0x97, 0x26, 0x51, 0xe4, 0xfc, 0x3f, 0x0e, 0x34, - 0xf6, 0xb5, 0x22, 0x1d, 0x70, 0xb3, 0x24, 0x8f, 0x54, 0x92, 0x51, 0xcf, 0xe9, 0x3b, 0x83, 0x72, - 0x58, 0xcf, 0x92, 0x7c, 0x9a, 0x64, 0x14, 0xa5, 0x78, 0x6d, 0xa4, 0x52, 0x21, 0xc5, 0x6b, 0x94, - 0x9e, 0xc3, 0x63, 0xb9, 0xe2, 0x9c, 0x09, 0x25, 0x23, 0xb9, 0x8c, 0xc5, 0x22, 0xc9, 0x6f, 0xb0, - 0x29, 0x6e, 0xd8, 0xde, 0x09, 0x93, 0x62, 0x9d, 0x8c, 0xe1, 0x6c, 0x0f, 0x7f, 0x4f, 0xd4, 0x92, - 0xad, 0x54, 0x24, 0x28, 0x4f, 0x93, 0x79, 0x1c, 0xe1, 0x04, 0x48, 0xac, 0xb4, 0x1b, 0x3e, 0xdd, - 0x61, 0x5f, 0x0c, 0x15, 0x1a, 0x08, 0xe7, 0x45, 0x92, 0x57, 0x00, 0x4a, 0x2e, 0x66, 0x91, 0x4e, - 0x4c, 0x57, 0x56, 0xcf, 0xd4, 0xa9, 0x5d, 0xd9, 0xc9, 0xbb, 0x73, 0x53, 0x1e, 0x0d, 0xea, 0x27, - 0xf9, 0xa1, 0xe2, 0x56, 0xda, 0x55, 0xbf, 0x09, 0x8d, 0x7d, 0xc3, 0xfd, 0x13, 0x20, 0x77, 0xbb, - 0xa8, 0x47, 0xfa, 0xa0, 0x33, 0xfe, 0x18, 0x5a, 0x56, 0xc9, 0x1f, 0x56, 0x28, 0xff, 0x18, 0x8e, - 0x0e, 0x7b, 0xe0, 0x7f, 0x03, 0x77, 0x17, 0x25, 0x19, 0x40, 0xad, 0x48, 0xdf, 0xc1, 0xd6, 0xdd, - 0xbd, 0x20, 0x85, 0x6e, 0x9d, 0x5d, 0xfa, 0xff, 0xd9, 0x65, 0xeb, 0xec, 0xd1, 0x05, 0x54, 0xf0, - 0x9c, 0x37, 0xc5, 0xbf, 0x3d, 0x86, 0x07, 0xf7, 0xb7, 0xdb, 0xb9, 0x47, 0x31, 0x37, 0xf9, 0xfc, - 0xe4, 0xe7, 0xb6, 0xe7, 0xfc, 0xda, 0xf6, 0x9c, 0xdf, 0xdb, 0x9e, 0xf3, 0xb5, 0x66, 0xbe, 0x18, - 0xb3, 0x1a, 0x7e, 0x06, 0x5e, 0xfe, 0x0d, 0x00, 0x00, 0xff, 0xff, 0xdd, 0xc2, 0x54, 0xcb, 0x47, - 0x04, 0x00, 0x00, + // 586 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 
0xff, 0x9c, 0x94, 0xcf, 0x8e, 0xd2, 0x40, + 0x1c, 0xc7, 0xe9, 0xf2, 0xaf, 0xfc, 0x58, 0x56, 0x9c, 0xec, 0x9a, 0x42, 0x4c, 0x21, 0xcd, 0x1e, + 0x48, 0x34, 0x6d, 0x82, 0x31, 0x31, 0x7a, 0x92, 0x95, 0xc4, 0x35, 0x6e, 0xa2, 0x85, 0xc4, 0xc4, + 0x4b, 0x53, 0x60, 0x16, 0x9a, 0xb4, 0x9d, 0xd9, 0xce, 0x10, 0xe1, 0x2d, 0x7c, 0x14, 0x1f, 0x83, + 0xe3, 0x1e, 0x3d, 0x19, 0x85, 0x87, 0xf0, 0x6a, 0x66, 0xa6, 0x45, 0x1a, 0x77, 0x2f, 0x5e, 0xa0, + 0x33, 0xdf, 0xcf, 0x77, 0xfa, 0xfb, 0x37, 0x85, 0xb3, 0x20, 0xbe, 0x26, 0x8e, 0xf8, 0xa1, 0x13, + 0x27, 0xa1, 0x53, 0x9b, 0x26, 0x84, 0x13, 0x54, 0xe7, 0x0b, 0x3f, 0x26, 0xcc, 0x16, 0x42, 0xbb, + 0xc5, 0x38, 0x49, 0xb0, 0x13, 0xfa, 0x13, 0x1c, 0xd2, 0x89, 0xc3, 0xd7, 0x14, 0x33, 0xc5, 0xb5, + 0x4f, 0xe7, 0x64, 0x4e, 0xe4, 0xa3, 0x23, 0x9e, 0xd4, 0xae, 0xd5, 0x80, 0xfa, 0x65, 0x7c, 0x4d, + 0x5c, 0x7c, 0xb3, 0xc4, 0x8c, 0x5b, 0xdf, 0x8a, 0x70, 0xac, 0xd6, 0x8c, 0x92, 0x98, 0x61, 0xf4, + 0x1c, 0x40, 0x1e, 0xe6, 0x31, 0xcc, 0x99, 0xa1, 0x75, 0x8b, 0xbd, 0x7a, 0xbf, 0x69, 0xa7, 0xaf, + 0x7c, 0x2f, 0x94, 0x11, 0xe6, 0x83, 0xd2, 0xe6, 0x47, 0xa7, 0xe0, 0xd6, 0xc2, 0x74, 0xcd, 0xd0, + 0x39, 0x34, 0x2e, 0x48, 0x44, 0x49, 0x8c, 0x63, 0x3e, 0x5e, 0x53, 0x6c, 0x1c, 0x75, 0xb5, 0x5e, + 0xcd, 0xcd, 0x6f, 0xa2, 0xa7, 0x50, 0x96, 0xf1, 0x1a, 0xc5, 0xae, 0xd6, 0xab, 0xf7, 0x1f, 0xd9, + 0x07, 0xa9, 0xd8, 0x23, 0xa1, 0xc8, 0x58, 0x14, 0x24, 0xe8, 0x64, 0x19, 0x62, 0x66, 0x94, 0xee, + 0xa0, 0x5d, 0xa1, 0x28, 0x5a, 0x42, 0xe8, 0x2d, 0x3c, 0x88, 0x30, 0x4f, 0x82, 0xa9, 0x17, 0x61, + 0xee, 0xcf, 0x7c, 0xee, 0x1b, 0x65, 0xe9, 0xeb, 0xe4, 0x7c, 0x57, 0x92, 0xb9, 0x4a, 0x11, 0x79, + 0xc0, 0x49, 0x94, 0xdb, 0x43, 0x7d, 0xa8, 0x72, 0x3f, 0x99, 0x8b, 0xfc, 0x2b, 0xf2, 0x04, 0x23, + 0x77, 0xc2, 0x58, 0x69, 0xd2, 0x9a, 0x81, 0xe8, 0x05, 0xd4, 0xf0, 0x0a, 0x47, 0x34, 0xf4, 0x13, + 0x66, 0x54, 0xa5, 0xab, 0x9d, 0x73, 0x0d, 0x33, 0x55, 0xfa, 0xfe, 0xc2, 0xc8, 0x81, 0xf2, 0xcd, + 0x12, 0x27, 0x6b, 0x43, 0x97, 0xae, 0x56, 0xce, 0xf5, 0x51, 0x28, 0xaf, 0x3f, 0x5c, 0xaa, 0x44, + 0x25, 0x67, 0xfd, 0xd6, 0xa0, 0xb6, 0xaf, 0x15, 0x6a, 0x81, 0x1e, 0x05, 0xb1, 0xc7, 0x83, 0x08, + 0x1b, 0x5a, 0x57, 0xeb, 0x15, 0xdd, 0x6a, 0x14, 0xc4, 0xe3, 0x20, 0xc2, 0x52, 0xf2, 0x57, 0x4a, + 0x3a, 0x4a, 0x25, 0x7f, 0x25, 0xa5, 0x27, 0xf0, 0x90, 0x2d, 0x29, 0x25, 0x09, 0x67, 0x1e, 0x5b, + 0xf8, 0xc9, 0x2c, 0x88, 0xe7, 0xb2, 0x29, 0xba, 0xdb, 0xcc, 0x84, 0x51, 0xba, 0x8f, 0x86, 0xd0, + 0xd9, 0xc3, 0x5f, 0x02, 0xbe, 0x20, 0x4b, 0xee, 0x25, 0x98, 0x86, 0xc1, 0xd4, 0xf7, 0xe4, 0x04, + 0x30, 0x59, 0x69, 0xdd, 0x7d, 0x9c, 0x61, 0x9f, 0x14, 0xe5, 0x2a, 0x48, 0x4e, 0x0d, 0x43, 0x2f, + 0x01, 0x38, 0x9b, 0x4d, 0x3c, 0x91, 0x98, 0xa8, 0xac, 0x98, 0xac, 0xb3, 0x7c, 0x65, 0x47, 0x6f, + 0x06, 0x22, 0xa9, 0x6c, 0xbc, 0x04, 0x2e, 0xd6, 0xec, 0x5d, 0x49, 0x2f, 0x35, 0xcb, 0x56, 0x1d, + 0x6a, 0xfb, 0xb6, 0x5b, 0xa7, 0x80, 0xfe, 0xed, 0xa5, 0x18, 0xef, 0x83, 0xfe, 0x58, 0x43, 0x68, + 0xe4, 0x0a, 0xff, 0x7f, 0xe5, 0xb2, 0x4e, 0xe0, 0xf8, 0xb0, 0x13, 0x16, 0x05, 0x3d, 0x8b, 0x15, + 0xd9, 0x50, 0x49, 0x8b, 0xa0, 0xc9, 0x06, 0xde, 0x77, 0x59, 0x52, 0x2a, 0x17, 0xc1, 0xd1, 0xfd, + 0x11, 0x14, 0x73, 0x11, 0xf4, 0x2f, 0xa0, 0x24, 0xdf, 0xf6, 0x2a, 0xfd, 0xcf, 0x8f, 0xe4, 0xc1, + 0x8d, 0x6e, 0xb7, 0xee, 0x50, 0xd4, 0xdd, 0x1e, 0x9c, 0x6f, 0x7e, 0x99, 0x85, 0xcd, 0xd6, 0xd4, + 0x6e, 0xb7, 0xa6, 0xf6, 0x73, 0x6b, 0x6a, 0x5f, 0x77, 0x66, 0xe1, 0x76, 0x67, 0x16, 0xbe, 0xef, + 0xcc, 0xc2, 0xe7, 0x8a, 0xfa, 0xd2, 0x4c, 0x2a, 0xf2, 0x43, 0xf1, 0xec, 0x4f, 0x00, 0x00, 0x00, + 0xff, 0xff, 0xd4, 0x0a, 0xee, 0xfb, 0x7f, 0x04, 0x00, 0x00, } // Reference 
imports to suppress errors if they are not otherwise used. @@ -692,10 +542,6 @@ func (m *InfoRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } return len(dAtA) - i, nil } @@ -719,10 +565,6 @@ func (m *InfoResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if m.Query != nil { { size, err := m.Query.MarshalToSizedBuffer(dAtA[:i]) @@ -839,10 +681,6 @@ func (m *StoreInfo) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if len(m.TsdbInfos) > 0 { for iNdEx := len(m.TsdbInfos) - 1; iNdEx >= 0; iNdEx-- { { @@ -910,10 +748,6 @@ func (m *RulesInfo) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } return len(dAtA) - i, nil } @@ -937,10 +771,6 @@ func (m *MetricMetadataInfo) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } return len(dAtA) - i, nil } @@ -964,10 +794,6 @@ func (m *TargetsInfo) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } return len(dAtA) - i, nil } @@ -991,10 +817,6 @@ func (m *ExemplarsInfo) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if m.MaxTime != 0 { i = encodeVarintRpc(dAtA, i, uint64(m.MaxTime)) i-- @@ -1028,10 +850,6 @@ func (m *QueryAPIInfo) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } return len(dAtA) - i, nil } @@ -1055,10 +873,6 @@ func (m *TSDBInfo) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if m.MaxTime != 0 { i = encodeVarintRpc(dAtA, i, uint64(m.MaxTime)) i-- @@ -1069,18 +883,16 @@ func (m *TSDBInfo) MarshalToSizedBuffer(dAtA []byte) (int, error) { i-- dAtA[i] = 0x10 } - if m.Labels != nil { - { - size, err := m.Labels.MarshalToSizedBuffer(dAtA[:i]) - if err != nil { - return 0, err - } - i -= size - i = encodeVarintRpc(dAtA, i, uint64(size)) + { + size, err := m.Labels.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err } - i-- - dAtA[i] = 0xa + i -= size + i = encodeVarintRpc(dAtA, i, uint64(size)) } + i-- + dAtA[i] = 0xa return len(dAtA) - i, nil } @@ -1101,9 +913,6 @@ func (m *InfoRequest) Size() (n int) { } var l int _ = l - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -1147,9 +956,6 @@ func (m *InfoResponse) Size() (n int) { l = m.Query.Size() n += 1 + l + sovRpc(uint64(l)) } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -1177,9 +983,6 @@ func (m *StoreInfo) Size() (n int) { n += 1 + l + sovRpc(uint64(l)) } } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -1189,9 +992,6 @@ func (m 
*RulesInfo) Size() (n int) { } var l int _ = l - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -1201,9 +1001,6 @@ func (m *MetricMetadataInfo) Size() (n int) { } var l int _ = l - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -1213,9 +1010,6 @@ func (m *TargetsInfo) Size() (n int) { } var l int _ = l - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -1231,9 +1025,6 @@ func (m *ExemplarsInfo) Size() (n int) { if m.MaxTime != 0 { n += 1 + sovRpc(uint64(m.MaxTime)) } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -1243,9 +1034,6 @@ func (m *QueryAPIInfo) Size() (n int) { } var l int _ = l - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -1255,19 +1043,14 @@ func (m *TSDBInfo) Size() (n int) { } var l int _ = l - if m.Labels != nil { - l = m.Labels.Size() - n += 1 + l + sovRpc(uint64(l)) - } + l = m.Labels.Size() + n += 1 + l + sovRpc(uint64(l)) if m.MinTime != 0 { n += 1 + sovRpc(uint64(m.MinTime)) } if m.MaxTime != 0 { n += 1 + sovRpc(uint64(m.MaxTime)) } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -1318,7 +1101,6 @@ func (m *InfoRequest) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } @@ -1386,7 +1168,7 @@ func (m *InfoResponse) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.LabelSets = append(m.LabelSets, &labelpb.LabelSet{}) + m.LabelSets = append(m.LabelSets, labelpb.LabelSet{}) if err := m.LabelSets[len(m.LabelSets)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -1651,7 +1433,6 @@ func (m *InfoResponse) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } @@ -1797,7 +1578,7 @@ func (m *StoreInfo) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.TsdbInfos = append(m.TsdbInfos, &TSDBInfo{}) + m.TsdbInfos = append(m.TsdbInfos, TSDBInfo{}) if err := m.TsdbInfos[len(m.TsdbInfos)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -1814,7 +1595,6 @@ func (m *StoreInfo) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } @@ -1865,7 +1645,6 @@ func (m *RulesInfo) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } @@ -1916,7 +1695,6 @@ func (m *MetricMetadataInfo) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } @@ -1967,7 +1745,6 @@ func (m *TargetsInfo) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } @@ -2056,7 +1833,6 @@ func (m *ExemplarsInfo) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) 
iNdEx += skippy } } @@ -2107,7 +1883,6 @@ func (m *QueryAPIInfo) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } @@ -2175,9 +1950,6 @@ func (m *TSDBInfo) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - if m.Labels == nil { - m.Labels = &labelpb.LabelSet{} - } if err := m.Labels.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -2232,7 +2004,6 @@ func (m *TSDBInfo) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } diff --git a/pkg/info/infopb/rpc.proto b/pkg/info/infopb/rpc.proto index 6c1111a023..d41ba12905 100644 --- a/pkg/info/infopb/rpc.proto +++ b/pkg/info/infopb/rpc.proto @@ -5,9 +5,21 @@ syntax = "proto3"; package thanos.info; import "store/labelpb/types.proto"; +import "gogoproto/gogo.proto"; option go_package = "infopb"; +option (gogoproto.sizer_all) = true; +option (gogoproto.marshaler_all) = true; +option (gogoproto.unmarshaler_all) = true; +option (gogoproto.goproto_getters_all) = false; + +// Do not generate XXX fields to reduce memory footprint and opening a door +// for zero-copy casts to/from prometheus data types. +option (gogoproto.goproto_unkeyed_all) = false; +option (gogoproto.goproto_unrecognized_all) = false; +option (gogoproto.goproto_sizecache_all) = false; + // Info represents the API that is responsible for gathering metadata about the all APIs supported by the component. service Info { // Info returns the metadata (Eg. LabelSets, Min/Max time) about all the APIs the component supports. @@ -17,7 +29,7 @@ service Info { message InfoRequest {} message InfoResponse { - repeated LabelSet label_sets = 1; + repeated LabelSet label_sets = 1 [(gogoproto.nullable) = false]; string ComponentType = 2; // StoreInfo holds the metadata related to Store API if exposed by the component otherwise it will be null. @@ -51,7 +63,7 @@ message StoreInfo { bool supports_without_replica_labels = 5; // TSDBInfos holds metadata for all TSDBs exposed by the store. - repeated TSDBInfo tsdb_infos = 6; + repeated TSDBInfo tsdb_infos = 6 [(gogoproto.nullable) = false]; } // RulesInfo holds the metadata related to Rules API exposed by the component. @@ -77,7 +89,7 @@ message QueryAPIInfo { } message TSDBInfo { - LabelSet labels = 1; + LabelSet labels = 1 [(gogoproto.nullable) = false]; int64 min_time = 2; int64 max_time = 3; diff --git a/pkg/metadata/metadata.go b/pkg/metadata/metadata.go index 30b0aca41d..8067ee224c 100644 --- a/pkg/metadata/metadata.go +++ b/pkg/metadata/metadata.go @@ -19,7 +19,7 @@ var _ UnaryClient = &GRPCClient{} // UnaryClient is a gRPC metadatapb.Metadata client which expands streaming metadata API. Useful for consumers that does not // support streaming. type UnaryClient interface { - MetricMetadata(ctx context.Context, req *metadatapb.MetricMetadataRequest) (map[string][]*metadatapb.Meta, annotations.Annotations, error) + MetricMetadata(ctx context.Context, req *metadatapb.MetricMetadataRequest) (map[string][]metadatapb.Meta, annotations.Annotations, error) } // GRPCClient allows to retrieve metadata from local gRPC streaming server implementation. 
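
Note on the regenerated infopb code above: the changes are driven entirely by the new options in pkg/info/infopb/rpc.proto. Disabling goproto_unrecognized and goproto_sizecache removes the XXX_* bookkeeping fields (unknown fields are now skipped on Unmarshal rather than retained), and (gogoproto.nullable) = false embeds message fields by value. A rough, hand-written sketch of the resulting struct shape — illustrative only, tags elided, exact output depends on the pinned protoc-gen-gogofast version:

package infopbsketch // illustrative sketch, not the generated file

import "github.com/thanos-io/thanos/pkg/store/labelpb" // import path assumed from the repo layout

// TSDBInfo now embeds its label set by value, so callers never see a nil Labels.
type TSDBInfo struct {
	Labels  labelpb.LabelSet // was *labelpb.LabelSet before this change
	MinTime int64
	MaxTime int64
	// XXX_NoUnkeyedLiteral, XXX_unrecognized and XXX_sizecache are gone.
}

// InfoResponse carries a flat value slice instead of a slice of pointers.
type InfoResponse struct {
	LabelSets     []labelpb.LabelSet
	ComponentType string
	// The Store, Rules, MetricMetadata, Targets, Exemplars and Query
	// sub-messages stay nullable pointers: absence still means "API not exposed".
}

The removal of the nil-guards in TSDBInfo.MarshalToSizedBuffer and Unmarshal above follows directly from Labels being a value: there is nothing to allocate and nothing to check.
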
@@ -34,7 +34,7 @@ func NewGRPCClient(ts metadatapb.MetadataServer) *GRPCClient { } } -func (rr *GRPCClient) MetricMetadata(ctx context.Context, req *metadatapb.MetricMetadataRequest) (map[string][]*metadatapb.Meta, annotations.Annotations, error) { +func (rr *GRPCClient) MetricMetadata(ctx context.Context, req *metadatapb.MetricMetadataRequest) (map[string][]metadatapb.Meta, annotations.Annotations, error) { span, ctx := tracing.StartSpan(ctx, "metadata_grpc_request") defer span.Finish() @@ -42,14 +42,14 @@ func (rr *GRPCClient) MetricMetadata(ctx context.Context, req *metadatapb.Metric if req.Limit >= 0 { if req.Metric != "" { - srv.metadataMap = make(map[string][]*metadatapb.Meta, 1) + srv.metadataMap = make(map[string][]metadatapb.Meta, 1) } else if req.Limit <= 100 { - srv.metadataMap = make(map[string][]*metadatapb.Meta, req.Limit) + srv.metadataMap = make(map[string][]metadatapb.Meta, req.Limit) } else { - srv.metadataMap = make(map[string][]*metadatapb.Meta) + srv.metadataMap = make(map[string][]metadatapb.Meta) } } else { - srv.metadataMap = make(map[string][]*metadatapb.Meta) + srv.metadataMap = make(map[string][]metadatapb.Meta) } if err := rr.proxy.MetricMetadata(req, srv); err != nil { @@ -68,7 +68,7 @@ type metadataServer struct { limit int warnings annotations.Annotations - metadataMap map[string][]*metadatapb.Meta + metadataMap map[string][]metadatapb.Meta mu sync.Mutex } @@ -102,7 +102,7 @@ func (srv *metadataServer) Send(res *metadatapb.MetricMetadataResponse) error { Outer: for _, meta := range v.Metas { for _, m := range metadata { - if meta.Equal(m) { + if meta == m { continue Outer } } diff --git a/pkg/metadata/metadatapb/custom.go b/pkg/metadata/metadatapb/custom.go index 8b9f662025..697277aaa3 100644 --- a/pkg/metadata/metadatapb/custom.go +++ b/pkg/metadata/metadatapb/custom.go @@ -3,25 +3,9 @@ package metadatapb -func (m *Meta) Equal(o *Meta) bool { - if m == nil && o == nil { - return true - } - if m == nil || o == nil { - return false - } - if m.Type != o.Type { - return false - } - if m.Help != o.Help { - return false - } - if m.Unit != o.Unit { - return false - } - - return true -} +import ( + "unsafe" +) func NewMetricMetadataResponse(metadata *MetricMetadata) *MetricMetadataResponse { return &MetricMetadataResponse{ @@ -40,16 +24,5 @@ func NewWarningMetadataResponse(warning error) *MetricMetadataResponse { } func FromMetadataMap(m map[string][]Meta) *MetricMetadata { - mt := make(map[string]*MetricMetadataEntry, len(m)) - for k, v := range m { - metas := make([]*Meta, len(v)) - for i, meta := range v { - meta := meta - - metas[i] = &meta - } - - mt[k] = &MetricMetadataEntry{Metas: metas} - } - return &MetricMetadata{Metadata: mt} + return &MetricMetadata{Metadata: *(*map[string]MetricMetadataEntry)(unsafe.Pointer(&m))} } diff --git a/pkg/metadata/metadatapb/rpc.pb.go b/pkg/metadata/metadatapb/rpc.pb.go index 0fb9fac658..e7b51b665a 100644 --- a/pkg/metadata/metadatapb/rpc.pb.go +++ b/pkg/metadata/metadatapb/rpc.pb.go @@ -10,6 +10,7 @@ import ( math "math" math_bits "math/bits" + _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" storepb "github.com/thanos-io/thanos/pkg/store/storepb" grpc "google.golang.org/grpc" @@ -32,9 +33,6 @@ type MetricMetadataRequest struct { Metric string `protobuf:"bytes,1,opt,name=metric,proto3" json:"metric,omitempty"` Limit int32 `protobuf:"varint,2,opt,name=limit,proto3" json:"limit,omitempty"` PartialResponseStrategy storepb.PartialResponseStrategy 
`protobuf:"varint,3,opt,name=partial_response_strategy,json=partialResponseStrategy,proto3,enum=thanos.PartialResponseStrategy" json:"partial_response_strategy,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` } func (m *MetricMetadataRequest) Reset() { *m = MetricMetadataRequest{} } @@ -70,35 +68,11 @@ func (m *MetricMetadataRequest) XXX_DiscardUnknown() { var xxx_messageInfo_MetricMetadataRequest proto.InternalMessageInfo -func (m *MetricMetadataRequest) GetMetric() string { - if m != nil { - return m.Metric - } - return "" -} - -func (m *MetricMetadataRequest) GetLimit() int32 { - if m != nil { - return m.Limit - } - return 0 -} - -func (m *MetricMetadataRequest) GetPartialResponseStrategy() storepb.PartialResponseStrategy { - if m != nil { - return m.PartialResponseStrategy - } - return storepb.PartialResponseStrategy_WARN -} - type MetricMetadataResponse struct { // Types that are valid to be assigned to Result: // *MetricMetadataResponse_Metadata // *MetricMetadataResponse_Warning - Result isMetricMetadataResponse_Result `protobuf_oneof:"result"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Result isMetricMetadataResponse_Result `protobuf_oneof:"result"` } func (m *MetricMetadataResponse) Reset() { *m = MetricMetadataResponse{} } @@ -180,10 +154,7 @@ func (*MetricMetadataResponse) XXX_OneofWrappers() []interface{} { } type MetricMetadata struct { - Metadata map[string]*MetricMetadataEntry `protobuf:"bytes,1,rep,name=metadata,proto3" json:"metadata,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Metadata map[string]MetricMetadataEntry `protobuf:"bytes,1,rep,name=metadata,proto3" json:"metadata" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` } func (m *MetricMetadata) Reset() { *m = MetricMetadata{} } @@ -219,18 +190,8 @@ func (m *MetricMetadata) XXX_DiscardUnknown() { var xxx_messageInfo_MetricMetadata proto.InternalMessageInfo -func (m *MetricMetadata) GetMetadata() map[string]*MetricMetadataEntry { - if m != nil { - return m.Metadata - } - return nil -} - type MetricMetadataEntry struct { - Metas []*Meta `protobuf:"bytes,2,rep,name=metas,proto3" json:"metas,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Metas []Meta `protobuf:"bytes,2,rep,name=metas,proto3" json:"metas"` } func (m *MetricMetadataEntry) Reset() { *m = MetricMetadataEntry{} } @@ -266,23 +227,10 @@ func (m *MetricMetadataEntry) XXX_DiscardUnknown() { var xxx_messageInfo_MetricMetadataEntry proto.InternalMessageInfo -func (m *MetricMetadataEntry) GetMetas() []*Meta { - if m != nil { - return m.Metas - } - return nil -} - type Meta struct { - // @gotags: json:"type" Type string `protobuf:"bytes,1,opt,name=type,proto3" json:"type"` - // @gotags: json:"help" Help string `protobuf:"bytes,2,opt,name=help,proto3" json:"help"` - // @gotags: json:"unit" - Unit string `protobuf:"bytes,3,opt,name=unit,proto3" json:"unit"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Unit string `protobuf:"bytes,3,opt,name=unit,proto3" json:"unit"` } func (m *Meta) Reset() { *m = Meta{} } @@ -318,32 +266,11 @@ func (m *Meta) XXX_DiscardUnknown() { var 
xxx_messageInfo_Meta proto.InternalMessageInfo -func (m *Meta) GetType() string { - if m != nil { - return m.Type - } - return "" -} - -func (m *Meta) GetHelp() string { - if m != nil { - return m.Help - } - return "" -} - -func (m *Meta) GetUnit() string { - if m != nil { - return m.Unit - } - return "" -} - func init() { proto.RegisterType((*MetricMetadataRequest)(nil), "thanos.MetricMetadataRequest") proto.RegisterType((*MetricMetadataResponse)(nil), "thanos.MetricMetadataResponse") proto.RegisterType((*MetricMetadata)(nil), "thanos.MetricMetadata") - proto.RegisterMapType((map[string]*MetricMetadataEntry)(nil), "thanos.MetricMetadata.MetadataEntry") + proto.RegisterMapType((map[string]MetricMetadataEntry)(nil), "thanos.MetricMetadata.MetadataEntry") proto.RegisterType((*MetricMetadataEntry)(nil), "thanos.MetricMetadataEntry") proto.RegisterType((*Meta)(nil), "thanos.Meta") } @@ -351,33 +278,37 @@ func init() { func init() { proto.RegisterFile("metadata/metadatapb/rpc.proto", fileDescriptor_1d9ae5661e0dc3fc) } var fileDescriptor_1d9ae5661e0dc3fc = []byte{ - // 413 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x74, 0x52, 0xc1, 0x8e, 0xd3, 0x30, - 0x10, 0x5d, 0xb7, 0x9b, 0xd0, 0x4e, 0x61, 0x85, 0x0c, 0x94, 0x6c, 0xd0, 0x86, 0x2a, 0xe2, 0x90, - 0x53, 0x02, 0x81, 0x03, 0x70, 0x42, 0x95, 0x90, 0xf6, 0xb2, 0x12, 0x98, 0x0b, 0x02, 0xa1, 0x95, - 0xbb, 0x58, 0x6d, 0x44, 0x9a, 0x18, 0x7b, 0x02, 0xca, 0xef, 0x70, 0xe6, 0x43, 0x38, 0xf2, 0x09, - 0xa8, 0x5f, 0x82, 0x62, 0x27, 0x2d, 0x81, 0x70, 0x89, 0xde, 0xcc, 0xbc, 0xcc, 0x7b, 0x33, 0x1e, - 0x38, 0xdb, 0x0a, 0xe4, 0x1f, 0x39, 0xf2, 0xa4, 0x03, 0x72, 0x95, 0x28, 0x79, 0x15, 0x4b, 0x55, - 0x62, 0x49, 0x5d, 0xdc, 0xf0, 0xa2, 0xd4, 0xfe, 0xa9, 0xc6, 0x52, 0x89, 0xc4, 0x7c, 0xe5, 0x2a, - 0xc1, 0x5a, 0x0a, 0x6d, 0x29, 0xe1, 0x37, 0x02, 0x77, 0x2e, 0x04, 0xaa, 0xec, 0xea, 0xa2, 0xed, - 0xc0, 0xc4, 0xe7, 0x4a, 0x68, 0xa4, 0x73, 0x70, 0xb7, 0xa6, 0xe0, 0x91, 0x05, 0x89, 0xa6, 0xac, - 0x8d, 0xe8, 0x6d, 0x70, 0xf2, 0x6c, 0x9b, 0xa1, 0x37, 0x5a, 0x90, 0xc8, 0x61, 0x36, 0xa0, 0xef, - 0xe1, 0x54, 0x72, 0x85, 0x19, 0xcf, 0x2f, 0x95, 0xd0, 0xb2, 0x2c, 0xb4, 0xb8, 0xd4, 0xa8, 0x38, - 0x8a, 0x75, 0xed, 0x8d, 0x17, 0x24, 0x3a, 0x49, 0xef, 0xc7, 0xd6, 0x4e, 0xfc, 0xca, 0x12, 0x59, - 0xcb, 0x7b, 0xd3, 0xd2, 0xd8, 0x5d, 0x39, 0x5c, 0x08, 0x11, 0xe6, 0x7f, 0x7b, 0xb4, 0x0c, 0xfa, - 0x04, 0x26, 0xdd, 0xe4, 0xc6, 0xe6, 0x2c, 0x9d, 0x77, 0x2a, 0xfd, 0x3f, 0xce, 0x8f, 0xd8, 0x9e, - 0x49, 0x7d, 0xb8, 0xf6, 0x95, 0xab, 0x22, 0x2b, 0xd6, 0x66, 0x88, 0xe9, 0xf9, 0x11, 0xeb, 0x12, - 0xcb, 0x09, 0xb8, 0x4a, 0xe8, 0x2a, 0xc7, 0xf0, 0x3b, 0x81, 0x93, 0x7e, 0x13, 0xfa, 0xa2, 0x27, - 0x37, 0x8e, 0x66, 0xe9, 0x83, 0x61, 0xb9, 0xb8, 0x03, 0x2f, 0x0b, 0x54, 0xf5, 0x41, 0xda, 0x7f, - 0x0b, 0x37, 0x7a, 0x25, 0x7a, 0x13, 0xc6, 0x9f, 0x44, 0xdd, 0xee, 0xb8, 0x81, 0xf4, 0x11, 0x38, - 0x5f, 0x78, 0x5e, 0x09, 0xe3, 0x6d, 0x96, 0xde, 0x1b, 0x56, 0xb0, 0x8d, 0x2d, 0xf3, 0xf9, 0xe8, - 0x29, 0x09, 0x9f, 0xc1, 0xad, 0x01, 0x06, 0x0d, 0xc1, 0x69, 0xc4, 0xb5, 0x37, 0x32, 0x7e, 0xaf, - 0xff, 0xd1, 0x8d, 0x33, 0x5b, 0x0a, 0x97, 0x70, 0xdc, 0x84, 0x94, 0xc2, 0x71, 0x73, 0x1b, 0xad, - 0x19, 0x83, 0x9b, 0xdc, 0x46, 0xe4, 0xd2, 0x2e, 0x8a, 0x19, 0xdc, 0xe4, 0xaa, 0x22, 0x43, 0xf3, - 0xae, 0x53, 0x66, 0x70, 0xfa, 0x01, 0x26, 0xfb, 0x35, 0xbd, 0xfe, 0x67, 0x71, 0x67, 0xc3, 0x43, - 0xb4, 0xb7, 0xe6, 0x07, 0xff, 0x2b, 0xdb, 0x67, 0x7e, 0x48, 0x96, 0xde, 0x8f, 0x5d, 0x40, 0x7e, - 0xee, 0x02, 0xf2, 0x6b, 0x17, 0x90, 0x77, 0x70, 0x38, 0xf7, 0x95, 0x6b, 0x0e, 
0xf9, 0xf1, 0xef, - 0x00, 0x00, 0x00, 0xff, 0xff, 0x5c, 0x41, 0xbd, 0xee, 0x0c, 0x03, 0x00, 0x00, + // 465 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x74, 0x53, 0xcd, 0x6e, 0x13, 0x31, + 0x10, 0x5e, 0xe7, 0x8f, 0x74, 0x02, 0x15, 0x32, 0x25, 0xa4, 0x0b, 0xdd, 0x44, 0x2b, 0x0e, 0x7b, + 0xca, 0xc2, 0xc2, 0x01, 0x71, 0x41, 0x8a, 0x04, 0xea, 0xa5, 0x12, 0x98, 0x0b, 0x02, 0xa1, 0xe2, + 0x14, 0x2b, 0x5d, 0xb1, 0xd9, 0x35, 0xf6, 0x04, 0x94, 0xb7, 0xe0, 0x19, 0x78, 0x04, 0x9e, 0x22, + 0xc7, 0x1e, 0x39, 0x55, 0x90, 0xdc, 0x78, 0x0a, 0x64, 0x7b, 0xb7, 0x34, 0xb0, 0x5c, 0x46, 0x33, + 0xf3, 0x7d, 0x9e, 0xf9, 0x3c, 0x1e, 0xc3, 0xc1, 0x5c, 0x20, 0x7f, 0xcf, 0x91, 0xc7, 0x95, 0x23, + 0xa7, 0xb1, 0x92, 0x27, 0x63, 0xa9, 0x0a, 0x2c, 0x68, 0x07, 0x4f, 0x79, 0x5e, 0x68, 0x7f, 0x5f, + 0x63, 0xa1, 0x44, 0x6c, 0xad, 0x9c, 0xc6, 0xb8, 0x94, 0x42, 0x3b, 0x8a, 0xbf, 0x37, 0x2b, 0x66, + 0x85, 0x75, 0x63, 0xe3, 0xb9, 0x6c, 0xf8, 0x95, 0xc0, 0xcd, 0x23, 0x81, 0x2a, 0x3d, 0x39, 0x2a, + 0xeb, 0x32, 0xf1, 0x71, 0x21, 0x34, 0xd2, 0x3e, 0x74, 0xe6, 0x16, 0x18, 0x90, 0x11, 0x89, 0x76, + 0x58, 0x19, 0xd1, 0x3d, 0x68, 0x67, 0xe9, 0x3c, 0xc5, 0x41, 0x63, 0x44, 0xa2, 0x36, 0x73, 0x01, + 0x7d, 0x03, 0xfb, 0x92, 0x2b, 0x4c, 0x79, 0x76, 0xac, 0x84, 0x96, 0x45, 0xae, 0xc5, 0xb1, 0x46, + 0xc5, 0x51, 0xcc, 0x96, 0x83, 0xe6, 0x88, 0x44, 0xbb, 0xc9, 0x70, 0xec, 0x44, 0x8e, 0x9f, 0x3b, + 0x22, 0x2b, 0x79, 0x2f, 0x4b, 0x1a, 0xbb, 0x25, 0xeb, 0x81, 0x10, 0xa1, 0xff, 0xb7, 0x46, 0xc7, + 0xa0, 0x0f, 0xa1, 0x5b, 0xcd, 0xc3, 0xca, 0xec, 0x25, 0xfd, 0xaa, 0xcb, 0xf6, 0x89, 0x43, 0x8f, + 0x5d, 0x30, 0xa9, 0x0f, 0x57, 0x3e, 0x73, 0x95, 0xa7, 0xf9, 0xcc, 0x5e, 0x62, 0xe7, 0xd0, 0x63, + 0x55, 0x62, 0xd2, 0x85, 0x8e, 0x12, 0x7a, 0x91, 0x61, 0xf8, 0x8d, 0xc0, 0xee, 0x76, 0x11, 0xfa, + 0x6c, 0xab, 0x5d, 0x33, 0xea, 0x25, 0x77, 0xeb, 0xdb, 0x8d, 0x2b, 0xe7, 0x69, 0x8e, 0x6a, 0x39, + 0x69, 0xad, 0xce, 0x87, 0x97, 0x04, 0xf8, 0xaf, 0xe0, 0xda, 0x16, 0x81, 0x5e, 0x87, 0xe6, 0x07, + 0xb1, 0x2c, 0x27, 0x6d, 0x5c, 0x7a, 0x1f, 0xda, 0x9f, 0x78, 0xb6, 0x10, 0x56, 0x61, 0x2f, 0xb9, + 0x5d, 0xdf, 0xc7, 0x9e, 0x66, 0x8e, 0xf9, 0xb8, 0xf1, 0x88, 0x84, 0x4f, 0xe0, 0x46, 0x0d, 0x83, + 0x46, 0xd0, 0x36, 0xcd, 0xf5, 0xa0, 0x61, 0x55, 0x5f, 0xbd, 0x54, 0x8d, 0x97, 0xea, 0x1c, 0x21, + 0x7c, 0x07, 0x2d, 0x93, 0xa4, 0x77, 0xa0, 0x65, 0xb6, 0xc7, 0x49, 0x9a, 0x74, 0x7f, 0x9d, 0x0f, + 0x6d, 0xcc, 0xac, 0x35, 0xe8, 0xa9, 0xc8, 0xa4, 0x1b, 0x9f, 0x43, 0x4d, 0xcc, 0xac, 0x35, 0xe8, + 0x22, 0x4f, 0xd1, 0xbe, 0x7b, 0x89, 0x9a, 0x98, 0x59, 0x9b, 0xbc, 0x85, 0xee, 0xc5, 0x40, 0x5f, + 0xfc, 0x33, 0xe2, 0x83, 0xfa, 0x8b, 0x96, 0x5b, 0xe9, 0x07, 0xff, 0x83, 0xdd, 0x42, 0xdc, 0x23, + 0x93, 0x68, 0xf5, 0x33, 0xf0, 0x56, 0xeb, 0x80, 0x9c, 0xad, 0x03, 0xf2, 0x63, 0x1d, 0x90, 0x2f, + 0x9b, 0xc0, 0x3b, 0xdb, 0x04, 0xde, 0xf7, 0x4d, 0xe0, 0xbd, 0x86, 0x3f, 0xdf, 0x67, 0xda, 0xb1, + 0x5f, 0xe0, 0xc1, 0xef, 0x00, 0x00, 0x00, 0xff, 0xff, 0xfa, 0x76, 0xe8, 0xa7, 0x5c, 0x03, 0x00, + 0x00, } // Reference imports to suppress errors if they are not otherwise used. 
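
Two consequences of the metadatapb changes above are worth spelling out. First, with the XXX_* fields gone, Meta is a plain comparable struct of three strings, which is what lets metadataServer.Send replace the hand-written Equal method with a direct == comparison. Second, the rewritten FromMetadataMap relies on the zero-copy cast the proto comment advertises: MetricMetadataEntry wraps nothing but a []Meta, so a map[string][]Meta and a map[string]MetricMetadataEntry share a memory layout. A standalone sketch of both assumptions, with the types copied into a scratch package purely for illustration:

package metadatapbsketch // illustration only

import "unsafe"

// Local copies of the generated shapes after this change.
type Meta struct{ Type, Help, Unit string }
type MetricMetadataEntry struct{ Metas []Meta }

// castMap mirrors FromMetadataMap. The cast is only valid while
// MetricMetadataEntry has exactly one field of type []Meta; adding any field
// to the generated struct would silently corrupt the reinterpreted map.
func castMap(m map[string][]Meta) map[string]MetricMetadataEntry {
	return *(*map[string]MetricMetadataEntry)(unsafe.Pointer(&m))
}

// sameMeta shows the comparison now used when deduplicating metas: == compares
// Type, Help and Unit field by field, exactly what the removed Equal did.
func sameMeta(a, b Meta) bool { return a == b }
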
@@ -507,10 +438,6 @@ func (m *MetricMetadataRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if m.PartialResponseStrategy != 0 { i = encodeVarintRpc(dAtA, i, uint64(m.PartialResponseStrategy)) i-- @@ -551,10 +478,6 @@ func (m *MetricMetadataResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if m.Result != nil { { size := m.Result.Size() @@ -622,26 +545,20 @@ func (m *MetricMetadata) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if len(m.Metadata) > 0 { for k := range m.Metadata { v := m.Metadata[k] baseI := i - if v != nil { - { - size, err := v.MarshalToSizedBuffer(dAtA[:i]) - if err != nil { - return 0, err - } - i -= size - i = encodeVarintRpc(dAtA, i, uint64(size)) + { + size, err := (&v).MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err } - i-- - dAtA[i] = 0x12 + i -= size + i = encodeVarintRpc(dAtA, i, uint64(size)) } + i-- + dAtA[i] = 0x12 i -= len(k) copy(dAtA[i:], k) i = encodeVarintRpc(dAtA, i, uint64(len(k))) @@ -675,10 +592,6 @@ func (m *MetricMetadataEntry) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if len(m.Metas) > 0 { for iNdEx := len(m.Metas) - 1; iNdEx >= 0; iNdEx-- { { @@ -716,10 +629,6 @@ func (m *Meta) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if len(m.Unit) > 0 { i -= len(m.Unit) copy(dAtA[i:], m.Unit) @@ -771,9 +680,6 @@ func (m *MetricMetadataRequest) Size() (n int) { if m.PartialResponseStrategy != 0 { n += 1 + sovRpc(uint64(m.PartialResponseStrategy)) } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -786,9 +692,6 @@ func (m *MetricMetadataResponse) Size() (n int) { if m.Result != nil { n += m.Result.Size() } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -824,18 +727,11 @@ func (m *MetricMetadata) Size() (n int) { for k, v := range m.Metadata { _ = k _ = v - l = 0 - if v != nil { - l = v.Size() - l += 1 + sovRpc(uint64(l)) - } - mapEntrySize := 1 + len(k) + sovRpc(uint64(len(k))) + l + l = v.Size() + mapEntrySize := 1 + len(k) + sovRpc(uint64(len(k))) + 1 + l + sovRpc(uint64(l)) n += mapEntrySize + 1 + sovRpc(uint64(mapEntrySize)) } } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -851,9 +747,6 @@ func (m *MetricMetadataEntry) Size() (n int) { n += 1 + l + sovRpc(uint64(l)) } } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -875,9 +768,6 @@ func (m *Meta) Size() (n int) { if l > 0 { n += 1 + l + sovRpc(uint64(l)) } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -998,7 +888,6 @@ func (m *MetricMetadataRequest) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) 
iNdEx += skippy } } @@ -1116,7 +1005,6 @@ func (m *MetricMetadataResponse) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } @@ -1185,10 +1073,10 @@ func (m *MetricMetadata) Unmarshal(dAtA []byte) error { return io.ErrUnexpectedEOF } if m.Metadata == nil { - m.Metadata = make(map[string]*MetricMetadataEntry) + m.Metadata = make(map[string]MetricMetadataEntry) } var mapkey string - var mapvalue *MetricMetadataEntry + mapvalue := &MetricMetadataEntry{} for iNdEx < postIndex { entryPreIndex := iNdEx var wire uint64 @@ -1282,7 +1170,7 @@ func (m *MetricMetadata) Unmarshal(dAtA []byte) error { iNdEx += skippy } } - m.Metadata[mapkey] = mapvalue + m.Metadata[mapkey] = *mapvalue iNdEx = postIndex default: iNdEx = preIndex @@ -1296,7 +1184,6 @@ func (m *MetricMetadata) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } @@ -1364,7 +1251,7 @@ func (m *MetricMetadataEntry) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Metas = append(m.Metas, &Meta{}) + m.Metas = append(m.Metas, Meta{}) if err := m.Metas[len(m.Metas)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -1381,7 +1268,6 @@ func (m *MetricMetadataEntry) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } @@ -1528,7 +1414,6 @@ func (m *Meta) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } diff --git a/pkg/metadata/metadatapb/rpc.proto b/pkg/metadata/metadatapb/rpc.proto index bb906f548a..96ff350634 100644 --- a/pkg/metadata/metadatapb/rpc.proto +++ b/pkg/metadata/metadatapb/rpc.proto @@ -5,9 +5,21 @@ syntax = "proto3"; package thanos; import "store/storepb/types.proto"; +import "gogoproto/gogo.proto"; option go_package = "metadatapb"; +option (gogoproto.sizer_all) = true; +option (gogoproto.marshaler_all) = true; +option (gogoproto.unmarshaler_all) = true; +option (gogoproto.goproto_getters_all) = false; + +// Do not generate XXX fields to reduce memory footprint and opening a door +// for zero-copy casts to/from prometheus data types. 
+option (gogoproto.goproto_unkeyed_all) = false; +option (gogoproto.goproto_unrecognized_all) = false; +option (gogoproto.goproto_sizecache_all) = false; + service Metadata { rpc MetricMetadata(MetricMetadataRequest) returns (stream MetricMetadataResponse); } @@ -30,18 +42,15 @@ message MetricMetadataResponse { } message MetricMetadata { - map metadata = 1; + map metadata = 1 [(gogoproto.nullable) = false]; } message MetricMetadataEntry { - repeated Meta metas = 2 ; + repeated Meta metas = 2 [(gogoproto.nullable) = false]; } message Meta { - // @gotags: json:"type" - string type = 1; - // @gotags: json:"help" - string help = 2; - // @gotags: json:"unit" - string unit = 3; + string type = 1 [(gogoproto.jsontag) = "type"]; + string help = 2 [(gogoproto.jsontag) = "help"]; + string unit = 3 [(gogoproto.jsontag) = "unit"]; } diff --git a/pkg/metadata/prometheus_test.go b/pkg/metadata/prometheus_test.go index e4424eb8e2..be05696888 100644 --- a/pkg/metadata/prometheus_test.go +++ b/pkg/metadata/prometheus_test.go @@ -73,27 +73,27 @@ scrape_configs: name string metric string limit int32 - expectedFunc func(map[string][]*metadatapb.Meta) bool + expectedFunc func(map[string][]metadatapb.Meta) bool }{ { name: "all metadata return", limit: -1, // We just check two metrics here. - expectedFunc: func(m map[string][]*metadatapb.Meta) bool { + expectedFunc: func(m map[string][]metadatapb.Meta) bool { return len(m["prometheus_build_info"]) > 0 && len(m["prometheus_engine_query_duration_seconds"]) > 0 }, }, { name: "no metadata return", limit: 0, - expectedFunc: func(m map[string][]*metadatapb.Meta) bool { + expectedFunc: func(m map[string][]metadatapb.Meta) bool { return len(m) == 0 }, }, { name: "only 1 metadata return", limit: 1, - expectedFunc: func(m map[string][]*metadatapb.Meta) bool { + expectedFunc: func(m map[string][]metadatapb.Meta) bool { return len(m) == 1 }, }, @@ -101,7 +101,7 @@ scrape_configs: name: "only prometheus_build_info metadata return", metric: "prometheus_build_info", limit: 1, - expectedFunc: func(m map[string][]*metadatapb.Meta) bool { + expectedFunc: func(m map[string][]metadatapb.Meta) bool { return len(m) == 1 && len(m["prometheus_build_info"]) > 0 }, }, diff --git a/pkg/query/endpointset.go b/pkg/query/endpointset.go index 5d6040d154..39c9041e43 100644 --- a/pkg/query/endpointset.go +++ b/pkg/query/endpointset.go @@ -726,7 +726,7 @@ func (er *endpointRef) TimeRange() (mint, maxt int64) { return er.timeRange() } -func (er *endpointRef) TSDBInfos() []*infopb.TSDBInfo { +func (er *endpointRef) TSDBInfos() []infopb.TSDBInfo { er.mtx.RLock() defer er.mtx.RUnlock() diff --git a/pkg/query/endpointset_test.go b/pkg/query/endpointset_test.go index 06abff945f..431bbeacbb 100644 --- a/pkg/query/endpointset_test.go +++ b/pkg/query/endpointset_test.go @@ -258,7 +258,7 @@ func TestEndpointSetUpdate(t *testing.T) { { InfoResponse: sidecarInfo, extlsetFn: func(addr string) labelpb.LabelSets { - return []*labelpb.LabelSet{ + return []labelpb.LabelSet{ { Labels: labelpb.PromLabelsToLabelpbLabels( labels.FromStrings("addr", addr, "a", "b"), @@ -283,7 +283,7 @@ func TestEndpointSetUpdate(t *testing.T) { err: fmt.Errorf("endpoint unavailable"), InfoResponse: sidecarInfo, extlsetFn: func(addr string) labelpb.LabelSets { - return []*labelpb.LabelSet{ + return []labelpb.LabelSet{ { Labels: labelpb.PromLabelsToLabelpbLabels( labels.FromStrings("addr", addr, "a", "b"), @@ -304,7 +304,7 @@ func TestEndpointSetUpdate(t *testing.T) { infoDelay: 5 * time.Second, InfoResponse: sidecarInfo, extlsetFn: 
func(addr string) labelpb.LabelSets { - return []*labelpb.LabelSet{ + return []labelpb.LabelSet{ { Labels: labelpb.PromLabelsToLabelpbLabels( labels.FromStrings("addr", addr, "a", "b"), @@ -324,7 +324,7 @@ func TestEndpointSetUpdate(t *testing.T) { { InfoResponse: sidecarInfo, extlsetFn: func(addr string) labelpb.LabelSets { - return []*labelpb.LabelSet{ + return []labelpb.LabelSet{ { Labels: labelpb.PromLabelsToLabelpbLabels( labels.FromStrings("addr", addr, "a", "b"), @@ -396,7 +396,7 @@ func TestEndpointSetUpdate_DuplicateSpecs(t *testing.T) { { InfoResponse: sidecarInfo, extlsetFn: func(addr string) labelpb.LabelSets { - return []*labelpb.LabelSet{ + return []labelpb.LabelSet{ { Labels: labelpb.PromLabelsToLabelpbLabels( labels.FromStrings("addr", addr, "a", "b"), @@ -424,7 +424,7 @@ func TestEndpointSetUpdate_EndpointGoingAway(t *testing.T) { { InfoResponse: sidecarInfo, extlsetFn: func(addr string) labelpb.LabelSets { - return []*labelpb.LabelSet{ + return []labelpb.LabelSet{ { Labels: labelpb.PromLabelsToLabelpbLabels( labels.FromStrings("addr", addr, "a", "b"), @@ -536,7 +536,7 @@ func TestEndpointSetUpdate_PruneInactiveEndpoints(t *testing.T) { { InfoResponse: sidecarInfo, extlsetFn: func(addr string) labelpb.LabelSets { - return []*labelpb.LabelSet{ + return []labelpb.LabelSet{ { Labels: labelpb.PromLabelsToLabelpbLabels( labels.FromStrings("addr", addr, "a", "b"), @@ -555,7 +555,7 @@ func TestEndpointSetUpdate_PruneInactiveEndpoints(t *testing.T) { { InfoResponse: sidecarInfo, extlsetFn: func(addr string) labelpb.LabelSets { - return []*labelpb.LabelSet{ + return []labelpb.LabelSet{ { Labels: labelpb.PromLabelsToLabelpbLabels( labels.FromStrings("addr", addr, "a", "b"), @@ -632,14 +632,14 @@ func TestEndpointSetUpdate_AvailabilityScenarios(t *testing.T) { { InfoResponse: sidecarInfo, extlsetFn: func(addr string) labelpb.LabelSets { - return []*labelpb.LabelSet{ + return []labelpb.LabelSet{ { - Labels: []*labelpb.Label{ + Labels: []labelpb.Label{ {Name: "addr", Value: addr}, }, }, { - Labels: []*labelpb.Label{ + Labels: []labelpb.Label{ {Name: "a", Value: "b"}, }, }, @@ -649,14 +649,14 @@ func TestEndpointSetUpdate_AvailabilityScenarios(t *testing.T) { { InfoResponse: sidecarInfo, extlsetFn: func(addr string) labelpb.LabelSets { - return []*labelpb.LabelSet{ + return []labelpb.LabelSet{ { - Labels: []*labelpb.Label{ + Labels: []labelpb.Label{ {Name: "addr", Value: addr}, }, }, { - Labels: []*labelpb.Label{ + Labels: []labelpb.Label{ {Name: "a", Value: "b"}, }, }, @@ -666,14 +666,14 @@ func TestEndpointSetUpdate_AvailabilityScenarios(t *testing.T) { { InfoResponse: queryInfo, extlsetFn: func(addr string) labelpb.LabelSets { - return []*labelpb.LabelSet{ + return []labelpb.LabelSet{ { - Labels: []*labelpb.Label{ + Labels: []labelpb.Label{ {Name: "addr", Value: addr}, }, }, { - Labels: []*labelpb.Label{ + Labels: []labelpb.Label{ {Name: "a", Value: "b"}, }, }, @@ -764,15 +764,15 @@ func TestEndpointSetUpdate_AvailabilityScenarios(t *testing.T) { { InfoResponse: queryInfo, extlsetFn: func(addr string) labelpb.LabelSets { - return []*labelpb.LabelSet{ + return []labelpb.LabelSet{ { - Labels: []*labelpb.Label{ + Labels: []labelpb.Label{ {Name: "l1", Value: "v2"}, {Name: "l2", Value: "v3"}, }, }, { - Labels: []*labelpb.Label{ + Labels: []labelpb.Label{ {Name: "l3", Value: "v4"}, }, }, @@ -783,15 +783,15 @@ func TestEndpointSetUpdate_AvailabilityScenarios(t *testing.T) { // Duplicated Querier, in previous versions it would be deduplicated. Now it should be not. 
InfoResponse: queryInfo, extlsetFn: func(addr string) labelpb.LabelSets { - return []*labelpb.LabelSet{ + return []labelpb.LabelSet{ { - Labels: []*labelpb.Label{ + Labels: []labelpb.Label{ {Name: "l1", Value: "v2"}, {Name: "l2", Value: "v3"}, }, }, { - Labels: []*labelpb.Label{ + Labels: []labelpb.Label{ {Name: "l3", Value: "v4"}, }, }, @@ -801,9 +801,9 @@ func TestEndpointSetUpdate_AvailabilityScenarios(t *testing.T) { { InfoResponse: sidecarInfo, extlsetFn: func(addr string) labelpb.LabelSets { - return []*labelpb.LabelSet{ + return []labelpb.LabelSet{ { - Labels: []*labelpb.Label{ + Labels: []labelpb.Label{ {Name: "l1", Value: "v2"}, {Name: "l2", Value: "v3"}, }, @@ -815,9 +815,9 @@ func TestEndpointSetUpdate_AvailabilityScenarios(t *testing.T) { // Duplicated Sidecar, in previous versions it would be deduplicated. Now it should be not. InfoResponse: sidecarInfo, extlsetFn: func(addr string) labelpb.LabelSets { - return []*labelpb.LabelSet{ + return []labelpb.LabelSet{ { - Labels: []*labelpb.Label{ + Labels: []labelpb.Label{ {Name: "l1", Value: "v2"}, {Name: "l2", Value: "v3"}, }, @@ -829,9 +829,9 @@ func TestEndpointSetUpdate_AvailabilityScenarios(t *testing.T) { // Querier that duplicates with sidecar, in previous versions it would be deduplicated. Now it should be not. InfoResponse: queryInfo, extlsetFn: func(addr string) labelpb.LabelSets { - return []*labelpb.LabelSet{ + return []labelpb.LabelSet{ { - Labels: []*labelpb.Label{ + Labels: []labelpb.Label{ {Name: "l1", Value: "v2"}, {Name: "l2", Value: "v3"}, }, @@ -844,9 +844,9 @@ func TestEndpointSetUpdate_AvailabilityScenarios(t *testing.T) { // Warning should be produced. InfoResponse: ruleInfo, extlsetFn: func(addr string) labelpb.LabelSets { - return []*labelpb.LabelSet{ + return []labelpb.LabelSet{ { - Labels: []*labelpb.Label{ + Labels: []labelpb.Label{ {Name: "l1", Value: "v2"}, {Name: "l2", Value: "v3"}, }, @@ -858,9 +858,9 @@ func TestEndpointSetUpdate_AvailabilityScenarios(t *testing.T) { // Duplicated Rule, in previous versions it would be deduplicated. Now it should be not. Warning should be produced. InfoResponse: ruleInfo, extlsetFn: func(addr string) labelpb.LabelSets { - return []*labelpb.LabelSet{ + return []labelpb.LabelSet{ { - Labels: []*labelpb.Label{ + Labels: []labelpb.Label{ {Name: "l1", Value: "v2"}, {Name: "l2", Value: "v3"}, }, @@ -872,13 +872,13 @@ func TestEndpointSetUpdate_AvailabilityScenarios(t *testing.T) { { InfoResponse: storeGWInfo, extlsetFn: func(addr string) labelpb.LabelSets { - return []*labelpb.LabelSet{} + return []labelpb.LabelSet{} }, }, { InfoResponse: storeGWInfo, extlsetFn: func(addr string) labelpb.LabelSets { - return []*labelpb.LabelSet{} + return []labelpb.LabelSet{} }, }, // Regression tests against https://github.com/thanos-io/thanos/issues/1632: From v0.8.0 stores advertise labels. 
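
The endpointset_test.go churn above (and below) is mechanical: every []*labelpb.LabelSet literal becomes a []labelpb.LabelSet of values, and the inner []*labelpb.Label slices follow suit. If these tables keep growing, a small fixture helper in the spirit of the labelSetFromStrings helper that remote_engine_test.go defines later in this diff would shrink them; the helper name below is hypothetical and the import paths are assumed:

package querysketch // hypothetical test helper, not part of this diff

import (
	"github.com/prometheus/prometheus/model/labels"
	"github.com/thanos-io/thanos/pkg/store/labelpb"
)

// labelSetsFromStrings builds the single-set value slices the updated test
// literals spell out by hand, e.g. labelSetsFromStrings("addr", addr, "a", "b").
func labelSetsFromStrings(pairs ...string) []labelpb.LabelSet {
	return []labelpb.LabelSet{
		{Labels: labelpb.PromLabelsToLabelpbLabels(labels.FromStrings(pairs...))},
	}
}
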
@@ -886,15 +886,15 @@ func TestEndpointSetUpdate_AvailabilityScenarios(t *testing.T) { { InfoResponse: storeGWInfo, extlsetFn: func(addr string) labelpb.LabelSets { - return []*labelpb.LabelSet{ + return []labelpb.LabelSet{ { - Labels: []*labelpb.Label{ + Labels: []labelpb.Label{ {Name: "l1", Value: "v2"}, {Name: "l2", Value: "v3"}, }, }, { - Labels: []*labelpb.Label{ + Labels: []labelpb.Label{ {Name: "l3", Value: "v4"}, }, }, @@ -905,15 +905,15 @@ func TestEndpointSetUpdate_AvailabilityScenarios(t *testing.T) { { InfoResponse: storeGWInfo, extlsetFn: func(addr string) labelpb.LabelSets { - return []*labelpb.LabelSet{ + return []labelpb.LabelSet{ { - Labels: []*labelpb.Label{ + Labels: []labelpb.Label{ {Name: "l1", Value: "v2"}, {Name: "l2", Value: "v3"}, }, }, { - Labels: []*labelpb.Label{ + Labels: []labelpb.Label{ {Name: "l3", Value: "v4"}, }, }, @@ -924,15 +924,15 @@ func TestEndpointSetUpdate_AvailabilityScenarios(t *testing.T) { { InfoResponse: storeGWInfo, extlsetFn: func(addr string) labelpb.LabelSets { - return []*labelpb.LabelSet{ + return []labelpb.LabelSet{ { - Labels: []*labelpb.Label{ + Labels: []labelpb.Label{ {Name: "l1", Value: "v2"}, {Name: "l2", Value: "v3"}, }, }, { - Labels: []*labelpb.Label{ + Labels: []labelpb.Label{ {Name: "l3", Value: "v4"}, }, }, @@ -942,15 +942,15 @@ func TestEndpointSetUpdate_AvailabilityScenarios(t *testing.T) { { InfoResponse: receiveInfo, extlsetFn: func(addr string) labelpb.LabelSets { - return []*labelpb.LabelSet{ + return []labelpb.LabelSet{ { - Labels: []*labelpb.Label{ + Labels: []labelpb.Label{ {Name: "l1", Value: "v2"}, {Name: "l2", Value: "v3"}, }, }, { - Labels: []*labelpb.Label{ + Labels: []labelpb.Label{ {Name: "l3", Value: "v4"}, }, }, @@ -961,15 +961,15 @@ func TestEndpointSetUpdate_AvailabilityScenarios(t *testing.T) { { InfoResponse: receiveInfo, extlsetFn: func(addr string) labelpb.LabelSets { - return []*labelpb.LabelSet{ + return []labelpb.LabelSet{ { - Labels: []*labelpb.Label{ + Labels: []labelpb.Label{ {Name: "l1", Value: "v2"}, {Name: "l2", Value: "v3"}, }, }, { - Labels: []*labelpb.Label{ + Labels: []labelpb.Label{ {Name: "l3", Value: "v4"}, }, }, @@ -1026,9 +1026,9 @@ func TestEndpointSet_Update_NoneAvailable(t *testing.T) { { InfoResponse: sidecarInfo, extlsetFn: func(addr string) labelpb.LabelSets { - return []*labelpb.LabelSet{ + return []labelpb.LabelSet{ { - Labels: []*labelpb.Label{ + Labels: []labelpb.Label{ { Name: "addr", Value: addr, @@ -1041,9 +1041,9 @@ func TestEndpointSet_Update_NoneAvailable(t *testing.T) { { InfoResponse: sidecarInfo, extlsetFn: func(addr string) labelpb.LabelSets { - return []*labelpb.LabelSet{ + return []labelpb.LabelSet{ { - Labels: []*labelpb.Label{ + Labels: []labelpb.Label{ { Name: "addr", Value: addr, @@ -1098,9 +1098,9 @@ func TestEndpoint_Update_QuerierStrict(t *testing.T) { Targets: &infopb.TargetsInfo{}, }, extlsetFn: func(addr string) labelpb.LabelSets { - return []*labelpb.LabelSet{ + return []labelpb.LabelSet{ { - Labels: []*labelpb.Label{ + Labels: []labelpb.Label{ { Name: "addr", Value: addr, @@ -1123,9 +1123,9 @@ func TestEndpoint_Update_QuerierStrict(t *testing.T) { Targets: &infopb.TargetsInfo{}, }, extlsetFn: func(addr string) labelpb.LabelSets { - return []*labelpb.LabelSet{ + return []labelpb.LabelSet{ { - Labels: []*labelpb.Label{ + Labels: []labelpb.Label{ { Name: "addr", Value: addr, @@ -1149,9 +1149,9 @@ func TestEndpoint_Update_QuerierStrict(t *testing.T) { Targets: &infopb.TargetsInfo{}, }, extlsetFn: func(addr string) labelpb.LabelSets { - return 
[]*labelpb.LabelSet{ + return []labelpb.LabelSet{ { - Labels: []*labelpb.Label{ + Labels: []labelpb.Label{ { Name: "addr", Value: addr, @@ -1227,31 +1227,31 @@ func TestEndpointSet_APIs_Discovery(t *testing.T) { { InfoResponse: sidecarInfo, extlsetFn: func(addr string) labelpb.LabelSets { - return []*labelpb.LabelSet{} + return []labelpb.LabelSet{} }, }, { InfoResponse: ruleInfo, extlsetFn: func(addr string) labelpb.LabelSets { - return []*labelpb.LabelSet{} + return []labelpb.LabelSet{} }, }, { InfoResponse: receiveInfo, extlsetFn: func(addr string) labelpb.LabelSets { - return []*labelpb.LabelSet{} + return []labelpb.LabelSet{} }, }, { InfoResponse: storeGWInfo, extlsetFn: func(addr string) labelpb.LabelSets { - return []*labelpb.LabelSet{} + return []labelpb.LabelSet{} }, }, { InfoResponse: queryInfo, extlsetFn: func(addr string) labelpb.LabelSets { - return []*labelpb.LabelSet{} + return []labelpb.LabelSet{} }, }, }) @@ -1446,7 +1446,7 @@ func makeInfoResponses(n int) []testEndpointMeta { responses = append(responses, testEndpointMeta{ InfoResponse: sidecarInfo, extlsetFn: func(addr string) labelpb.LabelSets { - return []*labelpb.LabelSet{ + return []labelpb.LabelSet{ { Labels: labelpb.PromLabelsToLabelpbLabels( labels.FromStrings("addr", addr, "a", "b"), diff --git a/pkg/query/iter.go b/pkg/query/iter.go index 253b587a95..7bee002df2 100644 --- a/pkg/query/iter.go +++ b/pkg/query/iter.go @@ -82,20 +82,20 @@ func (*storeSeriesSet) Err() error { return nil } -func (s *storeSeriesSet) At() (labels.Labels, []*storepb.AggrChunk) { +func (s *storeSeriesSet) At() (labels.Labels, []storepb.AggrChunk) { return s.series[s.i].PromLabels(), s.series[s.i].Chunks } // chunkSeries implements storage.Series for a series on storepb types. type chunkSeries struct { lset labels.Labels - chunks []*storepb.AggrChunk + chunks []storepb.AggrChunk mint, maxt int64 aggrs []storepb.Aggr } // newChunkSeries allows to iterate over samples for each sorted and non-overlapped chunks. -func newChunkSeries(lset labels.Labels, chunks []*storepb.AggrChunk, mint, maxt int64, aggrs []storepb.Aggr) *chunkSeries { +func newChunkSeries(lset labels.Labels, chunks []storepb.AggrChunk, mint, maxt int64, aggrs []storepb.Aggr) *chunkSeries { return &chunkSeries{ lset: lset, chunks: chunks, diff --git a/pkg/query/querier_test.go b/pkg/query/querier_test.go index 390afb14be..cb67c6dbd4 100644 --- a/pkg/query/querier_test.go +++ b/pkg/query/querier_test.go @@ -1246,7 +1246,7 @@ func storeSeriesResponse(t testing.TB, lset labels.Labels, smplChunks ...[]sampl var s storepb.Series for _, l := range lset { - s.Labels = append(s.Labels, &labelpb.Label{Name: l.Name, Value: l.Value}) + s.Labels = append(s.Labels, labelpb.Label{Name: l.Name, Value: l.Value}) } for _, smpls := range smplChunks { @@ -1264,7 +1264,7 @@ func storeSeriesResponse(t testing.TB, lset labels.Labels, smplChunks ...[]sampl Raw: &storepb.Chunk{Type: storepb.Chunk_XOR, Data: c.Bytes()}, } - s.Chunks = append(s.Chunks, &ch) + s.Chunks = append(s.Chunks, ch) } return storepb.NewSeriesResponse(&s) } diff --git a/pkg/query/query_test.go b/pkg/query/query_test.go index ebac8fcf69..8d1df8593c 100644 --- a/pkg/query/query_test.go +++ b/pkg/query/query_test.go @@ -104,14 +104,14 @@ func TestQuerier_Proxy(t *testing.T) { // selectStore allows wrapping another storeEndpoints with additional time and matcher selection. 
type selectStore struct { - matchers []*storepb.LabelMatcher + matchers []storepb.LabelMatcher storepb.StoreServer mint, maxt int64 } // selectedStore wraps given store with selectStore. -func selectedStore(wrapped storepb.StoreServer, matchers []*storepb.LabelMatcher, mint, maxt int64) *selectStore { +func selectedStore(wrapped storepb.StoreServer, matchers []storepb.LabelMatcher, mint, maxt int64) *selectStore { return &selectStore{ StoreServer: wrapped, matchers: matchers, @@ -128,7 +128,7 @@ func (s *selectStore) Series(r *storepb.SeriesRequest, srv storepb.Store_SeriesS r.MaxTime = s.maxt } - matchers := make([]*storepb.LabelMatcher, 0, len(r.Matchers)) + matchers := make([]storepb.LabelMatcher, 0, len(r.Matchers)) matchers = append(matchers, r.Matchers...) req := *r diff --git a/pkg/query/remote_engine.go b/pkg/query/remote_engine.go index 5fa5fbde45..3bfc786bcd 100644 --- a/pkg/query/remote_engine.go +++ b/pkg/query/remote_engine.go @@ -338,7 +338,7 @@ func (r *remoteQuery) Exec(ctx context.Context) *promql.Result { // timestamp as that is when we ran the evaluation. // See https://github.com/prometheus/prometheus/blob/b727e69b7601b069ded5c34348dca41b80988f4b/promql/engine.go#L693-L699 if len(ts.Histograms) > 0 { - result = append(result, promql.Sample{Metric: builder.Labels(), H: prompb.FromProtoHistogram(*ts.Histograms[0]), T: r.start.UnixMilli()}) + result = append(result, promql.Sample{Metric: builder.Labels(), H: prompb.FromProtoHistogram(ts.Histograms[0]), T: r.start.UnixMilli()}) } else { result = append(result, promql.Sample{Metric: builder.Labels(), F: ts.Samples[0].Value, T: r.start.UnixMilli()}) } @@ -417,7 +417,7 @@ func (r *remoteQuery) Exec(ctx context.Context) *promql.Result { for _, hp := range ts.Histograms { series.Histograms = append(series.Histograms, promql.HPoint{ T: hp.Timestamp, - H: prompb.FloatHistogramProtoToFloatHistogram(*hp), + H: prompb.FloatHistogramProtoToFloatHistogram(hp), }) } result = append(result, series) diff --git a/pkg/query/remote_engine_test.go b/pkg/query/remote_engine_test.go index 3896469341..4bc05df857 100644 --- a/pkg/query/remote_engine_test.go +++ b/pkg/query/remote_engine_test.go @@ -63,32 +63,32 @@ func TestRemoteEngine_Warnings(t *testing.T) { func TestRemoteEngine_LabelSets(t *testing.T) { tests := []struct { name string - tsdbInfos []*infopb.TSDBInfo + tsdbInfos []infopb.TSDBInfo replicaLabels []string - partitionLabels []string expected []labels.Labels + partitionLabels []string }{ { name: "empty label sets", - tsdbInfos: []*infopb.TSDBInfo{}, + tsdbInfos: []infopb.TSDBInfo{}, expected: []labels.Labels{}, }, { name: "empty label sets with replica labels", - tsdbInfos: []*infopb.TSDBInfo{}, + tsdbInfos: []infopb.TSDBInfo{}, replicaLabels: []string{"replica"}, expected: []labels.Labels{}, }, { name: "non-empty label sets", - tsdbInfos: []*infopb.TSDBInfo{{ + tsdbInfos: []infopb.TSDBInfo{{ Labels: labelSetFromStrings("a", "1"), }}, expected: []labels.Labels{labels.FromStrings("a", "1")}, }, { name: "non-empty label sets with replica labels", - tsdbInfos: []*infopb.TSDBInfo{{ + tsdbInfos: []infopb.TSDBInfo{{ Labels: labelSetFromStrings("a", "1", "b", "2"), }}, replicaLabels: []string{"a"}, @@ -96,7 +96,7 @@ func TestRemoteEngine_LabelSets(t *testing.T) { }, { name: "replica labels not in label sets", - tsdbInfos: []*infopb.TSDBInfo{ + tsdbInfos: []infopb.TSDBInfo{ { Labels: labelSetFromStrings("a", "1", "c", "2"), }, @@ -106,7 +106,7 @@ func TestRemoteEngine_LabelSets(t *testing.T) { }, { name: "non-empty label sets with partition 
labels", - tsdbInfos: []*infopb.TSDBInfo{ + tsdbInfos: []infopb.TSDBInfo{ { Labels: labelSetFromStrings("a", "1", "c", "2"), }, @@ -132,24 +132,24 @@ func TestRemoteEngine_LabelSets(t *testing.T) { func TestRemoteEngine_MinT(t *testing.T) { tests := []struct { name string - tsdbInfos []*infopb.TSDBInfo + tsdbInfos []infopb.TSDBInfo replicaLabels []string expected int64 }{ { name: "empty label sets", - tsdbInfos: []*infopb.TSDBInfo{}, + tsdbInfos: []infopb.TSDBInfo{}, expected: math.MaxInt64, }, { name: "empty label sets with replica labels", - tsdbInfos: []*infopb.TSDBInfo{}, + tsdbInfos: []infopb.TSDBInfo{}, replicaLabels: []string{"replica"}, expected: math.MaxInt64, }, { name: "non-empty label sets", - tsdbInfos: []*infopb.TSDBInfo{{ + tsdbInfos: []infopb.TSDBInfo{{ Labels: labelSetFromStrings("a", "1"), MinTime: 30, }}, @@ -157,7 +157,7 @@ func TestRemoteEngine_MinT(t *testing.T) { }, { name: "non-empty label sets with replica labels", - tsdbInfos: []*infopb.TSDBInfo{{ + tsdbInfos: []infopb.TSDBInfo{{ Labels: labelSetFromStrings("a", "1", "b", "2"), MinTime: 30, }}, @@ -166,7 +166,7 @@ func TestRemoteEngine_MinT(t *testing.T) { }, { name: "replicated labelsets with different mint", - tsdbInfos: []*infopb.TSDBInfo{ + tsdbInfos: []infopb.TSDBInfo{ { Labels: labelSetFromStrings("a", "1", "replica", "1"), MinTime: 30, @@ -181,7 +181,7 @@ func TestRemoteEngine_MinT(t *testing.T) { }, { name: "multiple replicated labelsets with different mint", - tsdbInfos: []*infopb.TSDBInfo{ + tsdbInfos: []infopb.TSDBInfo{ { Labels: labelSetFromStrings("a", "1", "replica", "1"), MinTime: 30, @@ -216,8 +216,8 @@ func TestRemoteEngine_MinT(t *testing.T) { } } -func labelSetFromStrings(ss ...string) *labelpb.LabelSet { - return &labelpb.LabelSet{ +func labelSetFromStrings(ss ...string) labelpb.LabelSet { + return labelpb.LabelSet{ Labels: labelpb.PromLabelsToLabelpbLabels(labels.FromStrings(ss...)), } } diff --git a/pkg/queryfrontend/downsampled_test.go b/pkg/queryfrontend/downsampled_test.go index 062e435535..6ffe0ece2f 100644 --- a/pkg/queryfrontend/downsampled_test.go +++ b/pkg/queryfrontend/downsampled_test.go @@ -14,28 +14,28 @@ import ( func TestDownsampled_MinResponseTime(t *testing.T) { for _, tc := range []struct { desc string - sampleStreams []*queryrange.SampleStream + sampleStreams []queryrange.SampleStream expected int64 }{ { desc: "empty []sampleStream", - sampleStreams: []*queryrange.SampleStream{}, + sampleStreams: []queryrange.SampleStream{}, expected: -1, }, { desc: "one SampleStream with zero samples", - sampleStreams: []*queryrange.SampleStream{ + sampleStreams: []queryrange.SampleStream{ { - Samples: []*cortexpb.Sample{}, + Samples: []cortexpb.Sample{}, }, }, expected: -1, }, { desc: "one SampleStream with one sample at zero time", - sampleStreams: []*queryrange.SampleStream{ + sampleStreams: []queryrange.SampleStream{ { - Samples: []*cortexpb.Sample{ + Samples: []cortexpb.Sample{ {TimestampMs: 0}, }, }, @@ -44,9 +44,9 @@ func TestDownsampled_MinResponseTime(t *testing.T) { }, { desc: "one SampleStream with one sample", - sampleStreams: []*queryrange.SampleStream{ + sampleStreams: []queryrange.SampleStream{ { - Samples: []*cortexpb.Sample{ + Samples: []cortexpb.Sample{ {TimestampMs: 1}, }, }, @@ -55,14 +55,14 @@ func TestDownsampled_MinResponseTime(t *testing.T) { }, { desc: "two SampleStreams, first is the earliest", - sampleStreams: []*queryrange.SampleStream{ + sampleStreams: []queryrange.SampleStream{ { - Samples: []*cortexpb.Sample{ + Samples: []cortexpb.Sample{ {TimestampMs: 1}, }, 
}, { - Samples: []*cortexpb.Sample{ + Samples: []cortexpb.Sample{ {TimestampMs: 2}, }, }, @@ -71,20 +71,20 @@ func TestDownsampled_MinResponseTime(t *testing.T) { }, { desc: "three SampleStreams, second is earliest", - sampleStreams: []*queryrange.SampleStream{ + sampleStreams: []queryrange.SampleStream{ { - Samples: []*cortexpb.Sample{ + Samples: []cortexpb.Sample{ {TimestampMs: 2}, {TimestampMs: 3}, }, }, { - Samples: []*cortexpb.Sample{ + Samples: []cortexpb.Sample{ {TimestampMs: 1}, }, }, { - Samples: []*cortexpb.Sample{ + Samples: []cortexpb.Sample{ {TimestampMs: 2}, }, }, @@ -93,20 +93,20 @@ func TestDownsampled_MinResponseTime(t *testing.T) { }, { desc: "three SampleStreams, last is earliest", - sampleStreams: []*queryrange.SampleStream{ + sampleStreams: []queryrange.SampleStream{ { - Samples: []*cortexpb.Sample{ + Samples: []cortexpb.Sample{ {TimestampMs: 2}, {TimestampMs: 3}, }, }, { - Samples: []*cortexpb.Sample{ + Samples: []cortexpb.Sample{ {TimestampMs: 2}, }, }, { - Samples: []*cortexpb.Sample{ + Samples: []cortexpb.Sample{ {TimestampMs: 1}, }, }, @@ -115,20 +115,20 @@ func TestDownsampled_MinResponseTime(t *testing.T) { }, { desc: "three histogram SampleStreams, last is earliest", - sampleStreams: []*queryrange.SampleStream{ + sampleStreams: []queryrange.SampleStream{ { - Histograms: []*queryrange.SampleHistogramPair{ + Histograms: []queryrange.SampleHistogramPair{ {Timestamp: 2}, {Timestamp: 3}, }, }, { - Histograms: []*queryrange.SampleHistogramPair{ + Histograms: []queryrange.SampleHistogramPair{ {Timestamp: 2}, }, }, { - Histograms: []*queryrange.SampleHistogramPair{ + Histograms: []queryrange.SampleHistogramPair{ {Timestamp: 1}, }, }, @@ -137,14 +137,14 @@ func TestDownsampled_MinResponseTime(t *testing.T) { }, { desc: "mixed float and histogram SampleStreams, float is earliest", - sampleStreams: []*queryrange.SampleStream{ + sampleStreams: []queryrange.SampleStream{ { - Samples: []*cortexpb.Sample{ + Samples: []cortexpb.Sample{ {TimestampMs: 1}, }, }, { - Histograms: []*queryrange.SampleHistogramPair{ + Histograms: []queryrange.SampleHistogramPair{ {Timestamp: 2}, }, }, @@ -153,12 +153,12 @@ func TestDownsampled_MinResponseTime(t *testing.T) { }, { desc: "mixed float and histogram SampleStreams, float is earliest", - sampleStreams: []*queryrange.SampleStream{ + sampleStreams: []queryrange.SampleStream{ { - Samples: []*cortexpb.Sample{ + Samples: []cortexpb.Sample{ {TimestampMs: 1}, }, - Histograms: []*queryrange.SampleHistogramPair{ + Histograms: []queryrange.SampleHistogramPair{ {Timestamp: 2}, }, }, @@ -167,14 +167,14 @@ func TestDownsampled_MinResponseTime(t *testing.T) { }, { desc: "mixed float and histogram SampleStreams, histogram is earliest", - sampleStreams: []*queryrange.SampleStream{ + sampleStreams: []queryrange.SampleStream{ { - Samples: []*cortexpb.Sample{ + Samples: []cortexpb.Sample{ {TimestampMs: 3}, }, }, { - Histograms: []*queryrange.SampleHistogramPair{ + Histograms: []queryrange.SampleHistogramPair{ {Timestamp: 2}, }, }, @@ -183,12 +183,12 @@ func TestDownsampled_MinResponseTime(t *testing.T) { }, { desc: "mixed float and histogram SampleStream, histogram is earliest", - sampleStreams: []*queryrange.SampleStream{ + sampleStreams: []queryrange.SampleStream{ { - Samples: []*cortexpb.Sample{ + Samples: []cortexpb.Sample{ {TimestampMs: 3}, }, - Histograms: []*queryrange.SampleHistogramPair{ + Histograms: []queryrange.SampleHistogramPair{ {Timestamp: 2}, }, }, diff --git a/pkg/queryfrontend/labels_codec_test.go b/pkg/queryfrontend/labels_codec_test.go 
index 5f303af915..a592078bac 100644 --- a/pkg/queryfrontend/labels_codec_test.go +++ b/pkg/queryfrontend/labels_codec_test.go @@ -329,14 +329,14 @@ func TestLabelsCodec_DecodeResponse(t *testing.T) { seriesResponse := &ThanosSeriesResponse{ Status: "success", - Data: []*labelpb.LabelSet{{Labels: []*labelpb.Label{{Name: "foo", Value: "bar"}}}}, + Data: []labelpb.LabelSet{{Labels: []labelpb.Label{{Name: "foo", Value: "bar"}}}}, } seriesData, err := json.Marshal(seriesResponse) testutil.Ok(t, err) seriesResponseWithHeaders := &ThanosSeriesResponse{ Status: "success", - Data: []*labelpb.LabelSet{{Labels: []*labelpb.Label{{Name: "foo", Value: "bar"}}}}, + Data: []labelpb.LabelSet{{Labels: []labelpb.Label{{Name: "foo", Value: "bar"}}}}, Headers: []*ResponseHeader{{Name: cacheControlHeader, Values: []string{noStoreValue}}}, } seriesDataWithHeaders, err := json.Marshal(seriesResponseWithHeaders) @@ -468,45 +468,45 @@ func TestLabelsCodec_MergeResponse(t *testing.T) { { name: "One series response", responses: []queryrange.Response{ - &ThanosSeriesResponse{Status: "success", Data: []*labelpb.LabelSet{{Labels: []*labelpb.Label{{Name: "foo", Value: "bar"}}}}}, + &ThanosSeriesResponse{Status: "success", Data: []labelpb.LabelSet{{Labels: []labelpb.Label{{Name: "foo", Value: "bar"}}}}}, }, - expectedResponse: &ThanosSeriesResponse{Status: "success", Data: []*labelpb.LabelSet{{Labels: []*labelpb.Label{{Name: "foo", Value: "bar"}}}}}, + expectedResponse: &ThanosSeriesResponse{Status: "success", Data: []labelpb.LabelSet{{Labels: []labelpb.Label{{Name: "foo", Value: "bar"}}}}}, }, { name: "One series response and two empty responses", responses: []queryrange.Response{ &ThanosSeriesResponse{Status: queryrange.StatusSuccess}, - &ThanosSeriesResponse{Status: "success", Data: []*labelpb.LabelSet{{Labels: []*labelpb.Label{{Name: "foo", Value: "bar"}}}}}, + &ThanosSeriesResponse{Status: "success", Data: []labelpb.LabelSet{{Labels: []labelpb.Label{{Name: "foo", Value: "bar"}}}}}, &ThanosSeriesResponse{Status: queryrange.StatusSuccess}, }, - expectedResponse: &ThanosSeriesResponse{Status: "success", Data: []*labelpb.LabelSet{{Labels: []*labelpb.Label{{Name: "foo", Value: "bar"}}}}}, + expectedResponse: &ThanosSeriesResponse{Status: "success", Data: []labelpb.LabelSet{{Labels: []labelpb.Label{{Name: "foo", Value: "bar"}}}}}, }, { name: "Multiple duplicate series responses", responses: []queryrange.Response{ - &ThanosSeriesResponse{Status: "success", Data: []*labelpb.LabelSet{{Labels: []*labelpb.Label{{Name: "foo", Value: "bar"}}}}}, - &ThanosSeriesResponse{Status: "success", Data: []*labelpb.LabelSet{{Labels: []*labelpb.Label{{Name: "foo", Value: "bar"}}}}}, - &ThanosSeriesResponse{Status: "success", Data: []*labelpb.LabelSet{{Labels: []*labelpb.Label{{Name: "foo", Value: "bar"}}}}}, + &ThanosSeriesResponse{Status: "success", Data: []labelpb.LabelSet{{Labels: []labelpb.Label{{Name: "foo", Value: "bar"}}}}}, + &ThanosSeriesResponse{Status: "success", Data: []labelpb.LabelSet{{Labels: []labelpb.Label{{Name: "foo", Value: "bar"}}}}}, + &ThanosSeriesResponse{Status: "success", Data: []labelpb.LabelSet{{Labels: []labelpb.Label{{Name: "foo", Value: "bar"}}}}}, }, - expectedResponse: &ThanosSeriesResponse{Status: "success", Data: []*labelpb.LabelSet{{Labels: []*labelpb.Label{{Name: "foo", Value: "bar"}}}}}, + expectedResponse: &ThanosSeriesResponse{Status: "success", Data: []labelpb.LabelSet{{Labels: []labelpb.Label{{Name: "foo", Value: "bar"}}}}}, }, { name: "Multiple unordered series responses", responses: 
[]queryrange.Response{ - &ThanosSeriesResponse{Status: "success", Data: []*labelpb.LabelSet{ - {Labels: []*labelpb.Label{{Name: "foo", Value: "bar"}}}, - {Labels: []*labelpb.Label{{Name: "test", Value: "aaa"}, {Name: "instance", Value: "localhost:9090"}}}, + &ThanosSeriesResponse{Status: "success", Data: []labelpb.LabelSet{ + {Labels: []labelpb.Label{{Name: "foo", Value: "bar"}}}, + {Labels: []labelpb.Label{{Name: "test", Value: "aaa"}, {Name: "instance", Value: "localhost:9090"}}}, }}, - &ThanosSeriesResponse{Status: "success", Data: []*labelpb.LabelSet{ - {Labels: []*labelpb.Label{{Name: "foo", Value: "aaa"}}}, - {Labels: []*labelpb.Label{{Name: "test", Value: "bbb"}, {Name: "instance", Value: "localhost:9091"}}}, + &ThanosSeriesResponse{Status: "success", Data: []labelpb.LabelSet{ + {Labels: []labelpb.Label{{Name: "foo", Value: "aaa"}}}, + {Labels: []labelpb.Label{{Name: "test", Value: "bbb"}, {Name: "instance", Value: "localhost:9091"}}}, }}, }, - expectedResponse: &ThanosSeriesResponse{Status: "success", Data: []*labelpb.LabelSet{ - {Labels: []*labelpb.Label{{Name: "foo", Value: "aaa"}}}, - {Labels: []*labelpb.Label{{Name: "foo", Value: "bar"}}}, - {Labels: []*labelpb.Label{{Name: "test", Value: "aaa"}, {Name: "instance", Value: "localhost:9090"}}}, - {Labels: []*labelpb.Label{{Name: "test", Value: "bbb"}, {Name: "instance", Value: "localhost:9091"}}}, + expectedResponse: &ThanosSeriesResponse{Status: "success", Data: []labelpb.LabelSet{ + {Labels: []labelpb.Label{{Name: "foo", Value: "aaa"}}}, + {Labels: []labelpb.Label{{Name: "foo", Value: "bar"}}}, + {Labels: []labelpb.Label{{Name: "test", Value: "aaa"}, {Name: "instance", Value: "localhost:9090"}}}, + {Labels: []labelpb.Label{{Name: "test", Value: "bbb"}, {Name: "instance", Value: "localhost:9091"}}}, }}, }, } { @@ -576,7 +576,7 @@ func BenchmarkLabelsCodecDecodeResponse(b *testing.B) { b.Run("SeriesResponse", func(b *testing.B) { seriesData, err := json.Marshal(&ThanosSeriesResponse{ Status: "success", - Data: []*labelpb.LabelSet{{Labels: []*labelpb.Label{{Name: "foo", Value: "bar"}}}}, + Data: []labelpb.LabelSet{{Labels: []labelpb.Label{{Name: "foo", Value: "bar"}}}}, }) testutil.Ok(b, err) @@ -595,7 +595,7 @@ func BenchmarkLabelsCodecDecodeResponse(b *testing.B) { b.Run("SeriesResponseWithHeaders", func(b *testing.B) { seriesDataWithHeaders, err := json.Marshal(&ThanosSeriesResponse{ Status: "success", - Data: []*labelpb.LabelSet{{Labels: []*labelpb.Label{{Name: "foo", Value: "bar"}}}}, + Data: []labelpb.LabelSet{{Labels: []labelpb.Label{{Name: "foo", Value: "bar"}}}}, Headers: []*ResponseHeader{{Name: cacheControlHeader, Values: []string{noStoreValue}}}, }) testutil.Ok(b, err) @@ -707,11 +707,11 @@ func makeQueryRangeResponses(size int) ([]queryrange.Response, []queryrange.Resp seriesResp, &ThanosSeriesResponse{ Status: "success", - Data: []*labelpb.LabelSet{{Labels: []*labelpb.Label{{Name: fmt.Sprintf("foo-%d", i), Value: fmt.Sprintf("bar-%d", i)}}}}, + Data: []labelpb.LabelSet{{Labels: []labelpb.Label{{Name: fmt.Sprintf("foo-%d", i), Value: fmt.Sprintf("bar-%d", i)}}}}, }, &ThanosSeriesResponse{ Status: "success", - Data: []*labelpb.LabelSet{{Labels: []*labelpb.Label{{Name: fmt.Sprintf("foo-%d", i+1), Value: fmt.Sprintf("bar-%d", i+1)}}}}, + Data: []labelpb.LabelSet{{Labels: []labelpb.Label{{Name: fmt.Sprintf("foo-%d", i+1), Value: fmt.Sprintf("bar-%d", i+1)}}}}, }, ) } diff --git a/pkg/queryfrontend/queryinstant_codec.go b/pkg/queryfrontend/queryinstant_codec.go index 698a4e7928..a44bb7d94e 100644 --- 
a/pkg/queryfrontend/queryinstant_codec.go +++ b/pkg/queryfrontend/queryinstant_codec.go @@ -69,9 +69,9 @@ func (c queryInstantCodec) MergeResponse(req queryrange.Request, responses ...qu case model.ValMatrix.String(): res = &queryrange.PrometheusInstantQueryResponse{ Status: queryrange.StatusSuccess, - Data: &queryrange.PrometheusInstantQueryData{ + Data: queryrange.PrometheusInstantQueryData{ ResultType: model.ValMatrix.String(), - Result: &queryrange.PrometheusInstantQueryResult{ + Result: queryrange.PrometheusInstantQueryResult{ Result: &queryrange.PrometheusInstantQueryResult_Matrix{ Matrix: matrixMerge(promResponses), }, @@ -87,9 +87,9 @@ func (c queryInstantCodec) MergeResponse(req queryrange.Request, responses ...qu } res = &queryrange.PrometheusInstantQueryResponse{ Status: queryrange.StatusSuccess, - Data: &queryrange.PrometheusInstantQueryData{ + Data: queryrange.PrometheusInstantQueryData{ ResultType: model.ValVector.String(), - Result: &queryrange.PrometheusInstantQueryResult{ + Result: queryrange.PrometheusInstantQueryResult{ Result: &queryrange.PrometheusInstantQueryResult_Vector{ Vector: v, }, @@ -305,7 +305,7 @@ func vectorMerge(req queryrange.Request, resps []*queryrange.PrometheusInstantQu if s == nil { continue } - metric := cortexpb.LabelPairToModelMetric(sample.Labels).String() + metric := cortexpb.FromLabelAdaptersToLabels(sample.Labels).String() if existingSample, ok := output[metric]; !ok { output[metric] = s metrics = append(metrics, metric) // Preserve the order of metric. @@ -432,7 +432,7 @@ func matrixMerge(resps []*queryrange.PrometheusInstantQueryResponse) *queryrange continue } for _, stream := range resp.Data.Result.GetMatrix().SampleStreams { - metric := cortexpb.LabelPairToModelMetric(stream.Labels).String() + metric := cortexpb.FromLabelAdaptersToLabels(stream.Labels).String() existing, ok := output[metric] if !ok { existing = &queryrange.SampleStream{ diff --git a/pkg/queryfrontend/queryinstant_codec_test.go b/pkg/queryfrontend/queryinstant_codec_test.go index a61e18cf16..3d60cf6960 100644 --- a/pkg/queryfrontend/queryinstant_codec_test.go +++ b/pkg/queryfrontend/queryinstant_codec_test.go @@ -9,6 +9,7 @@ import ( "io" "net/http" "testing" + "time" "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" @@ -289,15 +290,13 @@ func TestMergeResponse(t *testing.T) { req: defaultReq, expectedResp: &queryrange.PrometheusInstantQueryResponse{ Status: queryrange.StatusSuccess, - Data: &queryrange.PrometheusInstantQueryData{ + Data: queryrange.PrometheusInstantQueryData{ ResultType: model.ValScalar.String(), Analysis: &queryrange.Analysis{ - Name: "foo", - ExecutionTime: &queryrange.Duration{ - Seconds: 1, - }, + Name: "foo", + ExecutionTime: queryrange.Duration(1 * time.Second), }, - Result: &queryrange.PrometheusInstantQueryResult{ + Result: queryrange.PrometheusInstantQueryResult{ Result: &queryrange.PrometheusInstantQueryResult_Scalar{ Scalar: &cortexpb.Sample{ TimestampMs: 0, @@ -310,15 +309,13 @@ func TestMergeResponse(t *testing.T) { resps: []queryrange.Response{ &queryrange.PrometheusInstantQueryResponse{ Status: queryrange.StatusSuccess, - Data: &queryrange.PrometheusInstantQueryData{ + Data: queryrange.PrometheusInstantQueryData{ ResultType: model.ValScalar.String(), Analysis: &queryrange.Analysis{ - Name: "foo", - ExecutionTime: &queryrange.Duration{ - Seconds: 1, - }, + Name: "foo", + ExecutionTime: queryrange.Duration(1 * time.Second), }, - Result: &queryrange.PrometheusInstantQueryResult{ + Result: 
queryrange.PrometheusInstantQueryResult{ Result: &queryrange.PrometheusInstantQueryResult_Scalar{ Scalar: &cortexpb.Sample{ TimestampMs: 0, @@ -336,10 +333,10 @@ func TestMergeResponse(t *testing.T) { resps: []queryrange.Response{}, expectedResp: &queryrange.PrometheusInstantQueryResponse{ Status: queryrange.StatusSuccess, - Data: &queryrange.PrometheusInstantQueryData{ + Data: queryrange.PrometheusInstantQueryData{ Analysis: nil, ResultType: model.ValVector.String(), - Result: &queryrange.PrometheusInstantQueryResult{ + Result: queryrange.PrometheusInstantQueryResult{ Result: &queryrange.PrometheusInstantQueryResult_Vector{}, }, }, @@ -351,18 +348,18 @@ func TestMergeResponse(t *testing.T) { resps: []queryrange.Response{ &queryrange.PrometheusInstantQueryResponse{ Status: queryrange.StatusSuccess, - Data: &queryrange.PrometheusInstantQueryData{ + Data: queryrange.PrometheusInstantQueryData{ ResultType: model.ValVector.String(), - Result: &queryrange.PrometheusInstantQueryResult{ + Result: queryrange.PrometheusInstantQueryResult{ Result: &queryrange.PrometheusInstantQueryResult_Vector{ Vector: &queryrange.Vector{ Samples: []*queryrange.Sample{ { Timestamp: 0, SampleValue: 1, - Labels: cortexpb.LabelMapToCortexMetric(map[string]string{ + Labels: cortexpb.FromLabelsToLabelAdapters(labels.FromMap(map[string]string{ "__name__": "up", - }), + })), }, }, }, @@ -373,19 +370,19 @@ func TestMergeResponse(t *testing.T) { }, expectedResp: &queryrange.PrometheusInstantQueryResponse{ Status: queryrange.StatusSuccess, - Data: &queryrange.PrometheusInstantQueryData{ + Data: queryrange.PrometheusInstantQueryData{ ResultType: model.ValVector.String(), Analysis: nil, - Result: &queryrange.PrometheusInstantQueryResult{ + Result: queryrange.PrometheusInstantQueryResult{ Result: &queryrange.PrometheusInstantQueryResult_Vector{ Vector: &queryrange.Vector{ Samples: []*queryrange.Sample{ { Timestamp: 0, SampleValue: 1, - Labels: cortexpb.LabelMapToCortexMetric(map[string]string{ + Labels: cortexpb.FromLabelsToLabelAdapters(labels.FromMap(map[string]string{ "__name__": "up", - }), + })), }, }, }, @@ -402,19 +399,19 @@ func TestMergeResponse(t *testing.T) { resps: []queryrange.Response{ &queryrange.PrometheusInstantQueryResponse{ Status: queryrange.StatusSuccess, - Data: &queryrange.PrometheusInstantQueryData{ + Data: queryrange.PrometheusInstantQueryData{ ResultType: model.ValVector.String(), - Result: &queryrange.PrometheusInstantQueryResult{ + Result: queryrange.PrometheusInstantQueryResult{ Result: &queryrange.PrometheusInstantQueryResult_Vector{ Vector: &queryrange.Vector{ Samples: []*queryrange.Sample{ { Timestamp: 0, SampleValue: 1, - Labels: cortexpb.LabelMapToCortexMetric(map[string]string{ + Labels: cortexpb.FromLabelsToLabelAdapters(labels.FromMap(map[string]string{ "__name__": "up", "job": "foo", - }), + })), }, }, }, @@ -424,19 +421,19 @@ func TestMergeResponse(t *testing.T) { }, &queryrange.PrometheusInstantQueryResponse{ Status: queryrange.StatusSuccess, - Data: &queryrange.PrometheusInstantQueryData{ + Data: queryrange.PrometheusInstantQueryData{ ResultType: model.ValVector.String(), - Result: &queryrange.PrometheusInstantQueryResult{ + Result: queryrange.PrometheusInstantQueryResult{ Result: &queryrange.PrometheusInstantQueryResult_Vector{ Vector: &queryrange.Vector{ Samples: []*queryrange.Sample{ { Timestamp: 0, SampleValue: 2, - Labels: cortexpb.LabelMapToCortexMetric(map[string]string{ + Labels: cortexpb.FromLabelsToLabelAdapters(labels.FromMap(map[string]string{ "__name__": "up", "job": "bar", 
- }), + })), }, }, }, @@ -447,28 +444,28 @@ func TestMergeResponse(t *testing.T) { }, expectedResp: &queryrange.PrometheusInstantQueryResponse{ Status: queryrange.StatusSuccess, - Data: &queryrange.PrometheusInstantQueryData{ + Data: queryrange.PrometheusInstantQueryData{ ResultType: model.ValVector.String(), Analysis: &queryrange.Analysis{}, - Result: &queryrange.PrometheusInstantQueryResult{ + Result: queryrange.PrometheusInstantQueryResult{ Result: &queryrange.PrometheusInstantQueryResult_Vector{ Vector: &queryrange.Vector{ Samples: []*queryrange.Sample{ { Timestamp: 0, SampleValue: 1, - Labels: cortexpb.LabelMapToCortexMetric(map[string]string{ + Labels: cortexpb.FromLabelsToLabelAdapters(labels.FromMap(map[string]string{ "__name__": "up", "job": "foo", - }), + })), }, { Timestamp: 0, SampleValue: 2, - Labels: cortexpb.LabelMapToCortexMetric(map[string]string{ + Labels: cortexpb.FromLabelsToLabelAdapters(labels.FromMap(map[string]string{ "__name__": "up", "job": "bar", - }), + })), }, }, }, @@ -485,19 +482,19 @@ func TestMergeResponse(t *testing.T) { resps: []queryrange.Response{ &queryrange.PrometheusInstantQueryResponse{ Status: queryrange.StatusSuccess, - Data: &queryrange.PrometheusInstantQueryData{ + Data: queryrange.PrometheusInstantQueryData{ ResultType: model.ValVector.String(), - Result: &queryrange.PrometheusInstantQueryResult{ + Result: queryrange.PrometheusInstantQueryResult{ Result: &queryrange.PrometheusInstantQueryResult_Vector{ Vector: &queryrange.Vector{ Samples: []*queryrange.Sample{ { Timestamp: 0, SampleValue: 1, - Labels: cortexpb.LabelMapToCortexMetric(map[string]string{ + Labels: cortexpb.FromLabelsToLabelAdapters(labels.FromMap(map[string]string{ "__name__": "up", "job": "foo", - }), + })), }, }, }, @@ -507,19 +504,19 @@ func TestMergeResponse(t *testing.T) { }, &queryrange.PrometheusInstantQueryResponse{ Status: queryrange.StatusSuccess, - Data: &queryrange.PrometheusInstantQueryData{ + Data: queryrange.PrometheusInstantQueryData{ ResultType: model.ValVector.String(), - Result: &queryrange.PrometheusInstantQueryResult{ + Result: queryrange.PrometheusInstantQueryResult{ Result: &queryrange.PrometheusInstantQueryResult_Vector{ Vector: &queryrange.Vector{ Samples: []*queryrange.Sample{ { Timestamp: 0, SampleValue: 2, - Labels: cortexpb.LabelMapToCortexMetric(map[string]string{ + Labels: cortexpb.FromLabelsToLabelAdapters(labels.FromMap(map[string]string{ "__name__": "up", "job": "bar", - }), + })), }, }, }, @@ -530,28 +527,28 @@ func TestMergeResponse(t *testing.T) { }, expectedResp: &queryrange.PrometheusInstantQueryResponse{ Status: queryrange.StatusSuccess, - Data: &queryrange.PrometheusInstantQueryData{ + Data: queryrange.PrometheusInstantQueryData{ Analysis: &queryrange.Analysis{}, ResultType: model.ValVector.String(), - Result: &queryrange.PrometheusInstantQueryResult{ + Result: queryrange.PrometheusInstantQueryResult{ Result: &queryrange.PrometheusInstantQueryResult_Vector{ Vector: &queryrange.Vector{ Samples: []*queryrange.Sample{ { Timestamp: 0, SampleValue: 1, - Labels: cortexpb.LabelMapToCortexMetric(map[string]string{ + Labels: cortexpb.FromLabelsToLabelAdapters(labels.FromMap(map[string]string{ "__name__": "up", "job": "foo", - }), + })), }, { Timestamp: 0, SampleValue: 2, - Labels: cortexpb.LabelMapToCortexMetric(map[string]string{ + Labels: cortexpb.FromLabelsToLabelAdapters(labels.FromMap(map[string]string{ "__name__": "up", "job": "bar", - }), + })), }, }, }, @@ -566,19 +563,19 @@ func TestMergeResponse(t *testing.T) { resps: 
[]queryrange.Response{ &queryrange.PrometheusInstantQueryResponse{ Status: queryrange.StatusSuccess, - Data: &queryrange.PrometheusInstantQueryData{ + Data: queryrange.PrometheusInstantQueryData{ ResultType: model.ValVector.String(), - Result: &queryrange.PrometheusInstantQueryResult{ + Result: queryrange.PrometheusInstantQueryResult{ Result: &queryrange.PrometheusInstantQueryResult_Vector{ Vector: &queryrange.Vector{ Samples: []*queryrange.Sample{ { Timestamp: 0, SampleValue: 1, - Labels: cortexpb.LabelMapToCortexMetric(map[string]string{ + Labels: cortexpb.FromLabelsToLabelAdapters(labels.FromMap(map[string]string{ "__name__": "up", "job": "foo", - }), + })), }, }, }, @@ -588,19 +585,19 @@ func TestMergeResponse(t *testing.T) { }, &queryrange.PrometheusInstantQueryResponse{ Status: queryrange.StatusSuccess, - Data: &queryrange.PrometheusInstantQueryData{ + Data: queryrange.PrometheusInstantQueryData{ ResultType: model.ValVector.String(), - Result: &queryrange.PrometheusInstantQueryResult{ + Result: queryrange.PrometheusInstantQueryResult{ Result: &queryrange.PrometheusInstantQueryResult_Vector{ Vector: &queryrange.Vector{ Samples: []*queryrange.Sample{ { Timestamp: 0, SampleValue: 2, - Labels: cortexpb.LabelMapToCortexMetric(map[string]string{ + Labels: cortexpb.FromLabelsToLabelAdapters(labels.FromMap(map[string]string{ "__name__": "up", "job": "bar", - }), + })), }, }, }, @@ -611,28 +608,28 @@ func TestMergeResponse(t *testing.T) { }, expectedResp: &queryrange.PrometheusInstantQueryResponse{ Status: queryrange.StatusSuccess, - Data: &queryrange.PrometheusInstantQueryData{ + Data: queryrange.PrometheusInstantQueryData{ ResultType: model.ValVector.String(), Analysis: &queryrange.Analysis{}, - Result: &queryrange.PrometheusInstantQueryResult{ + Result: queryrange.PrometheusInstantQueryResult{ Result: &queryrange.PrometheusInstantQueryResult_Vector{ Vector: &queryrange.Vector{ Samples: []*queryrange.Sample{ { Timestamp: 0, SampleValue: 2, - Labels: cortexpb.LabelMapToCortexMetric(map[string]string{ + Labels: cortexpb.FromLabelsToLabelAdapters(labels.FromMap(map[string]string{ "__name__": "up", "job": "bar", - }), + })), }, { Timestamp: 0, SampleValue: 1, - Labels: cortexpb.LabelMapToCortexMetric(map[string]string{ + Labels: cortexpb.FromLabelsToLabelAdapters(labels.FromMap(map[string]string{ "__name__": "up", "job": "foo", - }), + })), }, }, }, @@ -647,19 +644,19 @@ func TestMergeResponse(t *testing.T) { resps: []queryrange.Response{ &queryrange.PrometheusInstantQueryResponse{ Status: queryrange.StatusSuccess, - Data: &queryrange.PrometheusInstantQueryData{ + Data: queryrange.PrometheusInstantQueryData{ ResultType: model.ValVector.String(), - Result: &queryrange.PrometheusInstantQueryResult{ + Result: queryrange.PrometheusInstantQueryResult{ Result: &queryrange.PrometheusInstantQueryResult_Vector{ Vector: &queryrange.Vector{ Samples: []*queryrange.Sample{ { Timestamp: 0, SampleValue: 1, - Labels: cortexpb.LabelMapToCortexMetric(map[string]string{ + Labels: cortexpb.FromLabelsToLabelAdapters(labels.FromMap(map[string]string{ "__name__": "up", "job": "foo", - }), + })), }, }, }, @@ -669,19 +666,19 @@ func TestMergeResponse(t *testing.T) { }, &queryrange.PrometheusInstantQueryResponse{ Status: queryrange.StatusSuccess, - Data: &queryrange.PrometheusInstantQueryData{ + Data: queryrange.PrometheusInstantQueryData{ ResultType: model.ValVector.String(), - Result: &queryrange.PrometheusInstantQueryResult{ + Result: queryrange.PrometheusInstantQueryResult{ Result: 
&queryrange.PrometheusInstantQueryResult_Vector{ Vector: &queryrange.Vector{ Samples: []*queryrange.Sample{ { Timestamp: 1, SampleValue: 2, - Labels: cortexpb.LabelMapToCortexMetric(map[string]string{ + Labels: cortexpb.FromLabelsToLabelAdapters(labels.FromMap(map[string]string{ "__name__": "up", "job": "foo", - }), + })), }, }, }, @@ -692,20 +689,20 @@ func TestMergeResponse(t *testing.T) { }, expectedResp: &queryrange.PrometheusInstantQueryResponse{ Status: queryrange.StatusSuccess, - Data: &queryrange.PrometheusInstantQueryData{ + Data: queryrange.PrometheusInstantQueryData{ ResultType: model.ValVector.String(), Analysis: &queryrange.Analysis{}, - Result: &queryrange.PrometheusInstantQueryResult{ + Result: queryrange.PrometheusInstantQueryResult{ Result: &queryrange.PrometheusInstantQueryResult_Vector{ Vector: &queryrange.Vector{ Samples: []*queryrange.Sample{ { Timestamp: 1, SampleValue: 2, - Labels: cortexpb.LabelMapToCortexMetric(map[string]string{ + Labels: cortexpb.FromLabelsToLabelAdapters(labels.FromMap(map[string]string{ "__name__": "up", "job": "foo", - }), + })), }, }, }, @@ -720,9 +717,9 @@ func TestMergeResponse(t *testing.T) { resps: []queryrange.Response{ &queryrange.PrometheusInstantQueryResponse{ Status: queryrange.StatusSuccess, - Data: &queryrange.PrometheusInstantQueryData{ + Data: queryrange.PrometheusInstantQueryData{ ResultType: model.ValScalar.String(), - Result: &queryrange.PrometheusInstantQueryResult{ + Result: queryrange.PrometheusInstantQueryResult{ Result: &queryrange.PrometheusInstantQueryResult_Scalar{ Scalar: &cortexpb.Sample{ TimestampMs: 0, @@ -734,9 +731,9 @@ func TestMergeResponse(t *testing.T) { }, &queryrange.PrometheusInstantQueryResponse{ Status: queryrange.StatusSuccess, - Data: &queryrange.PrometheusInstantQueryData{ + Data: queryrange.PrometheusInstantQueryData{ ResultType: model.ValScalar.String(), - Result: &queryrange.PrometheusInstantQueryResult{ + Result: queryrange.PrometheusInstantQueryResult{ Result: &queryrange.PrometheusInstantQueryResult_Scalar{ Scalar: &cortexpb.Sample{ TimestampMs: 0, @@ -749,10 +746,10 @@ func TestMergeResponse(t *testing.T) { }, expectedResp: &queryrange.PrometheusInstantQueryResponse{ Status: queryrange.StatusSuccess, - Data: &queryrange.PrometheusInstantQueryData{ + Data: queryrange.PrometheusInstantQueryData{ ResultType: model.ValVector.String(), Analysis: &queryrange.Analysis{}, - Result: &queryrange.PrometheusInstantQueryResult{ + Result: queryrange.PrometheusInstantQueryResult{ Result: &queryrange.PrometheusInstantQueryResult_Vector{ Vector: &queryrange.Vector{ Samples: []*queryrange.Sample{}, @@ -768,25 +765,25 @@ func TestMergeResponse(t *testing.T) { resps: []queryrange.Response{ &queryrange.PrometheusInstantQueryResponse{ Status: queryrange.StatusSuccess, - Data: &queryrange.PrometheusInstantQueryData{ + Data: queryrange.PrometheusInstantQueryData{ ResultType: model.ValMatrix.String(), - Result: &queryrange.PrometheusInstantQueryResult{ + Result: queryrange.PrometheusInstantQueryResult{ Result: &queryrange.PrometheusInstantQueryResult_Matrix{ Matrix: &queryrange.Matrix{ SampleStreams: []*queryrange.SampleStream{ { - Samples: []*cortexpb.Sample{{TimestampMs: 1, Value: 2}}, - Labels: cortexpb.LabelMapToCortexMetric(map[string]string{ + Samples: []cortexpb.Sample{{TimestampMs: 1, Value: 2}}, + Labels: cortexpb.FromLabelsToLabelAdapters(labels.FromMap(map[string]string{ "__name__": "up", "job": "bar", - }), + })), }, { - Samples: []*cortexpb.Sample{{TimestampMs: 1, Value: 2}}, - Labels: 
cortexpb.LabelMapToCortexMetric(map[string]string{ + Samples: []cortexpb.Sample{{TimestampMs: 1, Value: 2}}, + Labels: cortexpb.FromLabelsToLabelAdapters(labels.FromMap(map[string]string{ "__name__": "up", "job": "foo", - }), + })), }, }, }, @@ -796,25 +793,25 @@ func TestMergeResponse(t *testing.T) { }, &queryrange.PrometheusInstantQueryResponse{ Status: queryrange.StatusSuccess, - Data: &queryrange.PrometheusInstantQueryData{ + Data: queryrange.PrometheusInstantQueryData{ ResultType: model.ValMatrix.String(), - Result: &queryrange.PrometheusInstantQueryResult{ + Result: queryrange.PrometheusInstantQueryResult{ Result: &queryrange.PrometheusInstantQueryResult_Matrix{ Matrix: &queryrange.Matrix{ SampleStreams: []*queryrange.SampleStream{ { - Samples: []*cortexpb.Sample{{TimestampMs: 2, Value: 3}}, - Labels: cortexpb.LabelMapToCortexMetric(map[string]string{ + Samples: []cortexpb.Sample{{TimestampMs: 2, Value: 3}}, + Labels: cortexpb.FromLabelsToLabelAdapters(labels.FromMap(map[string]string{ "__name__": "up", "job": "bar", - }), + })), }, { - Samples: []*cortexpb.Sample{{TimestampMs: 2, Value: 3}}, - Labels: cortexpb.LabelMapToCortexMetric(map[string]string{ + Samples: []cortexpb.Sample{{TimestampMs: 2, Value: 3}}, + Labels: cortexpb.FromLabelsToLabelAdapters(labels.FromMap(map[string]string{ "__name__": "up", "job": "foo", - }), + })), }, }, }, @@ -825,26 +822,26 @@ func TestMergeResponse(t *testing.T) { }, expectedResp: &queryrange.PrometheusInstantQueryResponse{ Status: queryrange.StatusSuccess, - Data: &queryrange.PrometheusInstantQueryData{ + Data: queryrange.PrometheusInstantQueryData{ ResultType: model.ValMatrix.String(), Analysis: &queryrange.Analysis{}, - Result: &queryrange.PrometheusInstantQueryResult{ + Result: queryrange.PrometheusInstantQueryResult{ Result: &queryrange.PrometheusInstantQueryResult_Matrix{ Matrix: &queryrange.Matrix{ SampleStreams: []*queryrange.SampleStream{ { - Samples: []*cortexpb.Sample{{TimestampMs: 1, Value: 2}, {TimestampMs: 2, Value: 3}}, - Labels: cortexpb.LabelMapToCortexMetric(map[string]string{ + Samples: []cortexpb.Sample{{TimestampMs: 1, Value: 2}, {TimestampMs: 2, Value: 3}}, + Labels: cortexpb.FromLabelsToLabelAdapters(labels.FromMap(map[string]string{ "__name__": "up", "job": "bar", - }), + })), }, { - Samples: []*cortexpb.Sample{{TimestampMs: 1, Value: 2}, {TimestampMs: 2, Value: 3}}, - Labels: cortexpb.LabelMapToCortexMetric(map[string]string{ + Samples: []cortexpb.Sample{{TimestampMs: 1, Value: 2}, {TimestampMs: 2, Value: 3}}, + Labels: cortexpb.FromLabelsToLabelAdapters(labels.FromMap(map[string]string{ "__name__": "up", "job": "foo", - }), + })), }, }, }, @@ -859,26 +856,26 @@ func TestMergeResponse(t *testing.T) { resps: []queryrange.Response{ &queryrange.PrometheusInstantQueryResponse{ Status: queryrange.StatusSuccess, - Data: &queryrange.PrometheusInstantQueryData{ + Data: queryrange.PrometheusInstantQueryData{ ResultType: model.ValMatrix.String(), Analysis: &queryrange.Analysis{}, - Result: &queryrange.PrometheusInstantQueryResult{ + Result: queryrange.PrometheusInstantQueryResult{ Result: &queryrange.PrometheusInstantQueryResult_Matrix{ Matrix: &queryrange.Matrix{ SampleStreams: []*queryrange.SampleStream{ { - Samples: []*cortexpb.Sample{{TimestampMs: 1, Value: 2}}, - Labels: cortexpb.LabelMapToCortexMetric(map[string]string{ + Samples: []cortexpb.Sample{{TimestampMs: 1, Value: 2}}, + Labels: cortexpb.FromLabelsToLabelAdapters(labels.FromMap(map[string]string{ "__name__": "up", "job": "bar", - }), + })), }, { - Samples: 
[]*cortexpb.Sample{{TimestampMs: 1, Value: 2}}, - Labels: cortexpb.LabelMapToCortexMetric(map[string]string{ + Samples: []cortexpb.Sample{{TimestampMs: 1, Value: 2}}, + Labels: cortexpb.FromLabelsToLabelAdapters(labels.FromMap(map[string]string{ "__name__": "up", "job": "foo", - }), + })), }, }, }, @@ -888,25 +885,25 @@ func TestMergeResponse(t *testing.T) { }, &queryrange.PrometheusInstantQueryResponse{ Status: queryrange.StatusSuccess, - Data: &queryrange.PrometheusInstantQueryData{ + Data: queryrange.PrometheusInstantQueryData{ ResultType: model.ValMatrix.String(), - Result: &queryrange.PrometheusInstantQueryResult{ + Result: queryrange.PrometheusInstantQueryResult{ Result: &queryrange.PrometheusInstantQueryResult_Matrix{ Matrix: &queryrange.Matrix{ SampleStreams: []*queryrange.SampleStream{ { - Samples: []*cortexpb.Sample{{TimestampMs: 1, Value: 2}}, - Labels: cortexpb.LabelMapToCortexMetric(map[string]string{ + Samples: []cortexpb.Sample{{TimestampMs: 1, Value: 2}}, + Labels: cortexpb.FromLabelsToLabelAdapters(labels.FromMap(map[string]string{ "__name__": "up", "job": "bar", - }), + })), }, { - Samples: []*cortexpb.Sample{{TimestampMs: 1, Value: 2}}, - Labels: cortexpb.LabelMapToCortexMetric(map[string]string{ + Samples: []cortexpb.Sample{{TimestampMs: 1, Value: 2}}, + Labels: cortexpb.FromLabelsToLabelAdapters(labels.FromMap(map[string]string{ "__name__": "up", "job": "foo", - }), + })), }, }, }, @@ -917,26 +914,26 @@ func TestMergeResponse(t *testing.T) { }, expectedResp: &queryrange.PrometheusInstantQueryResponse{ Status: queryrange.StatusSuccess, - Data: &queryrange.PrometheusInstantQueryData{ + Data: queryrange.PrometheusInstantQueryData{ ResultType: model.ValMatrix.String(), Analysis: &queryrange.Analysis{}, - Result: &queryrange.PrometheusInstantQueryResult{ + Result: queryrange.PrometheusInstantQueryResult{ Result: &queryrange.PrometheusInstantQueryResult_Matrix{ Matrix: &queryrange.Matrix{ SampleStreams: []*queryrange.SampleStream{ { - Samples: []*cortexpb.Sample{{TimestampMs: 1, Value: 2}}, - Labels: cortexpb.LabelMapToCortexMetric(map[string]string{ + Samples: []cortexpb.Sample{{TimestampMs: 1, Value: 2}}, + Labels: cortexpb.FromLabelsToLabelAdapters(labels.FromMap(map[string]string{ "__name__": "up", "job": "bar", - }), + })), }, { - Samples: []*cortexpb.Sample{{TimestampMs: 1, Value: 2}}, - Labels: cortexpb.LabelMapToCortexMetric(map[string]string{ + Samples: []cortexpb.Sample{{TimestampMs: 1, Value: 2}}, + Labels: cortexpb.FromLabelsToLabelAdapters(labels.FromMap(map[string]string{ "__name__": "up", "job": "foo", - }), + })), }, }, }, @@ -984,15 +981,13 @@ func TestDecodeResponse(t *testing.T) { expectedResponse: &queryrange.PrometheusInstantQueryResponse{ Status: queryrange.StatusSuccess, Headers: headers, - Data: &queryrange.PrometheusInstantQueryData{ + Data: queryrange.PrometheusInstantQueryData{ Analysis: &queryrange.Analysis{ - Name: "[noArgFunction]", - ExecutionTime: &queryrange.Duration{ - Seconds: 1, - }, + Name: "[noArgFunction]", + ExecutionTime: queryrange.Duration(1 * time.Second), }, ResultType: model.ValScalar.String(), - Result: &queryrange.PrometheusInstantQueryResult{ + Result: queryrange.PrometheusInstantQueryResult{ Result: &queryrange.PrometheusInstantQueryResult_Scalar{ Scalar: &cortexpb.Sample{TimestampMs: 1708690766576, Value: 1708690766.576}, }, @@ -1014,9 +1009,9 @@ func TestDecodeResponse(t *testing.T) { expectedResponse: &queryrange.PrometheusInstantQueryResponse{ Status: queryrange.StatusSuccess, Headers: headers, - Data: 
&queryrange.PrometheusInstantQueryData{ + Data: queryrange.PrometheusInstantQueryData{ ResultType: model.ValVector.String(), - Result: &queryrange.PrometheusInstantQueryResult{ + Result: queryrange.PrometheusInstantQueryResult{ Result: &queryrange.PrometheusInstantQueryResult_Vector{ Vector: &queryrange.Vector{ Samples: []*queryrange.Sample{}, @@ -1050,18 +1045,18 @@ func TestDecodeResponse(t *testing.T) { expectedResponse: &queryrange.PrometheusInstantQueryResponse{ Status: queryrange.StatusSuccess, Headers: headers, - Data: &queryrange.PrometheusInstantQueryData{ + Data: queryrange.PrometheusInstantQueryData{ ResultType: model.ValVector.String(), - Result: &queryrange.PrometheusInstantQueryResult{ + Result: queryrange.PrometheusInstantQueryResult{ Result: &queryrange.PrometheusInstantQueryResult_Vector{ Vector: &queryrange.Vector{ Samples: []*queryrange.Sample{ { - Labels: cortexpb.LabelMapToCortexMetric(map[string]string{ + Labels: cortexpb.FromLabelsToLabelAdapters(labels.FromMap(map[string]string{ "__name__": "up", "instance": "localhost:9090", "job": "prometheus", - }), + })), Timestamp: 1661020672043, SampleValue: 1, }, @@ -1087,9 +1082,9 @@ func TestDecodeResponse(t *testing.T) { expectedResponse: &queryrange.PrometheusInstantQueryResponse{ Status: queryrange.StatusSuccess, Headers: headers, - Data: &queryrange.PrometheusInstantQueryData{ + Data: queryrange.PrometheusInstantQueryData{ ResultType: model.ValScalar.String(), - Result: &queryrange.PrometheusInstantQueryResult{ + Result: queryrange.PrometheusInstantQueryResult{ Result: &queryrange.PrometheusInstantQueryResult_Scalar{ Scalar: &cortexpb.Sample{TimestampMs: 1661020145547, Value: 1}, }, @@ -1112,9 +1107,9 @@ func TestDecodeResponse(t *testing.T) { expectedResponse: &queryrange.PrometheusInstantQueryResponse{ Status: queryrange.StatusSuccess, Headers: headers, - Data: &queryrange.PrometheusInstantQueryData{ + Data: queryrange.PrometheusInstantQueryData{ ResultType: model.ValString.String(), - Result: &queryrange.PrometheusInstantQueryResult{ + Result: queryrange.PrometheusInstantQueryResult{ Result: &queryrange.PrometheusInstantQueryResult_StringSample{ StringSample: &queryrange.StringSample{TimestampMs: 1661020232424, Value: "test"}, }, @@ -1136,9 +1131,9 @@ func TestDecodeResponse(t *testing.T) { expectedResponse: &queryrange.PrometheusInstantQueryResponse{ Status: queryrange.StatusSuccess, Headers: headers, - Data: &queryrange.PrometheusInstantQueryData{ + Data: queryrange.PrometheusInstantQueryData{ ResultType: model.ValMatrix.String(), - Result: &queryrange.PrometheusInstantQueryResult{ + Result: queryrange.PrometheusInstantQueryResult{ Result: &queryrange.PrometheusInstantQueryResult_Matrix{ Matrix: &queryrange.Matrix{ SampleStreams: []*queryrange.SampleStream{}, @@ -1186,19 +1181,19 @@ func TestDecodeResponse(t *testing.T) { expectedResponse: &queryrange.PrometheusInstantQueryResponse{ Status: queryrange.StatusSuccess, Headers: headers, - Data: &queryrange.PrometheusInstantQueryData{ + Data: queryrange.PrometheusInstantQueryData{ ResultType: model.ValMatrix.String(), - Result: &queryrange.PrometheusInstantQueryResult{ + Result: queryrange.PrometheusInstantQueryResult{ Result: &queryrange.PrometheusInstantQueryResult_Matrix{ Matrix: &queryrange.Matrix{ SampleStreams: []*queryrange.SampleStream{ { - Labels: cortexpb.LabelMapToCortexMetric(map[string]string{ + Labels: cortexpb.FromLabelsToLabelAdapters(labels.FromMap(map[string]string{ "__name__": "up", "instance": "localhost:9090", "job": "prometheus", - }), - Samples: 
[]*cortexpb.Sample{ + })), + Samples: []cortexpb.Sample{ {TimestampMs: 1661020250310, Value: 1}, {TimestampMs: 1661020265309, Value: 1}, {TimestampMs: 1661020280309, Value: 1}, @@ -1279,21 +1274,21 @@ func TestDecodeResponse(t *testing.T) { expectedResponse: &queryrange.PrometheusInstantQueryResponse{ Status: queryrange.StatusSuccess, Headers: headers, - Data: &queryrange.PrometheusInstantQueryData{ + Data: queryrange.PrometheusInstantQueryData{ ResultType: model.ValMatrix.String(), - Result: &queryrange.PrometheusInstantQueryResult{ + Result: queryrange.PrometheusInstantQueryResult{ Result: &queryrange.PrometheusInstantQueryResult_Matrix{ Matrix: &queryrange.Matrix{ SampleStreams: []*queryrange.SampleStream{ { - Labels: cortexpb.LabelMapToCortexMetric(map[string]string{ + Labels: cortexpb.FromLabelsToLabelAdapters(labels.FromMap(map[string]string{ "__name__": "prometheus_http_requests_total", "code": "200", "handler": "/favicon.ico", "instance": "localhost:9090", "job": "prometheus", - }), - Samples: []*cortexpb.Sample{ + })), + Samples: []cortexpb.Sample{ {TimestampMs: 1661020430311, Value: 1}, {TimestampMs: 1661020445312, Value: 1}, {TimestampMs: 1661020460313, Value: 1}, @@ -1301,14 +1296,14 @@ func TestDecodeResponse(t *testing.T) { }, }, { - Labels: cortexpb.LabelMapToCortexMetric(map[string]string{ + Labels: cortexpb.FromLabelsToLabelAdapters(labels.FromMap(map[string]string{ "__name__": "prometheus_http_requests_total", "code": "200", "handler": "/metrics", "instance": "localhost:9090", "job": "prometheus", - }), - Samples: []*cortexpb.Sample{ + })), + Samples: []cortexpb.Sample{ {TimestampMs: 1661020430311, Value: 33}, {TimestampMs: 1661020445312, Value: 34}, {TimestampMs: 1661020460313, Value: 35}, diff --git a/pkg/queryfrontend/request.go b/pkg/queryfrontend/request.go index edf8f316eb..4a6fb3f0d8 100644 --- a/pkg/queryfrontend/request.go +++ b/pkg/queryfrontend/request.go @@ -77,8 +77,8 @@ func (r *ThanosQueryRangeRequest) GetStep() int64 { return r.Step } // GetQuery returns the query of the request. func (r *ThanosQueryRangeRequest) GetQuery() string { return r.Query } -func (r *ThanosQueryRangeRequest) GetCachingOptions() *queryrange.CachingOptions { - return &r.CachingOptions +func (r *ThanosQueryRangeRequest) GetCachingOptions() queryrange.CachingOptions { + return r.CachingOptions } func (r *ThanosQueryRangeRequest) GetStats() string { return r.Stats } @@ -178,8 +178,8 @@ func (r *ThanosQueryInstantRequest) GetStep() int64 { return 0 } // GetQuery returns the query of the request. func (r *ThanosQueryInstantRequest) GetQuery() string { return r.Query } -func (r *ThanosQueryInstantRequest) GetCachingOptions() *queryrange.CachingOptions { - return &queryrange.CachingOptions{} +func (r *ThanosQueryInstantRequest) GetCachingOptions() queryrange.CachingOptions { + return queryrange.CachingOptions{} } func (r *ThanosQueryInstantRequest) GetStats() string { return r.Stats } @@ -264,9 +264,7 @@ func (r *ThanosLabelsRequest) GetStep() int64 { return 1 } // GetQuery returns the query of the request. func (r *ThanosLabelsRequest) GetQuery() string { return "" } -func (r *ThanosLabelsRequest) GetCachingOptions() *queryrange.CachingOptions { - return &r.CachingOptions -} +func (r *ThanosLabelsRequest) GetCachingOptions() queryrange.CachingOptions { return r.CachingOptions } func (r *ThanosLabelsRequest) GetStats() string { return r.Stats } @@ -351,9 +349,7 @@ func (r *ThanosSeriesRequest) GetStep() int64 { return 1 } // GetQuery returns the query of the request. 
func (r *ThanosSeriesRequest) GetQuery() string { return "" } -func (r *ThanosSeriesRequest) GetCachingOptions() *queryrange.CachingOptions { - return &r.CachingOptions -} +func (r *ThanosSeriesRequest) GetCachingOptions() queryrange.CachingOptions { return r.CachingOptions } func (r *ThanosSeriesRequest) GetStats() string { return r.Stats } diff --git a/pkg/queryfrontend/response.go b/pkg/queryfrontend/response.go index b46006eca8..a6477130fa 100644 --- a/pkg/queryfrontend/response.go +++ b/pkg/queryfrontend/response.go @@ -46,12 +46,12 @@ func headersToQueryRangeHeaders(headers []*ResponseHeader) []*queryrange.Prometh } // GetHeaders returns the HTTP headers in the response. -func (m *ThanosLabelsResponse) GetQueryRangeHeaders() []*queryrange.PrometheusResponseHeader { +func (m *ThanosLabelsResponse) GetHeaders() []*queryrange.PrometheusResponseHeader { return headersToQueryRangeHeaders(m.Headers) } // GetHeaders returns the HTTP headers in the response. -func (m *ThanosSeriesResponse) GetQueryRangeHeaders() []*queryrange.PrometheusResponseHeader { +func (m *ThanosSeriesResponse) GetHeaders() []*queryrange.PrometheusResponseHeader { return headersToQueryRangeHeaders(m.Headers) } diff --git a/pkg/queryfrontend/response.pb.go b/pkg/queryfrontend/response.pb.go index 7d1606921c..dd0020f591 100644 --- a/pkg/queryfrontend/response.pb.go +++ b/pkg/queryfrontend/response.pb.go @@ -9,6 +9,7 @@ import ( math "math" math_bits "math/bits" + _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" labelpb "github.com/thanos-io/thanos/pkg/store/labelpb" ) @@ -25,19 +26,11 @@ var _ = math.Inf const _ = proto.GoGoProtoPackageIsVersion3 // please upgrade the proto package type ThanosLabelsResponse struct { - // @gotags: json:"status" - Status string `protobuf:"bytes,1,opt,name=Status,proto3" json:"status"` - // @gotags: json:"data" - Data []string `protobuf:"bytes,2,rep,name=Data,proto3" json:"data"` - // @gotags: json:"errorType,omitempty" - ErrorType string `protobuf:"bytes,3,opt,name=ErrorType,proto3" json:"errorType,omitempty"` - // @gotags: json:"error,omitempty" - Error string `protobuf:"bytes,4,opt,name=Error,proto3" json:"error,omitempty"` - // @gotags: json:"-" - Headers []*ResponseHeader `protobuf:"bytes,5,rep,name=Headers,proto3" json:"-"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Status string `protobuf:"bytes,1,opt,name=Status,proto3" json:"status"` + Data []string `protobuf:"bytes,2,rep,name=Data,proto3" json:"data"` + ErrorType string `protobuf:"bytes,3,opt,name=ErrorType,proto3" json:"errorType,omitempty"` + Error string `protobuf:"bytes,4,opt,name=Error,proto3" json:"error,omitempty"` + Headers []*ResponseHeader `protobuf:"bytes,5,rep,name=Headers,proto3" json:"-"` } func (m *ThanosLabelsResponse) Reset() { *m = ThanosLabelsResponse{} } @@ -73,55 +66,12 @@ func (m *ThanosLabelsResponse) XXX_DiscardUnknown() { var xxx_messageInfo_ThanosLabelsResponse proto.InternalMessageInfo -func (m *ThanosLabelsResponse) GetStatus() string { - if m != nil { - return m.Status - } - return "" -} - -func (m *ThanosLabelsResponse) GetData() []string { - if m != nil { - return m.Data - } - return nil -} - -func (m *ThanosLabelsResponse) GetErrorType() string { - if m != nil { - return m.ErrorType - } - return "" -} - -func (m *ThanosLabelsResponse) GetError() string { - if m != nil { - return m.Error - } - return "" -} - -func (m *ThanosLabelsResponse) GetHeaders() []*ResponseHeader { - if m != nil { - return 
m.Headers - } - return nil -} - type ThanosSeriesResponse struct { - // @gotags: json:"status" - Status string `protobuf:"bytes,1,opt,name=Status,proto3" json:"status"` - // @gotags: json:"data" - Data []*labelpb.LabelSet `protobuf:"bytes,2,rep,name=Data,proto3" json:"data"` - // @gotags: json:"errorType,omitempty" - ErrorType string `protobuf:"bytes,3,opt,name=ErrorType,proto3" json:"errorType,omitempty"` - // @gotags: json:"error,omitempty" - Error string `protobuf:"bytes,4,opt,name=Error,proto3" json:"error,omitempty"` - // @gotags: json:"-" - Headers []*ResponseHeader `protobuf:"bytes,5,rep,name=Headers,proto3" json:"-"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Status string `protobuf:"bytes,1,opt,name=Status,proto3" json:"status"` + Data []labelpb.LabelSet `protobuf:"bytes,2,rep,name=Data,proto3" json:"data"` + ErrorType string `protobuf:"bytes,3,opt,name=ErrorType,proto3" json:"errorType,omitempty"` + Error string `protobuf:"bytes,4,opt,name=Error,proto3" json:"error,omitempty"` + Headers []*ResponseHeader `protobuf:"bytes,5,rep,name=Headers,proto3" json:"-"` } func (m *ThanosSeriesResponse) Reset() { *m = ThanosSeriesResponse{} } @@ -157,49 +107,9 @@ func (m *ThanosSeriesResponse) XXX_DiscardUnknown() { var xxx_messageInfo_ThanosSeriesResponse proto.InternalMessageInfo -func (m *ThanosSeriesResponse) GetStatus() string { - if m != nil { - return m.Status - } - return "" -} - -func (m *ThanosSeriesResponse) GetData() []*labelpb.LabelSet { - if m != nil { - return m.Data - } - return nil -} - -func (m *ThanosSeriesResponse) GetErrorType() string { - if m != nil { - return m.ErrorType - } - return "" -} - -func (m *ThanosSeriesResponse) GetError() string { - if m != nil { - return m.Error - } - return "" -} - -func (m *ThanosSeriesResponse) GetHeaders() []*ResponseHeader { - if m != nil { - return m.Headers - } - return nil -} - type ResponseHeader struct { - // @gotags: json:"-" - Name string `protobuf:"bytes,1,opt,name=Name,proto3" json:"-"` - // @gotags: json:"-" - Values []string `protobuf:"bytes,2,rep,name=Values,proto3" json:"-"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Name string `protobuf:"bytes,1,opt,name=Name,proto3" json:"-"` + Values []string `protobuf:"bytes,2,rep,name=Values,proto3" json:"-"` } func (m *ResponseHeader) Reset() { *m = ResponseHeader{} } @@ -235,20 +145,6 @@ func (m *ResponseHeader) XXX_DiscardUnknown() { var xxx_messageInfo_ResponseHeader proto.InternalMessageInfo -func (m *ResponseHeader) GetName() string { - if m != nil { - return m.Name - } - return "" -} - -func (m *ResponseHeader) GetValues() []string { - if m != nil { - return m.Values - } - return nil -} - func init() { proto.RegisterType((*ThanosLabelsResponse)(nil), "queryfrontend.ThanosLabelsResponse") proto.RegisterType((*ThanosSeriesResponse)(nil), "queryfrontend.ThanosSeriesResponse") @@ -258,25 +154,30 @@ func init() { func init() { proto.RegisterFile("queryfrontend/response.proto", fileDescriptor_b882fa7024d92f38) } var fileDescriptor_b882fa7024d92f38 = []byte{ - // 275 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x29, 0x2c, 0x4d, 0x2d, - 0xaa, 0x4c, 0x2b, 0xca, 0xcf, 0x2b, 0x49, 0xcd, 0x4b, 0xd1, 0x2f, 0x4a, 0x2d, 0x2e, 0xc8, 0xcf, - 0x2b, 0x4e, 0xd5, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0xe2, 0x45, 0x91, 0x95, 0x92, 0x2c, 0x2e, - 0xc9, 0x2f, 0x4a, 0xd5, 0xcf, 0x49, 0x4c, 0x4a, 
0xcd, 0x29, 0x48, 0xd2, 0x2f, 0xa9, 0x2c, 0x48, - 0x2d, 0x86, 0xa8, 0x54, 0x5a, 0xcf, 0xc8, 0x25, 0x12, 0x92, 0x91, 0x98, 0x97, 0x5f, 0xec, 0x03, - 0x92, 0x2d, 0x0e, 0x82, 0x1a, 0x24, 0x24, 0xc6, 0xc5, 0x16, 0x5c, 0x92, 0x58, 0x52, 0x5a, 0x2c, - 0xc1, 0xa8, 0xc0, 0xa8, 0xc1, 0x19, 0x04, 0xe5, 0x09, 0x09, 0x71, 0xb1, 0xb8, 0x24, 0x96, 0x24, - 0x4a, 0x30, 0x29, 0x30, 0x6b, 0x70, 0x06, 0x81, 0xd9, 0x42, 0x32, 0x5c, 0x9c, 0xae, 0x45, 0x45, - 0xf9, 0x45, 0x21, 0x95, 0x05, 0xa9, 0x12, 0xcc, 0x60, 0xe5, 0x08, 0x01, 0x21, 0x11, 0x2e, 0x56, - 0x30, 0x47, 0x82, 0x05, 0x2c, 0x03, 0xe1, 0x08, 0x99, 0x73, 0xb1, 0x7b, 0xa4, 0x26, 0xa6, 0xa4, - 0x16, 0x15, 0x4b, 0xb0, 0x2a, 0x30, 0x6b, 0x70, 0x1b, 0xc9, 0xea, 0xa1, 0x38, 0x5a, 0x0f, 0xe6, - 0x12, 0x88, 0xaa, 0x20, 0x98, 0x6a, 0xa5, 0x83, 0x70, 0x17, 0x07, 0xa7, 0x16, 0x65, 0xa6, 0x12, - 0x76, 0xb1, 0x0a, 0x92, 0x8b, 0xb9, 0x8d, 0x04, 0xf4, 0x4a, 0xc0, 0x9a, 0xf5, 0xc0, 0xfe, 0x0d, - 0x4e, 0x2d, 0x19, 0x08, 0x3f, 0xd8, 0x70, 0xf1, 0xa1, 0x4a, 0x81, 0x82, 0xd5, 0x2f, 0x31, 0x37, - 0x15, 0xea, 0x74, 0x30, 0x1b, 0xe4, 0xa1, 0xb0, 0xc4, 0x9c, 0xd2, 0xd4, 0x62, 0x68, 0x60, 0x43, - 0x79, 0x4e, 0x3c, 0x27, 0x1e, 0xc9, 0x31, 0x5e, 0x78, 0x24, 0xc7, 0xf8, 0xe0, 0x91, 0x1c, 0x63, - 0x12, 0x1b, 0x38, 0x22, 0x8d, 0x01, 0x01, 0x00, 0x00, 0xff, 0xff, 0x2d, 0xc3, 0xd4, 0x7a, 0x12, - 0x02, 0x00, 0x00, + // 362 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xd4, 0x92, 0x3f, 0x4f, 0xc2, 0x40, + 0x18, 0xc6, 0x5b, 0x28, 0x55, 0x0e, 0xff, 0xe5, 0x20, 0xb1, 0x10, 0x68, 0x09, 0x13, 0x26, 0xda, + 0x26, 0x18, 0x57, 0x87, 0x46, 0x13, 0x63, 0x8c, 0x43, 0x21, 0xee, 0x47, 0x78, 0x45, 0x12, 0xe8, + 0xd5, 0xbb, 0x63, 0xe8, 0xb7, 0xe0, 0x63, 0x31, 0x32, 0x3a, 0x35, 0x0a, 0x5b, 0x3f, 0x82, 0x93, + 0xe1, 0x7a, 0x8d, 0x30, 0x3a, 0xba, 0xdd, 0x3d, 0xcf, 0xef, 0x7d, 0x93, 0xe7, 0xc9, 0x8b, 0x9a, + 0xef, 0x73, 0x60, 0xf1, 0x2b, 0xa3, 0xa1, 0x80, 0x70, 0xe4, 0x31, 0xe0, 0x11, 0x0d, 0x39, 0xb8, + 0x11, 0xa3, 0x82, 0xe2, 0xe3, 0x3d, 0xb7, 0x51, 0x1b, 0xd3, 0x31, 0x95, 0x8e, 0xb7, 0x7d, 0x65, + 0x50, 0xa3, 0xce, 0x05, 0x65, 0xe0, 0x4d, 0xc9, 0x10, 0xa6, 0xd1, 0xd0, 0x13, 0x71, 0x04, 0x3c, + 0xb3, 0x3a, 0xdf, 0x3a, 0xaa, 0x0d, 0xde, 0x48, 0x48, 0xf9, 0xd3, 0xd6, 0xe5, 0x81, 0x5a, 0x8f, + 0x3b, 0xc8, 0xec, 0x0b, 0x22, 0xe6, 0xdc, 0xd2, 0xdb, 0x7a, 0xb7, 0xec, 0xa3, 0x34, 0x71, 0x4c, + 0x2e, 0x95, 0x40, 0x39, 0xb8, 0x89, 0x8c, 0x3b, 0x22, 0x88, 0x55, 0x68, 0x17, 0xbb, 0x65, 0xff, + 0x30, 0x4d, 0x1c, 0x63, 0x44, 0x04, 0x09, 0xa4, 0x8a, 0x6f, 0x50, 0xf9, 0x9e, 0x31, 0xca, 0x06, + 0x71, 0x04, 0x56, 0x51, 0x2e, 0x39, 0x4f, 0x13, 0xa7, 0x0a, 0xb9, 0x78, 0x49, 0x67, 0x13, 0x01, + 0xb3, 0x48, 0xc4, 0xc1, 0x2f, 0x89, 0x2f, 0x50, 0x49, 0x7e, 0x2c, 0x43, 0x8e, 0x54, 0xd3, 0xc4, + 0x39, 0x95, 0x23, 0x3b, 0x78, 0x46, 0xe0, 0x5b, 0x74, 0xf0, 0x00, 0x64, 0x04, 0x8c, 0x5b, 0xa5, + 0x76, 0xb1, 0x5b, 0xe9, 0xb5, 0xdc, 0xbd, 0x3a, 0xdc, 0x3c, 0x4d, 0x46, 0xf9, 0xa5, 0x34, 0x71, + 0xf4, 0xab, 0x20, 0x1f, 0xea, 0x2c, 0x0a, 0x79, 0xf8, 0x3e, 0xb0, 0x09, 0xfc, 0x2d, 0x7c, 0x6f, + 0x27, 0x7c, 0xa5, 0x77, 0xe6, 0x0a, 0xb9, 0xc8, 0x95, 0x35, 0xf6, 0x41, 0xf8, 0x47, 0xcb, 0xc4, + 0xd1, 0xfe, 0x5d, 0x25, 0x8f, 0xe8, 0x64, 0x9f, 0xc0, 0x75, 0x64, 0x3c, 0x93, 0x19, 0xa8, 0x26, + 0x14, 0x2f, 0x25, 0xdc, 0x42, 0xe6, 0x0b, 0x99, 0xce, 0x81, 0xab, 0x0b, 0x50, 0xa6, 0x12, 0xfd, + 0xe6, 0xf2, 0xcb, 0xd6, 0x96, 0x6b, 0x5b, 0x5f, 0xad, 0x6d, 0xfd, 0x73, 0x6d, 0xeb, 0x8b, 0x8d, + 0xad, 0xad, 0x36, 0xb6, 0xf6, 0xb1, 0xb1, 0xb5, 0xa1, 0x29, 0x0f, 0xf0, 0xfa, 0x27, 0x00, 0x00, + 0xff, 
0xff, 0x68, 0xd1, 0x0b, 0xae, 0xe0, 0x02, 0x00, 0x00, } func (m *ThanosLabelsResponse) Marshal() (dAtA []byte, err error) { @@ -299,10 +200,6 @@ func (m *ThanosLabelsResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if len(m.Headers) > 0 { for iNdEx := len(m.Headers) - 1; iNdEx >= 0; iNdEx-- { { @@ -370,10 +267,6 @@ func (m *ThanosSeriesResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if len(m.Headers) > 0 { for iNdEx := len(m.Headers) - 1; iNdEx >= 0; iNdEx-- { { @@ -446,10 +339,6 @@ func (m *ResponseHeader) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if len(m.Values) > 0 { for iNdEx := len(m.Values) - 1; iNdEx >= 0; iNdEx-- { i -= len(m.Values[iNdEx]) @@ -510,9 +399,6 @@ func (m *ThanosLabelsResponse) Size() (n int) { n += 1 + l + sovResponse(uint64(l)) } } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -546,9 +432,6 @@ func (m *ThanosSeriesResponse) Size() (n int) { n += 1 + l + sovResponse(uint64(l)) } } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -568,9 +451,6 @@ func (m *ResponseHeader) Size() (n int) { n += 1 + l + sovResponse(uint64(l)) } } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -783,7 +663,6 @@ func (m *ThanosLabelsResponse) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } @@ -883,7 +762,7 @@ func (m *ThanosSeriesResponse) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Data = append(m.Data, &labelpb.LabelSet{}) + m.Data = append(m.Data, labelpb.LabelSet{}) if err := m.Data[len(m.Data)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -998,7 +877,6 @@ func (m *ThanosSeriesResponse) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } @@ -1113,7 +991,6 @@ func (m *ResponseHeader) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } diff --git a/pkg/queryfrontend/response.proto b/pkg/queryfrontend/response.proto index ab1984229a..47985e3848 100644 --- a/pkg/queryfrontend/response.proto +++ b/pkg/queryfrontend/response.proto @@ -5,37 +5,38 @@ syntax = "proto3"; package queryfrontend; +import "gogoproto/gogo.proto"; import "store/labelpb/types.proto"; +option (gogoproto.sizer_all) = true; +option (gogoproto.marshaler_all) = true; +option (gogoproto.unmarshaler_all) = true; +option (gogoproto.goproto_getters_all) = false; + +// Do not generate XXX fields to reduce memory footprint and opening a door +// for zero-copy casts to/from prometheus data types. 
+option (gogoproto.goproto_unkeyed_all) = false; +option (gogoproto.goproto_unrecognized_all) = false; +option (gogoproto.goproto_sizecache_all) = false; + + message ThanosLabelsResponse { - // @gotags: json:"status" - string Status = 1; - // @gotags: json:"data" - repeated string Data = 2; - // @gotags: json:"errorType,omitempty" - string ErrorType = 3; - // @gotags: json:"error,omitempty" - string Error = 4; - // @gotags: json:"-" - repeated ResponseHeader Headers = 5; + string Status = 1 [(gogoproto.jsontag) = "status"]; + repeated string Data = 2 [(gogoproto.jsontag) = "data"]; + string ErrorType = 3 [(gogoproto.jsontag) = "errorType,omitempty"]; + string Error = 4 [(gogoproto.jsontag) = "error,omitempty"]; + repeated ResponseHeader Headers = 5 [(gogoproto.jsontag) = "-"]; } message ThanosSeriesResponse { - // @gotags: json:"status" - string Status = 1; - // @gotags: json:"data" - repeated thanos.LabelSet Data = 2; - // @gotags: json:"errorType,omitempty" - string ErrorType = 3; - // @gotags: json:"error,omitempty" - string Error = 4; - // @gotags: json:"-" - repeated ResponseHeader Headers = 5; + string Status = 1 [(gogoproto.jsontag) = "status"]; + repeated thanos.LabelSet Data = 2 [(gogoproto.nullable) = false, (gogoproto.jsontag) = "data"]; + string ErrorType = 3 [(gogoproto.jsontag) = "errorType,omitempty"]; + string Error = 4 [(gogoproto.jsontag) = "error,omitempty"]; + repeated ResponseHeader Headers = 5 [(gogoproto.jsontag) = "-"]; } message ResponseHeader { - // @gotags: json:"-" - string Name = 1; - // @gotags: json:"-" - repeated string Values = 2; + string Name = 1 [(gogoproto.jsontag) = "-"]; + repeated string Values = 2 [(gogoproto.jsontag) = "-"]; } diff --git a/pkg/queryfrontend/roundtrip_test.go b/pkg/queryfrontend/roundtrip_test.go index 2561db801c..466e2911ea 100644 --- a/pkg/queryfrontend/roundtrip_test.go +++ b/pkg/queryfrontend/roundtrip_test.go @@ -840,12 +840,12 @@ func promqlResults(fail bool) (*int, http.Handler) { var lock sync.Mutex q := queryrange.PrometheusResponse{ Status: "success", - Data: &queryrange.PrometheusData{ + Data: queryrange.PrometheusData{ ResultType: string(parser.ValueTypeMatrix), - Result: []*queryrange.SampleStream{ + Result: []queryrange.SampleStream{ { - Labels: []*cortexpb.LabelPair{}, - Samples: []*cortexpb.Sample{ + Labels: []cortexpb.LabelAdapter{}, + Samples: []cortexpb.Sample{ {Value: 0, TimestampMs: 0}, {Value: 1, TimestampMs: 1}, }, @@ -876,12 +876,12 @@ func promqlResultsWithFailures(numFailures int) (*atomic.Int64, http.Handler) { var lock sync.Mutex q := queryrange.PrometheusResponse{ Status: "success", - Data: &queryrange.PrometheusData{ + Data: queryrange.PrometheusData{ ResultType: string(parser.ValueTypeMatrix), - Result: []*queryrange.SampleStream{ + Result: []queryrange.SampleStream{ { - Labels: []*cortexpb.LabelPair{}, - Samples: []*cortexpb.Sample{ + Labels: []cortexpb.LabelAdapter{}, + Samples: []cortexpb.Sample{ {Value: 0, TimestampMs: 0}, {Value: 1, TimestampMs: 1}, }, @@ -950,7 +950,7 @@ func seriesResults(fail bool) (*int, http.Handler) { var lock sync.Mutex q := ThanosSeriesResponse{ Status: "success", - Data: []*labelpb.LabelSet{{Labels: []*labelpb.Label{{Name: "__name__", Value: "up"}, {Name: "foo", Value: "bar"}}}}, + Data: []labelpb.LabelSet{{Labels: []labelpb.Label{{Name: "__name__", Value: "up"}, {Name: "foo", Value: "bar"}}}}, } return &count, http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { diff --git a/pkg/receive/handler.go b/pkg/receive/handler.go index cdf762f764..eb419eb752 100644 
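Aside (illustrative note, not part of the patch): with the gogoproto options added above, the JSON field names that previously came from the `@gotags:` comments are now emitted directly in the generated structs, and `(gogoproto.nullable) = false` makes `Data` a `[]labelpb.LabelSet` value slice, which is what the test fixtures above construct. A minimal sketch of how a response serializes under those tags, assuming package queryfrontend with `encoding/json` and the labelpb import used in this diff; the helper name is hypothetical.

// exampleSeriesResponseJSON is a hypothetical helper for illustration only.
func exampleSeriesResponseJSON() ([]byte, error) {
	resp := &ThanosSeriesResponse{
		Status: "success",
		Data:   []labelpb.LabelSet{{Labels: []labelpb.Label{{Name: "foo", Value: "bar"}}}},
		// Headers carry the json:"-" tag, so encoding/json skips them; the empty
		// ErrorType/Error fields are dropped by omitempty, leaving only the
		// "status" and "data" keys at the top level.
		Headers: []*ResponseHeader{{Name: "Cache-Control", Values: []string{"no-store"}}},
	}
	return json.Marshal(resp)
}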
--- a/pkg/receive/handler.go +++ b/pkg/receive/handler.go @@ -441,7 +441,7 @@ type endpointReplica struct { type trackedSeries struct { seriesIDs []int - timeSeries []*prompb.TimeSeries + timeSeries []prompb.TimeSeries } type writeResponse struct { @@ -817,7 +817,7 @@ func (h *Handler) fanoutForward(ctx context.Context, params remoteWriteParams) ( func (h *Handler) distributeTimeseriesToReplicas( tenantHTTP string, replicas []uint64, - timeseries []*prompb.TimeSeries, + timeseries []prompb.TimeSeries, ) (map[endpointReplica]map[string]trackedSeries, map[endpointReplica]map[string]trackedSeries, error) { h.mtx.RLock() defer h.mtx.RUnlock() @@ -843,7 +843,7 @@ func (h *Handler) distributeTimeseriesToReplicas( } for _, rn := range replicas { - endpoint, err := h.hashring.GetN(tenant, ts, rn) + endpoint, err := h.hashring.GetN(tenant, &ts, rn) if err != nil { return nil, nil, err } @@ -857,7 +857,7 @@ func (h *Handler) distributeTimeseriesToReplicas( writeDestination[endpointReplica] = map[string]trackedSeries{ tenant: { seriesIDs: make([]int, 0), - timeSeries: make([]*prompb.TimeSeries, 0), + timeSeries: make([]prompb.TimeSeries, 0), }, } } @@ -915,7 +915,7 @@ func (h *Handler) sendLocalWrite( span.SetTag("endpoint", writeDestination.endpoint) span.SetTag("replica", writeDestination.replica) - tenantSeriesMapping := map[string][]*prompb.TimeSeries{} + tenantSeriesMapping := map[string][]prompb.TimeSeries{} for _, ts := range trackedSeries.timeSeries { var tenant = tenantHTTP if h.splitTenantLabelName != "" { @@ -1044,7 +1044,7 @@ func (h *Handler) relabel(wreq *prompb.WriteRequest) { if len(h.options.RelabelConfigs) == 0 { return } - timeSeries := make([]*prompb.TimeSeries, 0, len(wreq.Timeseries)) + timeSeries := make([]prompb.TimeSeries, 0, len(wreq.Timeseries)) for _, ts := range wreq.Timeseries { var keep bool lbls, keep := relabel.Process(labelpb.LabelpbLabelsToPromLabels(ts.Labels), h.options.RelabelConfigs...) diff --git a/pkg/receive/handler_test.go b/pkg/receive/handler_test.go index 872801070b..997239d3bb 100644 --- a/pkg/receive/handler_test.go +++ b/pkg/receive/handler_test.go @@ -657,7 +657,7 @@ func testReceiveQuorum(t *testing.T, hashringAlgo HashringAlgorithm, withConsist var expected int n := a.appender.(*fakeAppender).Get(lset) got := uint64(len(n)) - if a.appenderErr == nil && endpointHit(t, hashring, tc.replicationFactor, handlers[j].options.Endpoint, tenant, ts) { + if a.appenderErr == nil && endpointHit(t, hashring, tc.replicationFactor, handlers[j].options.Endpoint, tenant, &ts) { // We have len(handlers) copies of each sample because the test case // is run once for each handler and they all use the same appender. expected = len(handlers) * len(ts.Samples) @@ -669,7 +669,7 @@ func testReceiveQuorum(t *testing.T, hashringAlgo HashringAlgorithm, withConsist var expectedMin int n := a.appender.(*fakeAppender).Get(lset) got := uint64(len(n)) - if a.appenderErr == nil && endpointHit(t, hashring, tc.replicationFactor, handlers[j].options.Endpoint, tenant, ts) { + if a.appenderErr == nil && endpointHit(t, hashring, tc.replicationFactor, handlers[j].options.Endpoint, tenant, &ts) { // We have len(handlers) copies of each sample because the test case // is run once for each handler and they all use the same appender. 
expectedMin = int((tc.replicationFactor/2)+1) * len(ts.Samples) @@ -785,16 +785,16 @@ func TestReceiveWriteRequestLimits(t *testing.T) { ) wreq := &prompb.WriteRequest{ - Timeseries: []*prompb.TimeSeries{}, + Timeseries: []prompb.TimeSeries{}, } for i := 0; i < tc.amountSeries; i += 1 { label := labelpb.Label{Name: "foo", Value: "bar"} - series := &prompb.TimeSeries{ - Labels: []*labelpb.Label{&label}, + series := prompb.TimeSeries{ + Labels: []labelpb.Label{label}, } for j := 0; j < tc.amountSamples; j += 1 { - sample := &prompb.Sample{Value: float64(j), Timestamp: int64(j)} + sample := prompb.Sample{Value: float64(j), Timestamp: int64(j)} series.Samples = append(series.Samples, sample) } wreq.Timeseries = append(wreq.Timeseries, series) @@ -938,14 +938,14 @@ func (a *tsOverrideAppender) GetRef(lset labels.Labels, hash uint64) (storage.Se // serializeSeriesWithOneSample returns marshaled and compressed remote write requests like it would // be sent to Thanos receive. // It has one sample and allow passing multiple series, in same manner as typical Prometheus would batch it. -func serializeSeriesWithOneSample(t testing.TB, series [][]*labelpb.Label) []byte { - r := &prompb.WriteRequest{Timeseries: make([]*prompb.TimeSeries, 0, len(series))} +func serializeSeriesWithOneSample(t testing.TB, series [][]labelpb.Label) []byte { + r := &prompb.WriteRequest{Timeseries: make([]prompb.TimeSeries, 0, len(series))} for _, s := range series { - r.Timeseries = append(r.Timeseries, &prompb.TimeSeries{ + r.Timeseries = append(r.Timeseries, prompb.TimeSeries{ Labels: s, // Timestamp does not matter, it will be overridden. - Samples: []*prompb.Sample{{Value: math.MaxFloat64, Timestamp: math.MinInt64}}, + Samples: []prompb.Sample{{Value: math.MaxFloat64, Timestamp: math.MinInt64}}, }) } body, err := proto.Marshal(r) @@ -953,17 +953,17 @@ func serializeSeriesWithOneSample(t testing.TB, series [][]*labelpb.Label) []byt return snappy.Encode(nil, body) } -func makeSeriesWithValues(numSeries int) []*prompb.TimeSeries { - series := make([]*prompb.TimeSeries, numSeries) +func makeSeriesWithValues(numSeries int) []prompb.TimeSeries { + series := make([]prompb.TimeSeries, numSeries) for i := 0; i < numSeries; i++ { - series[i] = &prompb.TimeSeries{ - Labels: []*labelpb.Label{ + series[i] = prompb.TimeSeries{ + Labels: []labelpb.Label{ { Name: fmt.Sprintf("pod-%d", i), Value: fmt.Sprintf("nginx-%d", i), }, }, - Samples: []*prompb.Sample{ + Samples: []prompb.Sample{ { Value: float64(i), Timestamp: 10, @@ -1012,13 +1012,13 @@ func benchmarkHandlerMultiTSDBReceiveRemoteWrite(b testutil.TB) { }{ { name: "typical labels under 1KB, 500 of them", - writeRequest: serializeSeriesWithOneSample(b, func() [][]*labelpb.Label { - series := make([][]*labelpb.Label, 500) + writeRequest: serializeSeriesWithOneSample(b, func() [][]labelpb.Label { + series := make([][]labelpb.Label, 500) for s := 0; s < len(series); s++ { - lbls := make([]*labelpb.Label, 10) + lbls := make([]labelpb.Label, 10) for i := 0; i < len(lbls); i++ { // Label ~20B name, 50B value. 
- lbls[i] = &labelpb.Label{Name: fmt.Sprintf("abcdefghijabcdefghijabcdefghij%d", i), Value: fmt.Sprintf("abcdefghijabcdefghijabcdefghijabcdefghijabcdefghij%d", i)} + lbls[i] = labelpb.Label{Name: fmt.Sprintf("abcdefghijabcdefghijabcdefghij%d", i), Value: fmt.Sprintf("abcdefghijabcdefghijabcdefghijabcdefghijabcdefghij%d", i)} } series[s] = lbls } @@ -1027,13 +1027,13 @@ func benchmarkHandlerMultiTSDBReceiveRemoteWrite(b testutil.TB) { }, { name: "typical labels under 1KB, 5000 of them", - writeRequest: serializeSeriesWithOneSample(b, func() [][]*labelpb.Label { - series := make([][]*labelpb.Label, 5000) + writeRequest: serializeSeriesWithOneSample(b, func() [][]labelpb.Label { + series := make([][]labelpb.Label, 5000) for s := 0; s < len(series); s++ { - lbls := make([]*labelpb.Label, 10) + lbls := make([]labelpb.Label, 10) for i := 0; i < len(lbls); i++ { // Label ~20B name, 50B value. - lbls[i] = &labelpb.Label{Name: fmt.Sprintf("abcdefghijabcdefghijabcdefghij%d", i), Value: fmt.Sprintf("abcdefghijabcdefghijabcdefghijabcdefghijabcdefghij%d", i)} + lbls[i] = labelpb.Label{Name: fmt.Sprintf("abcdefghijabcdefghijabcdefghij%d", i), Value: fmt.Sprintf("abcdefghijabcdefghijabcdefghijabcdefghijabcdefghij%d", i)} } series[s] = lbls } @@ -1042,13 +1042,13 @@ func benchmarkHandlerMultiTSDBReceiveRemoteWrite(b testutil.TB) { }, { name: "typical labels under 1KB, 20000 of them", - writeRequest: serializeSeriesWithOneSample(b, func() [][]*labelpb.Label { - series := make([][]*labelpb.Label, 20000) + writeRequest: serializeSeriesWithOneSample(b, func() [][]labelpb.Label { + series := make([][]labelpb.Label, 20000) for s := 0; s < len(series); s++ { - lbls := make([]*labelpb.Label, 10) + lbls := make([]labelpb.Label, 10) for i := 0; i < len(lbls); i++ { // Label ~20B name, 50B value. - lbls[i] = &labelpb.Label{Name: fmt.Sprintf("abcdefghijabcdefghijabcdefghij%d", i), Value: fmt.Sprintf("abcdefghijabcdefghijabcdefghijabcdefghijabcdefghij%d", i)} + lbls[i] = labelpb.Label{Name: fmt.Sprintf("abcdefghijabcdefghijabcdefghij%d", i), Value: fmt.Sprintf("abcdefghijabcdefghijabcdefghijabcdefghijabcdefghij%d", i)} } series[s] = lbls } @@ -1057,8 +1057,8 @@ func benchmarkHandlerMultiTSDBReceiveRemoteWrite(b testutil.TB) { }, { name: "extremely large label value 10MB, 10 of them", - writeRequest: serializeSeriesWithOneSample(b, func() [][]*labelpb.Label { - series := make([][]*labelpb.Label, 10) + writeRequest: serializeSeriesWithOneSample(b, func() [][]labelpb.Label { + series := make([][]labelpb.Label, 10) for s := 0; s < len(series); s++ { lbl := &strings.Builder{} lbl.Grow(1024 * 1024 * 10) // 10MB. 
@@ -1066,7 +1066,7 @@ func benchmarkHandlerMultiTSDBReceiveRemoteWrite(b testutil.TB) { for i := 0; i < lbl.Cap()/len(word); i++ { _, _ = lbl.WriteString(word) } - series[s] = []*labelpb.Label{{Name: "__name__", Value: lbl.String()}} + series[s] = []labelpb.Label{{Name: "__name__", Value: lbl.String()}} } return series }()), @@ -1213,9 +1213,9 @@ func TestRelabel(t *testing.T) { { name: "empty relabel configs", writeRequest: prompb.WriteRequest{ - Timeseries: []*prompb.TimeSeries{ + Timeseries: []prompb.TimeSeries{ { - Labels: []*labelpb.Label{ + Labels: []labelpb.Label{ { Name: "__name__", Value: "test_metric", @@ -1225,7 +1225,7 @@ func TestRelabel(t *testing.T) { Value: "bar", }, }, - Samples: []*prompb.Sample{ + Samples: []prompb.Sample{ { Timestamp: 0, Value: 1, @@ -1235,9 +1235,9 @@ func TestRelabel(t *testing.T) { }, }, expectedWriteRequest: prompb.WriteRequest{ - Timeseries: []*prompb.TimeSeries{ + Timeseries: []prompb.TimeSeries{ { - Labels: []*labelpb.Label{ + Labels: []labelpb.Label{ { Name: "__name__", Value: "test_metric", @@ -1247,7 +1247,7 @@ func TestRelabel(t *testing.T) { Value: "bar", }, }, - Samples: []*prompb.Sample{ + Samples: []prompb.Sample{ { Timestamp: 0, Value: 1, @@ -1269,9 +1269,9 @@ func TestRelabel(t *testing.T) { }, }, writeRequest: prompb.WriteRequest{ - Timeseries: []*prompb.TimeSeries{ + Timeseries: []prompb.TimeSeries{ { - Labels: []*labelpb.Label{ + Labels: []labelpb.Label{ { Name: "__name__", Value: "test_metric", @@ -1281,7 +1281,7 @@ func TestRelabel(t *testing.T) { Value: "bar", }, }, - Samples: []*prompb.Sample{ + Samples: []prompb.Sample{ { Timestamp: 0, Value: 1, @@ -1291,9 +1291,9 @@ func TestRelabel(t *testing.T) { }, }, expectedWriteRequest: prompb.WriteRequest{ - Timeseries: []*prompb.TimeSeries{ + Timeseries: []prompb.TimeSeries{ { - Labels: []*labelpb.Label{ + Labels: []labelpb.Label{ { Name: "__name__", Value: "test_metric", @@ -1303,7 +1303,7 @@ func TestRelabel(t *testing.T) { Value: "bar", }, }, - Samples: []*prompb.Sample{ + Samples: []prompb.Sample{ { Timestamp: 0, Value: 1, @@ -1330,9 +1330,9 @@ func TestRelabel(t *testing.T) { }, }, writeRequest: prompb.WriteRequest{ - Timeseries: []*prompb.TimeSeries{ + Timeseries: []prompb.TimeSeries{ { - Labels: []*labelpb.Label{ + Labels: []labelpb.Label{ { Name: "__name__", Value: "test_metric", @@ -1342,7 +1342,7 @@ func TestRelabel(t *testing.T) { Value: "bar", }, }, - Samples: []*prompb.Sample{ + Samples: []prompb.Sample{ { Timestamp: 0, Value: 1, @@ -1352,9 +1352,9 @@ func TestRelabel(t *testing.T) { }, }, expectedWriteRequest: prompb.WriteRequest{ - Timeseries: []*prompb.TimeSeries{ + Timeseries: []prompb.TimeSeries{ { - Labels: []*labelpb.Label{ + Labels: []labelpb.Label{ { Name: "__name__", Value: "foo", @@ -1364,7 +1364,7 @@ func TestRelabel(t *testing.T) { Value: "test", }, }, - Samples: []*prompb.Sample{ + Samples: []prompb.Sample{ { Timestamp: 0, Value: 1, @@ -1383,9 +1383,9 @@ func TestRelabel(t *testing.T) { }, }, writeRequest: prompb.WriteRequest{ - Timeseries: []*prompb.TimeSeries{ + Timeseries: []prompb.TimeSeries{ { - Labels: []*labelpb.Label{ + Labels: []labelpb.Label{ { Name: "__name__", Value: "test_metric", @@ -1395,7 +1395,7 @@ func TestRelabel(t *testing.T) { Value: "bar", }, }, - Samples: []*prompb.Sample{ + Samples: []prompb.Sample{ { Timestamp: 0, Value: 1, @@ -1405,15 +1405,15 @@ func TestRelabel(t *testing.T) { }, }, expectedWriteRequest: prompb.WriteRequest{ - Timeseries: []*prompb.TimeSeries{ + Timeseries: []prompb.TimeSeries{ { - Labels: []*labelpb.Label{ + 
Labels: []labelpb.Label{ { Name: "__name__", Value: "test_metric", }, }, - Samples: []*prompb.Sample{ + Samples: []prompb.Sample{ { Timestamp: 0, Value: 1, @@ -1433,9 +1433,9 @@ func TestRelabel(t *testing.T) { }, }, writeRequest: prompb.WriteRequest{ - Timeseries: []*prompb.TimeSeries{ + Timeseries: []prompb.TimeSeries{ { - Labels: []*labelpb.Label{ + Labels: []labelpb.Label{ { Name: "__name__", Value: "test_metric", @@ -1445,7 +1445,7 @@ func TestRelabel(t *testing.T) { Value: "bar", }, }, - Samples: []*prompb.Sample{ + Samples: []prompb.Sample{ { Timestamp: 0, Value: 1, @@ -1455,7 +1455,7 @@ func TestRelabel(t *testing.T) { }, }, expectedWriteRequest: prompb.WriteRequest{ - Timeseries: []*prompb.TimeSeries{}, + Timeseries: []prompb.TimeSeries{}, }, }, { @@ -1467,9 +1467,9 @@ func TestRelabel(t *testing.T) { }, }, writeRequest: prompb.WriteRequest{ - Timeseries: []*prompb.TimeSeries{ + Timeseries: []prompb.TimeSeries{ { - Labels: []*labelpb.Label{ + Labels: []labelpb.Label{ { Name: "__name__", Value: "test_metric", @@ -1479,9 +1479,9 @@ func TestRelabel(t *testing.T) { Value: "bar", }, }, - Exemplars: []*prompb.Exemplar{ + Exemplars: []prompb.Exemplar{ { - Labels: []*labelpb.Label{ + Labels: []labelpb.Label{ { Name: "traceID", Value: "foo", @@ -1495,17 +1495,17 @@ func TestRelabel(t *testing.T) { }, }, expectedWriteRequest: prompb.WriteRequest{ - Timeseries: []*prompb.TimeSeries{ + Timeseries: []prompb.TimeSeries{ { - Labels: []*labelpb.Label{ + Labels: []labelpb.Label{ { Name: "__name__", Value: "test_metric", }, }, - Exemplars: []*prompb.Exemplar{ + Exemplars: []prompb.Exemplar{ { - Labels: []*labelpb.Label{ + Labels: []labelpb.Label{ { Name: "traceID", Value: "foo", @@ -1529,9 +1529,9 @@ func TestRelabel(t *testing.T) { }, }, writeRequest: prompb.WriteRequest{ - Timeseries: []*prompb.TimeSeries{ + Timeseries: []prompb.TimeSeries{ { - Labels: []*labelpb.Label{ + Labels: []labelpb.Label{ { Name: "__name__", Value: "test_metric", @@ -1541,9 +1541,9 @@ func TestRelabel(t *testing.T) { Value: "bar", }, }, - Exemplars: []*prompb.Exemplar{ + Exemplars: []prompb.Exemplar{ { - Labels: []*labelpb.Label{ + Labels: []labelpb.Label{ { Name: "traceID", Value: "foo", @@ -1557,7 +1557,7 @@ func TestRelabel(t *testing.T) { }, }, expectedWriteRequest: prompb.WriteRequest{ - Timeseries: []*prompb.TimeSeries{}, + Timeseries: []prompb.TimeSeries{}, }, }, } { @@ -1692,7 +1692,7 @@ func TestDistributeSeries(t *testing.T) { _, remote, err := h.distributeTimeseriesToReplicas( "foo", []uint64{0}, - []*prompb.TimeSeries{ + []prompb.TimeSeries{ { Labels: labelpb.PromLabelsToLabelpbLabels(labels.FromStrings("a", "b", tenantIDLabelName, "bar")), }, @@ -1747,10 +1747,10 @@ func TestHandlerFlippingHashrings(t *testing.T) { } _, err := h.handleRequest(ctx, 0, "test", &prompb.WriteRequest{ - Timeseries: []*prompb.TimeSeries{ + Timeseries: []prompb.TimeSeries{ { Labels: labelpb.PromLabelsToLabelpbLabels(labels.FromStrings("foo", "bar")), - Samples: []*prompb.Sample{ + Samples: []prompb.Sample{ { Timestamp: time.Now().Unix(), Value: 123, diff --git a/pkg/receive/hashring_test.go b/pkg/receive/hashring_test.go index ed6f437e54..12404faed6 100644 --- a/pkg/receive/hashring_test.go +++ b/pkg/receive/hashring_test.go @@ -20,7 +20,7 @@ import ( func TestHashringGet(t *testing.T) { ts := &prompb.TimeSeries{ - Labels: []*labelpb.Label{ + Labels: []labelpb.Label{ { Name: "foo", Value: "bar", @@ -161,7 +161,7 @@ func TestHashringGet(t *testing.T) { func TestKetamaHashringGet(t *testing.T) { baseTS := &prompb.TimeSeries{ - 
Labels: []*labelpb.Label{ + Labels: []labelpb.Label{ { Name: "pod", Value: "nginx", @@ -218,7 +218,7 @@ func TestKetamaHashringGet(t *testing.T) { name: "base case with different timeseries", endpoints: []Endpoint{{Address: "node-1"}, {Address: "node-2"}, {Address: "node-3"}}, ts: &prompb.TimeSeries{ - Labels: []*labelpb.Label{ + Labels: []labelpb.Label{ { Name: "pod", Value: "thanos", @@ -382,7 +382,7 @@ func TestKetamaHashringEvenAZSpread(t *testing.T) { tenant := "default-tenant" ts := &prompb.TimeSeries{ Labels: labelpb.PromLabelsToLabelpbLabels(labels.FromStrings("foo", "bar")), - Samples: []*prompb.Sample{{Value: 1, Timestamp: 0}}, + Samples: []prompb.Sample{{Value: 1, Timestamp: 0}}, } for _, tt := range []struct { @@ -555,7 +555,7 @@ func TestKetamaHashringEvenNodeSpread(t *testing.T) { for i := 0; i < int(tt.numSeries); i++ { ts := &prompb.TimeSeries{ Labels: labelpb.PromLabelsToLabelpbLabels(labels.FromStrings("foo", fmt.Sprintf("%d", i))), - Samples: []*prompb.Sample{{Value: 1, Timestamp: 0}}, + Samples: []prompb.Sample{{Value: 1, Timestamp: 0}}, } for j := 0; j < int(tt.replicas); j++ { r, err := hashRing.GetN(tenant, ts, uint64(j)) @@ -597,7 +597,7 @@ func makeSeries() []prompb.TimeSeries { series := make([]prompb.TimeSeries, numSeries) for i := 0; i < numSeries; i++ { series[i] = prompb.TimeSeries{ - Labels: []*labelpb.Label{ + Labels: []labelpb.Label{ { Name: "pod", Value: fmt.Sprintf("nginx-%d", i), diff --git a/pkg/receive/multitsdb.go b/pkg/receive/multitsdb.go index 439d441b63..699533a234 100644 --- a/pkg/receive/multitsdb.go +++ b/pkg/receive/multitsdb.go @@ -187,14 +187,14 @@ func (l *localClient) TimeRange() (mint int64, maxt int64) { return l.store.TimeRange() } -func (l *localClient) TSDBInfos() []*infopb.TSDBInfo { +func (l *localClient) TSDBInfos() []infopb.TSDBInfo { labelsets := l.store.LabelSet() if len(labelsets) == 0 { - return []*infopb.TSDBInfo{} + return []infopb.TSDBInfo{} } mint, maxt := l.store.TimeRange() - return []*infopb.TSDBInfo{ + return []infopb.TSDBInfo{ { Labels: labelsets[0], MinTime: mint, diff --git a/pkg/receive/multitsdb_test.go b/pkg/receive/multitsdb_test.go index 283ca3b709..3308063fb7 100644 --- a/pkg/receive/multitsdb_test.go +++ b/pkg/receive/multitsdb_test.go @@ -202,12 +202,12 @@ func TestMultiTSDB(t *testing.T) { var ( expectedFooResp = &storepb.Series{ - Labels: []*labelpb.Label{{Name: "a", Value: "1"}, {Name: "b", Value: "2"}, {Name: "replica", Value: "01"}, {Name: "tenant_id", Value: "foo"}}, - Chunks: []*storepb.AggrChunk{{MinTime: 1, MaxTime: 3, Raw: &storepb.Chunk{Data: []byte("\000\003\002@\003L\235\2354X\315\001\330\r\257Mui\251\327:U"), Hash: 9768694233508509040}}}, + Labels: []labelpb.Label{{Name: "a", Value: "1"}, {Name: "b", Value: "2"}, {Name: "replica", Value: "01"}, {Name: "tenant_id", Value: "foo"}}, + Chunks: []storepb.AggrChunk{{MinTime: 1, MaxTime: 3, Raw: &storepb.Chunk{Data: []byte("\000\003\002@\003L\235\2354X\315\001\330\r\257Mui\251\327:U"), Hash: 9768694233508509040}}}, } expectedBarResp = &storepb.Series{ - Labels: []*labelpb.Label{{Name: "a", Value: "1"}, {Name: "b", Value: "2"}, {Name: "replica", Value: "01"}, {Name: "tenant_id", Value: "bar"}}, - Chunks: []*storepb.AggrChunk{{MinTime: 1, MaxTime: 3, Raw: &storepb.Chunk{Data: []byte("\000\003\002@4i\223\263\246\213\032\001\330\035i\337\322\352\323S\256t\270"), Hash: 2304287992246504442}}}, + Labels: []labelpb.Label{{Name: "a", Value: "1"}, {Name: "b", Value: "2"}, {Name: "replica", Value: "01"}, {Name: "tenant_id", Value: "bar"}}, + Chunks: 
[]storepb.AggrChunk{{MinTime: 1, MaxTime: 3, Raw: &storepb.Chunk{Data: []byte("\000\003\002@4i\223\263\246\213\032\001\330\035i\337\322\352\323S\256t\270"), Hash: 2304287992246504442}}}, } ) @@ -262,7 +262,7 @@ func getResponses(storeClient store.Client, respCh chan<- *storepb.Series) error sc, err := storeClient.Series(context.Background(), &storepb.SeriesRequest{ MinTime: 0, MaxTime: 10, - Matchers: []*storepb.LabelMatcher{{Name: "a", Value: ".*", Type: storepb.LabelMatcher_RE}}, + Matchers: []storepb.LabelMatcher{{Name: "a", Value: ".*", Type: storepb.LabelMatcher_RE}}, }) if err != nil { return err @@ -287,21 +287,21 @@ func getResponses(storeClient store.Client, respCh chan<- *storepb.Series) error var ( expectedFooRespExemplars = []exemplarspb.ExemplarData{ { - SeriesLabels: &labelpb.LabelSet{Labels: []*labelpb.Label{{Name: "a", Value: "1"}, {Name: "b", Value: "2"}, {Name: "replica", Value: "01"}, {Name: "tenant_id", Value: "foo"}}}, + SeriesLabels: labelpb.LabelSet{Labels: []labelpb.Label{{Name: "a", Value: "1"}, {Name: "b", Value: "2"}, {Name: "replica", Value: "01"}, {Name: "tenant_id", Value: "foo"}}}, Exemplars: []*exemplarspb.Exemplar{ - {Labels: &labelpb.LabelSet{}, Value: 1, Ts: 1}, - {Labels: &labelpb.LabelSet{}, Value: 2.1212, Ts: 2}, - {Labels: &labelpb.LabelSet{}, Value: 3.1313, Ts: 3}, + {Value: 1, Ts: 1}, + {Value: 2.1212, Ts: 2}, + {Value: 3.1313, Ts: 3}, }, }, } expectedBarRespExemplars = []exemplarspb.ExemplarData{ { - SeriesLabels: &labelpb.LabelSet{Labels: []*labelpb.Label{{Name: "a", Value: "1"}, {Name: "b", Value: "2"}, {Name: "replica", Value: "01"}, {Name: "tenant_id", Value: "bar"}}}, + SeriesLabels: labelpb.LabelSet{Labels: []labelpb.Label{{Name: "a", Value: "1"}, {Name: "b", Value: "2"}, {Name: "replica", Value: "01"}, {Name: "tenant_id", Value: "bar"}}}, Exemplars: []*exemplarspb.Exemplar{ - {Value: 11, Ts: 1, Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{{Name: "traceID", Value: "abc"}}}}, - {Value: 22.1212, Ts: 2, Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{{Name: "traceID", Value: "def"}}}}, - {Value: 33.1313, Ts: 3, Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{{Name: "traceID", Value: "ghi"}}}}, + {Value: 11, Ts: 1, Labels: labelpb.LabelSet{Labels: []labelpb.Label{{Name: "traceID", Value: "abc"}}}}, + {Value: 22.1212, Ts: 2, Labels: labelpb.LabelSet{Labels: []labelpb.Label{{Name: "traceID", Value: "def"}}}}, + {Value: 33.1313, Ts: 3, Labels: labelpb.LabelSet{Labels: []labelpb.Label{{Name: "traceID", Value: "ghi"}}}}, }, }, } @@ -410,7 +410,7 @@ func checkExemplarsResponse(t *testing.T, expected, data []exemplarspb.ExemplarD testutil.Equals(t, expected[i].SeriesLabels, data[i].SeriesLabels) testutil.Equals(t, len(expected[i].Exemplars), len(data[i].Exemplars)) for j := range data[i].Exemplars { - testutil.Equals(t, expected[i].Exemplars[j], data[i].Exemplars[j]) + testutil.Equals(t, *expected[i].Exemplars[j], *data[i].Exemplars[j]) } } } diff --git a/pkg/receive/writer.go b/pkg/receive/writer.go index ecc98835ed..cb84f8143b 100644 --- a/pkg/receive/writer.go +++ b/pkg/receive/writer.go @@ -182,9 +182,9 @@ func (r *Writer) Write(ctx context.Context, tenantID string, wreq *prompb.WriteR ) if hp.IsFloatHistogram() { - fh = prompb.FloatHistogramProtoToFloatHistogram(*hp) + fh = prompb.FloatHistogramProtoToFloatHistogram(hp) } else { - h = prompb.HistogramProtoToHistogram(*hp) + h = prompb.HistogramProtoToHistogram(hp) } ref, err = app.AppendHistogram(ref, lset, hp.Timestamp, h, fh) diff --git a/pkg/receive/writer_test.go 
b/pkg/receive/writer_test.go index ba6c6940dd..6e665b97e0 100644 --- a/pkg/receive/writer_test.go +++ b/pkg/receive/writer_test.go @@ -30,24 +30,24 @@ import ( func TestWriter(t *testing.T) { now := model.Now() - lbls := []*labelpb.Label{{Name: "__name__", Value: "test"}} + lbls := []labelpb.Label{{Name: "__name__", Value: "test"}} tests := map[string]struct { reqs []*prompb.WriteRequest expectedErr error - expectedIngested []*prompb.TimeSeries + expectedIngested []prompb.TimeSeries maxExemplars int64 opts *WriterOptions }{ "should error out on series with no labels": { reqs: []*prompb.WriteRequest{ { - Timeseries: []*prompb.TimeSeries{ + Timeseries: []prompb.TimeSeries{ { - Samples: []*prompb.Sample{{Value: 1, Timestamp: 10}}, + Samples: []prompb.Sample{{Value: 1, Timestamp: 10}}, }, { - Labels: []*labelpb.Label{{Name: "__name__", Value: ""}}, - Samples: []*prompb.Sample{{Value: 1, Timestamp: 10}}, + Labels: []labelpb.Label{{Name: "__name__", Value: ""}}, + Samples: []prompb.Sample{{Value: 1, Timestamp: 10}}, }, }, }, @@ -57,29 +57,29 @@ func TestWriter(t *testing.T) { "should succeed on series with valid labels": { reqs: []*prompb.WriteRequest{ { - Timeseries: []*prompb.TimeSeries{ + Timeseries: []prompb.TimeSeries{ { - Labels: append(lbls, &labelpb.Label{Name: "a", Value: "1"}, &labelpb.Label{Name: "b", Value: "2"}), - Samples: []*prompb.Sample{{Value: 1, Timestamp: 10}}, + Labels: append(lbls, labelpb.Label{Name: "a", Value: "1"}, labelpb.Label{Name: "b", Value: "2"}), + Samples: []prompb.Sample{{Value: 1, Timestamp: 10}}, }, }, }, }, expectedErr: nil, - expectedIngested: []*prompb.TimeSeries{ + expectedIngested: []prompb.TimeSeries{ { - Labels: append(lbls, &labelpb.Label{Name: "a", Value: "1"}, &labelpb.Label{Name: "b", Value: "2"}), - Samples: []*prompb.Sample{{Value: 1, Timestamp: 10}}, + Labels: append(lbls, labelpb.Label{Name: "a", Value: "1"}, labelpb.Label{Name: "b", Value: "2"}), + Samples: []prompb.Sample{{Value: 1, Timestamp: 10}}, }, }, }, "should error out and skip series with out-of-order labels": { reqs: []*prompb.WriteRequest{ { - Timeseries: []*prompb.TimeSeries{ + Timeseries: []prompb.TimeSeries{ { - Labels: append(lbls, &labelpb.Label{Name: "a", Value: "1"}, &labelpb.Label{Name: "b", Value: "1"}, &labelpb.Label{Name: "Z", Value: "1"}, &labelpb.Label{Name: "b", Value: "2"}), - Samples: []*prompb.Sample{{Value: 1, Timestamp: 10}}, + Labels: append(lbls, labelpb.Label{Name: "a", Value: "1"}, labelpb.Label{Name: "b", Value: "1"}, labelpb.Label{Name: "Z", Value: "1"}, labelpb.Label{Name: "b", Value: "2"}), + Samples: []prompb.Sample{{Value: 1, Timestamp: 10}}, }, }, }, @@ -89,10 +89,10 @@ func TestWriter(t *testing.T) { "should error out and skip series with duplicate labels": { reqs: []*prompb.WriteRequest{ { - Timeseries: []*prompb.TimeSeries{ + Timeseries: []prompb.TimeSeries{ { - Labels: append(lbls, &labelpb.Label{Name: "a", Value: "1"}, &labelpb.Label{Name: "b", Value: "1"}, &labelpb.Label{Name: "b", Value: "2"}, &labelpb.Label{Name: "z", Value: "1"}), - Samples: []*prompb.Sample{{Value: 1, Timestamp: 10}}, + Labels: append(lbls, labelpb.Label{Name: "a", Value: "1"}, labelpb.Label{Name: "b", Value: "1"}, labelpb.Label{Name: "b", Value: "2"}, labelpb.Label{Name: "z", Value: "1"}), + Samples: []prompb.Sample{{Value: 1, Timestamp: 10}}, }, }, }, @@ -102,46 +102,46 @@ func TestWriter(t *testing.T) { "should error out and skip series with out-of-order labels; accept series with valid labels": { reqs: []*prompb.WriteRequest{ { - Timeseries: []*prompb.TimeSeries{ + 
Timeseries: []prompb.TimeSeries{ { - Labels: append(lbls, &labelpb.Label{Name: "A", Value: "1"}, &labelpb.Label{Name: "b", Value: "2"}), - Samples: []*prompb.Sample{{Value: 1, Timestamp: 10}}, + Labels: append(lbls, labelpb.Label{Name: "A", Value: "1"}, labelpb.Label{Name: "b", Value: "2"}), + Samples: []prompb.Sample{{Value: 1, Timestamp: 10}}, }, { - Labels: append(lbls, &labelpb.Label{Name: "c", Value: "1"}, &labelpb.Label{Name: "d", Value: "2"}), - Samples: []*prompb.Sample{{Value: 1, Timestamp: 10}}, + Labels: append(lbls, labelpb.Label{Name: "c", Value: "1"}, labelpb.Label{Name: "d", Value: "2"}), + Samples: []prompb.Sample{{Value: 1, Timestamp: 10}}, }, { - Labels: append(lbls, &labelpb.Label{Name: "E", Value: "1"}, &labelpb.Label{Name: "f", Value: "2"}), - Samples: []*prompb.Sample{{Value: 1, Timestamp: 10}}, + Labels: append(lbls, labelpb.Label{Name: "E", Value: "1"}, labelpb.Label{Name: "f", Value: "2"}), + Samples: []prompb.Sample{{Value: 1, Timestamp: 10}}, }, }, }, }, expectedErr: errors.Wrapf(labelpb.ErrOutOfOrderLabels, "add 2 series"), - expectedIngested: []*prompb.TimeSeries{ + expectedIngested: []prompb.TimeSeries{ { - Labels: append(lbls, &labelpb.Label{Name: "c", Value: "1"}, &labelpb.Label{Name: "d", Value: "2"}), - Samples: []*prompb.Sample{{Value: 1, Timestamp: 10}}, + Labels: append(lbls, labelpb.Label{Name: "c", Value: "1"}, labelpb.Label{Name: "d", Value: "2"}), + Samples: []prompb.Sample{{Value: 1, Timestamp: 10}}, }, }, }, "should succeed when sample timestamp is NOT too far in the future": { reqs: []*prompb.WriteRequest{ { - Timeseries: []*prompb.TimeSeries{ + Timeseries: []prompb.TimeSeries{ { Labels: lbls, - Samples: []*prompb.Sample{{Value: 1, Timestamp: int64(now)}}, + Samples: []prompb.Sample{{Value: 1, Timestamp: int64(now)}}, }, }, }, }, expectedErr: nil, - expectedIngested: []*prompb.TimeSeries{ + expectedIngested: []prompb.TimeSeries{ { Labels: lbls, - Samples: []*prompb.Sample{{Value: 1, Timestamp: int64(now)}}, + Samples: []prompb.Sample{{Value: 1, Timestamp: int64(now)}}, }, }, opts: &WriterOptions{TooFarInFutureTimeWindow: 30 * int64(time.Second)}, @@ -149,11 +149,11 @@ func TestWriter(t *testing.T) { "should error out when sample timestamp is too far in the future": { reqs: []*prompb.WriteRequest{ { - Timeseries: []*prompb.TimeSeries{ + Timeseries: []prompb.TimeSeries{ { Labels: lbls, // A sample with a very large timestamp in year 5138 (milliseconds) - Samples: []*prompb.Sample{{Value: 1, Timestamp: 99999999999999}}, + Samples: []prompb.Sample{{Value: 1, Timestamp: 99999999999999}}, }, }, }, @@ -163,19 +163,19 @@ func TestWriter(t *testing.T) { }, "should succeed on valid series with exemplars": { reqs: []*prompb.WriteRequest{{ - Timeseries: []*prompb.TimeSeries{ + Timeseries: []prompb.TimeSeries{ { Labels: lbls, // Ingesting an exemplar requires a sample to create the series first. 
- Samples: []*prompb.Sample{{Value: 1, Timestamp: 10}}, - Exemplars: []*prompb.Exemplar{ + Samples: []prompb.Sample{{Value: 1, Timestamp: 10}}, + Exemplars: []prompb.Exemplar{ { - Labels: []*labelpb.Label{{Name: "traceID", Value: "123"}}, + Labels: []labelpb.Label{{Name: "traceID", Value: "123"}}, Value: 111, Timestamp: 11, }, { - Labels: []*labelpb.Label{{Name: "traceID", Value: "234"}}, + Labels: []labelpb.Label{{Name: "traceID", Value: "234"}}, Value: 112, Timestamp: 12, }, @@ -189,14 +189,14 @@ func TestWriter(t *testing.T) { "should error out on valid series with out of order exemplars": { reqs: []*prompb.WriteRequest{ { - Timeseries: []*prompb.TimeSeries{ + Timeseries: []prompb.TimeSeries{ { Labels: lbls, // Ingesting an exemplar requires a sample to create the series first. - Samples: []*prompb.Sample{{Value: 1, Timestamp: 10}}, - Exemplars: []*prompb.Exemplar{ + Samples: []prompb.Sample{{Value: 1, Timestamp: 10}}, + Exemplars: []prompb.Exemplar{ { - Labels: []*labelpb.Label{{Name: "traceID", Value: "123"}}, + Labels: []labelpb.Label{{Name: "traceID", Value: "123"}}, Value: 111, Timestamp: 11, }, @@ -205,12 +205,12 @@ func TestWriter(t *testing.T) { }, }, { - Timeseries: []*prompb.TimeSeries{ + Timeseries: []prompb.TimeSeries{ { Labels: lbls, - Exemplars: []*prompb.Exemplar{ + Exemplars: []prompb.Exemplar{ { - Labels: []*labelpb.Label{{Name: "traceID", Value: "1234"}}, + Labels: []labelpb.Label{{Name: "traceID", Value: "1234"}}, Value: 111, Timestamp: 10, }, @@ -225,14 +225,14 @@ func TestWriter(t *testing.T) { "should error out when exemplar label length exceeds the limit": { reqs: []*prompb.WriteRequest{ { - Timeseries: []*prompb.TimeSeries{ + Timeseries: []prompb.TimeSeries{ { Labels: lbls, // Ingesting an exemplar requires a sample to create the series first. 
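The writer.go hunk earlier in this diff hands the native-histogram conversion helpers the *prompb.Histogram directly (hp) instead of a dereferenced copy. A sketch of that append path, assuming the Prometheus storage.Appender interface and the import paths from the Thanos tree; the function name here is illustrative:

package receiveexample

import (
	"github.com/prometheus/prometheus/model/histogram"
	"github.com/prometheus/prometheus/model/labels"
	"github.com/prometheus/prometheus/storage"

	"github.com/thanos-io/thanos/pkg/store/storepb/prompb"
)

// appendHistogram mirrors the updated Writer.Write path: the conversion
// helpers receive the histogram proto by pointer, and the result is
// forwarded to the TSDB appender.
func appendHistogram(app storage.Appender, ref storage.SeriesRef, lset labels.Labels, hp *prompb.Histogram) (storage.SeriesRef, error) {
	var (
		h  *histogram.Histogram
		fh *histogram.FloatHistogram
	)
	if hp.IsFloatHistogram() {
		fh = prompb.FloatHistogramProtoToFloatHistogram(hp)
	} else {
		h = prompb.HistogramProtoToHistogram(hp)
	}
	return app.AppendHistogram(ref, lset, hp.Timestamp, h, fh)
}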
- Samples: []*prompb.Sample{{Value: 1, Timestamp: 10}}, - Exemplars: []*prompb.Exemplar{ + Samples: []prompb.Sample{{Value: 1, Timestamp: 10}}, + Exemplars: []prompb.Exemplar{ { - Labels: []*labelpb.Label{{Name: strings.Repeat("a", exemplar.ExemplarMaxLabelSetLength), Value: "1"}}, + Labels: []labelpb.Label{{Name: strings.Repeat("a", exemplar.ExemplarMaxLabelSetLength), Value: "1"}}, Value: 111, Timestamp: 11, }, @@ -247,10 +247,10 @@ func TestWriter(t *testing.T) { "should succeed on histogram with valid labels": { reqs: []*prompb.WriteRequest{ { - Timeseries: []*prompb.TimeSeries{ + Timeseries: []prompb.TimeSeries{ { - Labels: append(lbls, &labelpb.Label{Name: "a", Value: "1"}, &labelpb.Label{Name: "b", Value: "2"}), - Histograms: []*prompb.Histogram{ + Labels: append(lbls, labelpb.Label{Name: "a", Value: "1"}, labelpb.Label{Name: "b", Value: "2"}), + Histograms: []prompb.Histogram{ prompb.HistogramToHistogramProto(10, tsdbutil.GenerateTestHistogram(0)), }, }, @@ -258,10 +258,10 @@ func TestWriter(t *testing.T) { }, }, expectedErr: nil, - expectedIngested: []*prompb.TimeSeries{ + expectedIngested: []prompb.TimeSeries{ { - Labels: append(lbls, &labelpb.Label{Name: "a", Value: "1"}, &labelpb.Label{Name: "b", Value: "2"}), - Histograms: []*prompb.Histogram{ + Labels: append(lbls, labelpb.Label{Name: "a", Value: "1"}, labelpb.Label{Name: "b", Value: "2"}), + Histograms: []prompb.Histogram{ prompb.HistogramToHistogramProto(10, tsdbutil.GenerateTestHistogram(0)), }, }, @@ -270,10 +270,10 @@ func TestWriter(t *testing.T) { "should succeed on float histogram with valid labels": { reqs: []*prompb.WriteRequest{ { - Timeseries: []*prompb.TimeSeries{ + Timeseries: []prompb.TimeSeries{ { - Labels: append(lbls, &labelpb.Label{Name: "a", Value: "1"}, &labelpb.Label{Name: "b", Value: "2"}), - Histograms: []*prompb.Histogram{ + Labels: append(lbls, labelpb.Label{Name: "a", Value: "1"}, labelpb.Label{Name: "b", Value: "2"}), + Histograms: []prompb.Histogram{ prompb.FloatHistogramToHistogramProto(10, tsdbutil.GenerateTestFloatHistogram(1)), }, }, @@ -281,10 +281,10 @@ func TestWriter(t *testing.T) { }, }, expectedErr: nil, - expectedIngested: []*prompb.TimeSeries{ + expectedIngested: []prompb.TimeSeries{ { - Labels: append(lbls, &labelpb.Label{Name: "a", Value: "1"}, &labelpb.Label{Name: "b", Value: "2"}), - Histograms: []*prompb.Histogram{ + Labels: append(lbls, labelpb.Label{Name: "a", Value: "1"}, labelpb.Label{Name: "b", Value: "2"}), + Histograms: []prompb.Histogram{ prompb.FloatHistogramToHistogramProto(10, tsdbutil.GenerateTestFloatHistogram(1)), }, }, @@ -293,20 +293,20 @@ func TestWriter(t *testing.T) { "should error out on valid histograms with out of order histogram": { reqs: []*prompb.WriteRequest{ { - Timeseries: []*prompb.TimeSeries{ + Timeseries: []prompb.TimeSeries{ { - Labels: append(lbls, &labelpb.Label{Name: "a", Value: "1"}, &labelpb.Label{Name: "b", Value: "2"}), - Histograms: []*prompb.Histogram{ + Labels: append(lbls, labelpb.Label{Name: "a", Value: "1"}, labelpb.Label{Name: "b", Value: "2"}), + Histograms: []prompb.Histogram{ prompb.HistogramToHistogramProto(10, tsdbutil.GenerateTestHistogram(0)), }, }, }, }, { - Timeseries: []*prompb.TimeSeries{ + Timeseries: []prompb.TimeSeries{ { - Labels: append(lbls, &labelpb.Label{Name: "a", Value: "1"}, &labelpb.Label{Name: "b", Value: "2"}), - Histograms: []*prompb.Histogram{ + Labels: append(lbls, labelpb.Label{Name: "a", Value: "1"}, labelpb.Label{Name: "b", Value: "2"}), + Histograms: []prompb.Histogram{ 
prompb.HistogramToHistogramProto(9, tsdbutil.GenerateTestHistogram(0)), }, }, @@ -314,10 +314,10 @@ func TestWriter(t *testing.T) { }, }, expectedErr: errors.Wrapf(storage.ErrOutOfOrderSample, "add 1 samples"), - expectedIngested: []*prompb.TimeSeries{ + expectedIngested: []prompb.TimeSeries{ { - Labels: append(lbls, &labelpb.Label{Name: "a", Value: "1"}, &labelpb.Label{Name: "b", Value: "2"}), - Histograms: []*prompb.Histogram{ + Labels: append(lbls, labelpb.Label{Name: "a", Value: "1"}, labelpb.Label{Name: "b", Value: "2"}), + Histograms: []prompb.Histogram{ prompb.HistogramToHistogramProto(10, tsdbutil.GenerateTestHistogram(0)), }, }, @@ -488,26 +488,26 @@ func benchmarkWriter(b *testing.B, labelsNum int, seriesNum int, generateHistogr // duplicates without error (see comment https://github.com/prometheus/prometheus/blob/release-2.37/tsdb/head_append.go#L316). // This also means the sample won't be appended, which means the overhead of appending additional samples to head is not // reflected in the benchmark, but should still capture the performance of receive writer. -func generateLabelsAndSeries(numLabels int, numSeries int, generateHistograms bool) []*prompb.TimeSeries { +func generateLabelsAndSeries(numLabels int, numSeries int, generateHistograms bool) []prompb.TimeSeries { // Generate some labels first. - l := make([]*labelpb.Label, 0, numLabels) - l = append(l, &labelpb.Label{Name: "__name__", Value: "test"}) + l := make([]labelpb.Label, 0, numLabels) + l = append(l, labelpb.Label{Name: "__name__", Value: "test"}) for i := 0; i < numLabels; i++ { - l = append(l, &labelpb.Label{Name: fmt.Sprintf("label_%s", string(rune('a'+i))), Value: fmt.Sprintf("%d", i)}) + l = append(l, labelpb.Label{Name: fmt.Sprintf("label_%s", string(rune('a'+i))), Value: fmt.Sprintf("%d", i)}) } - ts := make([]*prompb.TimeSeries, numSeries) + ts := make([]prompb.TimeSeries, numSeries) for j := 0; j < numSeries; j++ { - ts[j] = &prompb.TimeSeries{ + ts[j] = prompb.TimeSeries{ Labels: l, } if generateHistograms { - ts[j].Histograms = []*prompb.Histogram{prompb.HistogramToHistogramProto(10, tsdbutil.GenerateTestHistogram(0))} + ts[j].Histograms = []prompb.Histogram{prompb.HistogramToHistogramProto(10, tsdbutil.GenerateTestHistogram(0))} continue } - ts[j].Samples = []*prompb.Sample{{Value: 1, Timestamp: 10}} + ts[j].Samples = []prompb.Sample{{Value: 1, Timestamp: 10}} } return ts diff --git a/pkg/rules/manager.go b/pkg/rules/manager.go index 503b45e540..e93b6f30aa 100644 --- a/pkg/rules/manager.go +++ b/pkg/rules/manager.go @@ -46,7 +46,7 @@ func (g Group) toProto() *rulespb.RuleGroup { Limit: int64(g.Limit()), PartialResponseStrategy: g.PartialResponseStrategy, // UTC needed due to https://github.com/gogo/protobuf/issues/519. 
- LastEvaluation: rulespb.TimeToTimestamp(g.GetLastEvaluation().UTC()), + LastEvaluation: g.GetLastEvaluation().UTC(), EvaluationDurationSeconds: g.GetEvaluationTime().Seconds(), } @@ -65,26 +65,26 @@ func (g Group) toProto() *rulespb.RuleGroup { Query: rule.Query().String(), DurationSeconds: rule.HoldDuration().Seconds(), KeepFiringForSeconds: rule.KeepFiringFor().Seconds(), - Labels: &labelpb.LabelSet{Labels: labelpb.PromLabelsToLabelpbLabels(rule.Labels())}, - Annotations: &labelpb.LabelSet{Labels: labelpb.PromLabelsToLabelpbLabels(rule.Annotations())}, + Labels: labelpb.LabelSet{Labels: labelpb.PromLabelsToLabelpbLabels(rule.Labels())}, + Annotations: labelpb.LabelSet{Labels: labelpb.PromLabelsToLabelpbLabels(rule.Annotations())}, Alerts: ActiveAlertsToProto(g.PartialResponseStrategy, rule), Health: string(rule.Health()), LastError: lastError, EvaluationDurationSeconds: rule.GetEvaluationDuration().Seconds(), // UTC needed due to https://github.com/gogo/protobuf/issues/519. - LastEvaluation: rulespb.TimeToTimestamp(rule.GetEvaluationTimestamp().UTC()), + LastEvaluation: rule.GetEvaluationTimestamp().UTC(), }}}) case *rules.RecordingRule: ret.Rules = append(ret.Rules, &rulespb.Rule{ Result: &rulespb.Rule_Recording{Recording: &rulespb.RecordingRule{ Name: rule.Name(), Query: rule.Query().String(), - Labels: &labelpb.LabelSet{Labels: labelpb.PromLabelsToLabelpbLabels(rule.Labels())}, + Labels: labelpb.LabelSet{Labels: labelpb.PromLabelsToLabelpbLabels(rule.Labels())}, Health: string(rule.Health()), LastError: lastError, EvaluationDurationSeconds: rule.GetEvaluationDuration().Seconds(), // UTC needed due to https://github.com/gogo/protobuf/issues/519. - LastEvaluation: rulespb.TimeToTimestamp(rule.GetEvaluationTimestamp().UTC()), + LastEvaluation: rule.GetEvaluationTimestamp().UTC(), }}}) default: // We cannot do much, let's panic, API will recover. @@ -102,10 +102,10 @@ func ActiveAlertsToProto(s storepb.PartialResponseStrategy, a *rules.AlertingRul activeAt := ruleAlert.ActiveAt.UTC() ret[i] = &rulespb.AlertInstance{ PartialResponseStrategy: s, - Labels: &labelpb.LabelSet{Labels: labelpb.PromLabelsToLabelpbLabels(ruleAlert.Labels)}, - Annotations: &labelpb.LabelSet{Labels: labelpb.PromLabelsToLabelpbLabels(ruleAlert.Annotations)}, + Labels: labelpb.LabelSet{Labels: labelpb.PromLabelsToLabelpbLabels(ruleAlert.Labels)}, + Annotations: labelpb.LabelSet{Labels: labelpb.PromLabelsToLabelpbLabels(ruleAlert.Annotations)}, State: rulespb.AlertState(ruleAlert.State), - ActiveAt: rulespb.TimeToTimestamp(activeAt), + ActiveAt: &activeAt, Value: strconv.FormatFloat(ruleAlert.Value, 'e', -1, 64), } } @@ -403,7 +403,7 @@ func (m *Manager) Rules(r *rulespb.RulesRequest, s rulespb.Rules_RulesServer) (e pgs := make([]*rulespb.RuleGroup, 0, len(groups)) for _, g := range groups { // UTC needed due to https://github.com/gogo/protobuf/issues/519. - g.LastEvaluation = rulespb.TimeToTimestamp(rulespb.TimestampToTime(g.LastEvaluation).UTC()) + g.LastEvaluation = g.LastEvaluation.UTC() if r.Type == rulespb.RulesRequest_ALL { pgs = append(pgs, g) continue diff --git a/pkg/rules/rules_test.go b/pkg/rules/rules_test.go index 84bf8ce113..04e4abc1bc 100644 --- a/pkg/rules/rules_test.go +++ b/pkg/rules/rules_test.go @@ -201,7 +201,7 @@ func testRulesAgainstExamples(t *testing.T, dir string, server rulespb.RulesServ } // Mask nondeterministic fields. 
got[i].EvaluationDurationSeconds = 0 - got[i].LastEvaluation = nil + got[i].LastEvaluation = time.Time{} t.Run(got[i].Name+" "+path.Base(got[i].File), func(t *testing.T) { testutil.Equals(t, expectedForType[i], got[i]) @@ -275,34 +275,34 @@ func TestDedupRules(t *testing.T) { rules: []*rulespb.Rule{ rulespb.NewAlertingRule(&rulespb.Alert{ Name: "a1", - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "a", Value: "1"}, }}}), rulespb.NewRecordingRule(&rulespb.RecordingRule{ Name: "a1", - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "a", Value: "1"}, }}}), rulespb.NewRecordingRule(&rulespb.RecordingRule{ Name: "a1", - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "a", Value: "1"}, }}}), rulespb.NewAlertingRule(&rulespb.Alert{ Name: "a1", - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "a", Value: "1"}, }}}), }, want: []*rulespb.Rule{ rulespb.NewAlertingRule(&rulespb.Alert{ Name: "a1", - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "a", Value: "1"}, }}}), rulespb.NewRecordingRule(&rulespb.RecordingRule{ Name: "a1", - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "a", Value: "1"}, }}}), }, @@ -313,30 +313,30 @@ func TestDedupRules(t *testing.T) { rulespb.NewRecordingRule(&rulespb.RecordingRule{ Name: "a1", Query: "up", - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "a", Value: "1"}, }}}), rulespb.NewRecordingRule(&rulespb.RecordingRule{ Name: "a1", Query: "up", - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "a", Value: "1"}, }}}), rulespb.NewAlertingRule(&rulespb.Alert{ Name: "a1", Query: "up", - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "a", Value: "1"}, }}}), rulespb.NewRecordingRule(&rulespb.RecordingRule{ Name: "a1", - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "a", Value: "1"}, }}}), rulespb.NewAlertingRule(&rulespb.Alert{ Name: "a1", Query: "up", - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "a", Value: "1"}, }}}), }, @@ -344,17 +344,17 @@ func TestDedupRules(t *testing.T) { rulespb.NewAlertingRule(&rulespb.Alert{ Name: "a1", Query: "up", - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "a", Value: "1"}, }}}), rulespb.NewRecordingRule(&rulespb.RecordingRule{ - Name: "a1", Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Name: "a1", Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "a", Value: "1"}, }}}), rulespb.NewRecordingRule(&rulespb.RecordingRule{ Name: "a1", Query: "up", - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "a", Value: "1"}, }}}), }, @@ -365,39 +365,39 @@ func TestDedupRules(t *testing.T) { rulespb.NewRecordingRule(&rulespb.RecordingRule{ Name: "a1", Query: "up", - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "a", Value: "1"}, }}}), 
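The rulespb hunks replace *labelpb.LabelSet with the value type, so an absent label set is now the zero labelpb.LabelSet{} rather than a nil pointer. A minimal construction sketch using the constructors that appear in these tests; import paths are assumed from the Thanos tree:

package rulesexample

import (
	"github.com/thanos-io/thanos/pkg/rules/rulespb"
	"github.com/thanos-io/thanos/pkg/store/labelpb"
)

// newAlert builds an alerting rule with the value-typed LabelSet from this
// diff; leaving Labels unset yields labelpb.LabelSet{}, not nil.
func newAlert(name string) *rulespb.Rule {
	return rulespb.NewAlertingRule(&rulespb.Alert{
		Name: name,
		Labels: labelpb.LabelSet{Labels: []labelpb.Label{
			{Name: "severity", Value: "warning"},
		}},
	})
}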
rulespb.NewRecordingRule(&rulespb.RecordingRule{ Name: "a1", Query: "up", - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "a", Value: "1"}, }}}), rulespb.NewAlertingRule(&rulespb.Alert{ Name: "a1", Query: "up", DurationSeconds: 1.0, - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "a", Value: "1"}, }}}), rulespb.NewRecordingRule(&rulespb.RecordingRule{ Name: "a1", - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "a", Value: "1"}, }}}), rulespb.NewAlertingRule(&rulespb.Alert{ Name: "a1", Query: "up", DurationSeconds: 1.0, - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "a", Value: "1"}, }}}), rulespb.NewAlertingRule(&rulespb.Alert{ Name: "a1", Query: "up", DurationSeconds: 2.0, - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "a", Value: "1"}, }}}), }, @@ -406,22 +406,22 @@ func TestDedupRules(t *testing.T) { Name: "a1", Query: "up", DurationSeconds: 1.0, - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "a", Value: "1"}, }}}), rulespb.NewAlertingRule(&rulespb.Alert{ Name: "a1", Query: "up", DurationSeconds: 2.0, - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "a", Value: "1"}, }}}), rulespb.NewRecordingRule(&rulespb.RecordingRule{ - Name: "a1", Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Name: "a1", Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "a", Value: "1"}, }}}), rulespb.NewRecordingRule(&rulespb.RecordingRule{ - Name: "a1", Query: "up", Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Name: "a1", Query: "up", Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "a", Value: "1"}, }}}), }, @@ -433,7 +433,7 @@ func TestDedupRules(t *testing.T) { Name: "a1", Query: "up", DurationSeconds: 1.0, - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "a", Value: "1"}, {Name: "replica", Value: "1"}, }}}), @@ -441,7 +441,7 @@ func TestDedupRules(t *testing.T) { Name: "a1", Query: "up", DurationSeconds: 2.0, - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "a", Value: "1"}, }}}), }, @@ -450,14 +450,14 @@ func TestDedupRules(t *testing.T) { Name: "a1", Query: "up", DurationSeconds: 1.0, - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "a", Value: "1"}, }}}), rulespb.NewAlertingRule(&rulespb.Alert{ Name: "a1", Query: "up", DurationSeconds: 2.0, - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "a", Value: "1"}, }}}), }, @@ -466,21 +466,21 @@ func TestDedupRules(t *testing.T) { { name: "replica labels", rules: []*rulespb.Rule{ - rulespb.NewRecordingRule(&rulespb.RecordingRule{Name: "a1", Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + rulespb.NewRecordingRule(&rulespb.RecordingRule{Name: "a1", Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "a", Value: "1"}, {Name: "replica", Value: "3"}, }}}), - rulespb.NewRecordingRule(&rulespb.RecordingRule{Name: "a1", Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + rulespb.NewRecordingRule(&rulespb.RecordingRule{Name: "a1", 
Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "a", Value: "1"}, {Name: "replica", Value: "1"}, }}}), - rulespb.NewRecordingRule(&rulespb.RecordingRule{Name: "a1", Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + rulespb.NewRecordingRule(&rulespb.RecordingRule{Name: "a1", Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "a", Value: "1"}, {Name: "replica", Value: "2"}, }}}), }, want: []*rulespb.Rule{ - rulespb.NewRecordingRule(&rulespb.RecordingRule{Name: "a1", Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + rulespb.NewRecordingRule(&rulespb.RecordingRule{Name: "a1", Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "a", Value: "1"}, }}}), }, @@ -489,28 +489,28 @@ func TestDedupRules(t *testing.T) { { name: "ambiguous replica labels", rules: []*rulespb.Rule{ - rulespb.NewRecordingRule(&rulespb.RecordingRule{Name: "a1", Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + rulespb.NewRecordingRule(&rulespb.RecordingRule{Name: "a1", Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "replica", Value: "1"}, {Name: "a", Value: "1"}, }}}), - rulespb.NewRecordingRule(&rulespb.RecordingRule{Name: "a1", Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + rulespb.NewRecordingRule(&rulespb.RecordingRule{Name: "a1", Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "replica", Value: "1"}, }}}), - rulespb.NewRecordingRule(&rulespb.RecordingRule{Name: "a1", Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + rulespb.NewRecordingRule(&rulespb.RecordingRule{Name: "a1", Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "replica", Value: "1"}, {Name: "a", Value: "2"}, }}}), - rulespb.NewRecordingRule(&rulespb.RecordingRule{Name: "a1", Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + rulespb.NewRecordingRule(&rulespb.RecordingRule{Name: "a1", Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "replica", Value: "1"}, {Name: "a", Value: "1"}, }}}), }, want: []*rulespb.Rule{ rulespb.NewRecordingRule(&rulespb.RecordingRule{Name: "a1"}), - rulespb.NewRecordingRule(&rulespb.RecordingRule{Name: "a1", Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + rulespb.NewRecordingRule(&rulespb.RecordingRule{Name: "a1", Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "a", Value: "1"}, }}}), - rulespb.NewRecordingRule(&rulespb.RecordingRule{Name: "a1", Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + rulespb.NewRecordingRule(&rulespb.RecordingRule{Name: "a1", Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "a", Value: "2"}, }}}), }, @@ -522,38 +522,38 @@ func TestDedupRules(t *testing.T) { rulespb.NewRecordingRule(&rulespb.RecordingRule{ Name: "a1", Query: "up", - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "replica", Value: "2"}, }}, - LastEvaluation: rulespb.TimeToTimestamp(time.Unix(0, 0)), + LastEvaluation: time.Unix(0, 0), }), rulespb.NewRecordingRule(&rulespb.RecordingRule{ Name: "a1", Query: "up", - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "replica", Value: "1"}, }}, - LastEvaluation: rulespb.TimeToTimestamp(time.Unix(1, 0)), + LastEvaluation: time.Unix(1, 0), }), rulespb.NewRecordingRule(&rulespb.RecordingRule{ Name: "a1", Query: "up", - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "replica", Value: "3"}, }}, - LastEvaluation: rulespb.TimeToTimestamp(time.Unix(3, 0)), + LastEvaluation: time.Unix(3, 0), }), 
rulespb.NewRecordingRule(&rulespb.RecordingRule{ Name: "a1", Query: "up", - LastEvaluation: rulespb.TimeToTimestamp(time.Unix(2, 0)), + LastEvaluation: time.Unix(2, 0), }), }, want: []*rulespb.Rule{ rulespb.NewRecordingRule(&rulespb.RecordingRule{ Name: "a1", Query: "up", - LastEvaluation: rulespb.TimeToTimestamp(time.Unix(3, 0)), + LastEvaluation: time.Unix(3, 0), }), }, replicaLabels: []string{"replica"}, @@ -563,74 +563,74 @@ func TestDedupRules(t *testing.T) { rules: []*rulespb.Rule{ rulespb.NewAlertingRule(&rulespb.Alert{ Name: "a1", - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "replica", Value: "2"}, }}, - LastEvaluation: rulespb.TimeToTimestamp(time.Unix(4, 0)), + LastEvaluation: time.Unix(4, 0), }), rulespb.NewAlertingRule(&rulespb.Alert{ Name: "a1", - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "replica", Value: "2"}, {Name: "foo", Value: "bar"}, }}, - LastEvaluation: rulespb.TimeToTimestamp(time.Unix(2, 0)), + LastEvaluation: time.Unix(2, 0), }), rulespb.NewAlertingRule(&rulespb.Alert{ Name: "a2", - LastEvaluation: rulespb.TimeToTimestamp(time.Unix(2, 0)), + LastEvaluation: time.Unix(2, 0), State: rulespb.AlertState_PENDING, }), rulespb.NewAlertingRule(&rulespb.Alert{ Name: "a1", - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "replica", Value: "1"}, }}, - LastEvaluation: rulespb.TimeToTimestamp(time.Unix(3, 0)), + LastEvaluation: time.Unix(3, 0), }), rulespb.NewAlertingRule(&rulespb.Alert{ Name: "a2", - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "replica", Value: "1"}, }}, - LastEvaluation: rulespb.TimeToTimestamp(time.Unix(3, 0)), + LastEvaluation: time.Unix(3, 0), State: rulespb.AlertState_PENDING, }), rulespb.NewAlertingRule(&rulespb.Alert{ Name: "a1", - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "replica", Value: "3"}, }}, - LastEvaluation: rulespb.TimeToTimestamp(time.Unix(2, 0)), + LastEvaluation: time.Unix(2, 0), State: rulespb.AlertState_FIRING, }), rulespb.NewAlertingRule(&rulespb.Alert{ Name: "a1", - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "foo", Value: "bar"}, }}, State: rulespb.AlertState_FIRING, - LastEvaluation: rulespb.TimeToTimestamp(time.Unix(1, 0)), + LastEvaluation: time.Unix(1, 0), }), }, want: []*rulespb.Rule{ rulespb.NewAlertingRule(&rulespb.Alert{ State: rulespb.AlertState_FIRING, Name: "a1", - LastEvaluation: rulespb.TimeToTimestamp(time.Unix(2, 0)), + LastEvaluation: time.Unix(2, 0), }), rulespb.NewAlertingRule(&rulespb.Alert{ State: rulespb.AlertState_FIRING, Name: "a1", - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "foo", Value: "bar"}, }}, - LastEvaluation: rulespb.TimeToTimestamp(time.Unix(1, 0)), + LastEvaluation: time.Unix(1, 0), }), rulespb.NewAlertingRule(&rulespb.Alert{ State: rulespb.AlertState_PENDING, Name: "a2", - LastEvaluation: rulespb.TimeToTimestamp(time.Unix(3, 0)), + LastEvaluation: time.Unix(3, 0), }), }, replicaLabels: []string{"replica"}, @@ -640,51 +640,51 @@ func TestDedupRules(t *testing.T) { rules: []*rulespb.Rule{ rulespb.NewAlertingRule(&rulespb.Alert{ Name: "a1", - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: 
[]labelpb.Label{ {Name: "replica", Value: "1"}, {Name: "severity", Value: "warning"}, }}, - LastEvaluation: rulespb.TimeToTimestamp(time.Unix(1, 0)), + LastEvaluation: time.Unix(1, 0), }), rulespb.NewAlertingRule(&rulespb.Alert{ Name: "a1", - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "replica", Value: "1"}, {Name: "severity", Value: "critical"}, }}, - LastEvaluation: rulespb.TimeToTimestamp(time.Unix(1, 0)), + LastEvaluation: time.Unix(1, 0), }), rulespb.NewAlertingRule(&rulespb.Alert{ Name: "a1", - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "replica", Value: "2"}, {Name: "severity", Value: "warning"}, }}, - LastEvaluation: rulespb.TimeToTimestamp(time.Unix(1, 0)), + LastEvaluation: time.Unix(1, 0), }), rulespb.NewAlertingRule(&rulespb.Alert{ Name: "a1", - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "replica", Value: "2"}, {Name: "severity", Value: "critical"}, }}, - LastEvaluation: rulespb.TimeToTimestamp(time.Unix(1, 0)), + LastEvaluation: time.Unix(1, 0), }), }, want: []*rulespb.Rule{ rulespb.NewAlertingRule(&rulespb.Alert{ Name: "a1", - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "severity", Value: "critical"}, }}, - LastEvaluation: rulespb.TimeToTimestamp(time.Unix(1, 0)), + LastEvaluation: time.Unix(1, 0), }), rulespb.NewAlertingRule(&rulespb.Alert{ Name: "a1", - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "severity", Value: "warning"}, }}, - LastEvaluation: rulespb.TimeToTimestamp(time.Unix(1, 0)), + LastEvaluation: time.Unix(1, 0), }), }, replicaLabels: []string{"replica"}, @@ -694,35 +694,35 @@ func TestDedupRules(t *testing.T) { rules: []*rulespb.Rule{ rulespb.NewAlertingRule(&rulespb.Alert{ Name: "a1", - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "replica", Value: "1"}, {Name: "label", Value: "foo"}, }}, - LastEvaluation: rulespb.TimeToTimestamp(time.Unix(1, 0)), + LastEvaluation: time.Unix(1, 0), }), rulespb.NewAlertingRule(&rulespb.Alert{ Name: "a1", - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "replica", Value: "2"}, {Name: "label", Value: "foo"}, }}, - LastEvaluation: rulespb.TimeToTimestamp(time.Unix(1, 0)), + LastEvaluation: time.Unix(1, 0), }), rulespb.NewAlertingRule(&rulespb.Alert{ Name: "a1", - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "label", Value: "foo"}, }}, - LastEvaluation: rulespb.TimeToTimestamp(time.Unix(1, 0)), + LastEvaluation: time.Unix(1, 0), }), }, want: []*rulespb.Rule{ rulespb.NewAlertingRule(&rulespb.Alert{ Name: "a1", - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "label", Value: "foo"}, }}, - LastEvaluation: rulespb.TimeToTimestamp(time.Unix(1, 0)), + LastEvaluation: time.Unix(1, 0), }), }, replicaLabels: []string{"replica"}, @@ -998,18 +998,18 @@ func TestFilterRules(t *testing.T) { Rules: []*rulespb.Rule{ rulespb.NewAlertingRule(&rulespb.Alert{ Name: "a1", - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "replica", Value: "1"}, {Name: "label", Value: "foo"}, }}, }), 
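The dedup cases above expect that, once replica labels are stripped, the copy with the most recent LastEvaluation wins; with this diff that field is a plain time.Time compared via Before/After. The following is a simplified stand-in for the rulespb Compare-based dedup, not the real implementation:

package main

import (
	"fmt"
	"time"
)

type rule struct {
	name           string
	lastEvaluation time.Time
}

// keepNewest keeps, per rule name, the entry with the newest LastEvaluation.
func keepNewest(rules []rule) []rule {
	newest := map[string]rule{}
	for _, r := range rules {
		if cur, ok := newest[r.name]; !ok || r.lastEvaluation.After(cur.lastEvaluation) {
			newest[r.name] = r
		}
	}
	out := make([]rule, 0, len(newest))
	for _, r := range newest {
		out = append(out, r)
	}
	return out
}

func main() {
	deduped := keepNewest([]rule{
		{name: "a1", lastEvaluation: time.Unix(0, 0)},
		{name: "a1", lastEvaluation: time.Unix(3, 0)},
		{name: "a1", lastEvaluation: time.Unix(2, 0)},
	})
	fmt.Println(deduped[0].lastEvaluation.Unix()) // 3
}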
rulespb.NewRecordingRule(&rulespb.RecordingRule{ - Name: "r1", Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Name: "r1", Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "label", Value: "foo"}, }}, }), rulespb.NewRecordingRule(&rulespb.RecordingRule{ - Name: "r2", Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Name: "r2", Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "otherlabel", Value: "bar"}, }}, }), @@ -1022,18 +1022,18 @@ func TestFilterRules(t *testing.T) { Rules: []*rulespb.Rule{ rulespb.NewAlertingRule(&rulespb.Alert{ Name: "a1", - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "replica", Value: "1"}, {Name: "label", Value: "foo"}, }}, }), rulespb.NewRecordingRule(&rulespb.RecordingRule{ - Name: "r1", Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Name: "r1", Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "label", Value: "foo"}, }}, }), rulespb.NewRecordingRule(&rulespb.RecordingRule{ - Name: "r2", Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Name: "r2", Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "otherlabel", Value: "bar"}, }}, }), @@ -1050,18 +1050,18 @@ func TestFilterRules(t *testing.T) { Rules: []*rulespb.Rule{ rulespb.NewAlertingRule(&rulespb.Alert{ Name: "a1", - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "replica", Value: "1"}, {Name: "label", Value: "foo"}, }}, }), rulespb.NewRecordingRule(&rulespb.RecordingRule{ - Name: "r1", Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Name: "r1", Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "label", Value: "foo"}, }}, }), rulespb.NewRecordingRule(&rulespb.RecordingRule{ - Name: "r2", Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Name: "r2", Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "otherlabel", Value: "bar"}, }}, }), @@ -1074,13 +1074,13 @@ func TestFilterRules(t *testing.T) { Rules: []*rulespb.Rule{ rulespb.NewAlertingRule(&rulespb.Alert{ Name: "a1", - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "replica", Value: "1"}, {Name: "label", Value: "foo"}, }}, }), rulespb.NewRecordingRule(&rulespb.RecordingRule{ - Name: "r1", Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Name: "r1", Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "label", Value: "foo"}, }}, }), @@ -1097,18 +1097,18 @@ func TestFilterRules(t *testing.T) { Rules: []*rulespb.Rule{ rulespb.NewAlertingRule(&rulespb.Alert{ Name: "a1", - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "replica", Value: "1"}, {Name: "label", Value: "foo"}, }}, }), rulespb.NewRecordingRule(&rulespb.RecordingRule{ - Name: "r1", Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Name: "r1", Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "label", Value: "foo"}, }}, }), rulespb.NewRecordingRule(&rulespb.RecordingRule{ - Name: "r2", Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Name: "r2", Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "otherlabel", Value: "bar"}, }}, }), @@ -1126,14 +1126,14 @@ func TestFilterRules(t *testing.T) { Rules: []*rulespb.Rule{ rulespb.NewAlertingRule(&rulespb.Alert{ Name: "a1", - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "label", Value: "foo"}, {Name: "templatedlabel", Value: "{{ 
$externalURL }}"}, }}, }), rulespb.NewAlertingRule(&rulespb.Alert{ Name: "a2", - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "label", Value: "foo"}, }}, }), @@ -1151,17 +1151,17 @@ func TestFilterRules(t *testing.T) { Rules: []*rulespb.Rule{ rulespb.NewAlertingRule(&rulespb.Alert{ Name: "a1a", - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "label", Value: "foo"}, }}, }), rulespb.NewRecordingRule(&rulespb.RecordingRule{ - Name: "r1a", Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Name: "r1a", Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "label", Value: "foo"}, }}, }), rulespb.NewRecordingRule(&rulespb.RecordingRule{ - Name: "r1b", Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Name: "r1b", Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "otherlabel", Value: "bar"}, }}, }), @@ -1172,12 +1172,12 @@ func TestFilterRules(t *testing.T) { Rules: []*rulespb.Rule{ rulespb.NewAlertingRule(&rulespb.Alert{ Name: "a1b", - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "some", Value: "label"}, }}, }), rulespb.NewRecordingRule(&rulespb.RecordingRule{ - Name: "r1b", Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Name: "r1b", Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "label", Value: "foo"}, }}, }), @@ -1190,12 +1190,12 @@ func TestFilterRules(t *testing.T) { Rules: []*rulespb.Rule{ rulespb.NewAlertingRule(&rulespb.Alert{ Name: "a1a", - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "label", Value: "foo"}, }}, }), rulespb.NewRecordingRule(&rulespb.RecordingRule{ - Name: "r1a", Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Name: "r1a", Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "label", Value: "foo"}, }}, }), @@ -1205,7 +1205,7 @@ func TestFilterRules(t *testing.T) { Name: "b", Rules: []*rulespb.Rule{ rulespb.NewRecordingRule(&rulespb.RecordingRule{ - Name: "r1b", Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Name: "r1b", Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "label", Value: "foo"}, }}, }), @@ -1222,17 +1222,17 @@ func TestFilterRules(t *testing.T) { Rules: []*rulespb.Rule{ rulespb.NewAlertingRule(&rulespb.Alert{ Name: "a1a", - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "label", Value: "foo"}, }}, }), rulespb.NewRecordingRule(&rulespb.RecordingRule{ - Name: "r1a", Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Name: "r1a", Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "label", Value: "foo"}, }}, }), rulespb.NewRecordingRule(&rulespb.RecordingRule{ - Name: "r1b", Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Name: "r1b", Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "otherlabel", Value: "bar"}, }}, }), @@ -1243,12 +1243,12 @@ func TestFilterRules(t *testing.T) { Rules: []*rulespb.Rule{ rulespb.NewAlertingRule(&rulespb.Alert{ Name: "a1b", - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "some", Value: "label"}, }}, }), rulespb.NewRecordingRule(&rulespb.RecordingRule{ - Name: "r1b", Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Name: "r1b", Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "label", Value: "foo"}, }}, }), @@ -1266,7 +1266,7 @@ func TestFilterRules(t 
*testing.T) { Rules: []*rulespb.Rule{ rulespb.NewAlertingRule(&rulespb.Alert{ Name: "a1a", - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "templatedlabel", Value: "{{ $externalURL }}"}, }}, }), @@ -1277,7 +1277,7 @@ func TestFilterRules(t *testing.T) { Rules: []*rulespb.Rule{ rulespb.NewAlertingRule(&rulespb.Alert{ Name: "a1b", - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "templated", Value: "{{ $externalURL }}"}, }}, }), @@ -1295,7 +1295,7 @@ func TestFilterRules(t *testing.T) { Rules: []*rulespb.Rule{ rulespb.NewAlertingRule(&rulespb.Alert{ Name: "a1a", - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "templatedlabel", Value: "{{ $externalURL }}"}, }}, }), @@ -1306,7 +1306,7 @@ func TestFilterRules(t *testing.T) { Rules: []*rulespb.Rule{ rulespb.NewAlertingRule(&rulespb.Alert{ Name: "a1b", - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "templated", Value: "{{ $externalURL }}"}, }}, }), @@ -1324,17 +1324,17 @@ func TestFilterRules(t *testing.T) { Rules: []*rulespb.Rule{ rulespb.NewAlertingRule(&rulespb.Alert{ Name: "a1a", - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "label", Value: "foo"}, }}, }), rulespb.NewRecordingRule(&rulespb.RecordingRule{ - Name: "r1a", Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Name: "r1a", Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "label", Value: "foo"}, }}, }), rulespb.NewRecordingRule(&rulespb.RecordingRule{ - Name: "r1b", Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Name: "r1b", Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "otherlabel", Value: "bar"}, }}, }), @@ -1345,12 +1345,12 @@ func TestFilterRules(t *testing.T) { Rules: []*rulespb.Rule{ rulespb.NewAlertingRule(&rulespb.Alert{ Name: "a1b", - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "some", Value: "label"}, }}, }), rulespb.NewRecordingRule(&rulespb.RecordingRule{ - Name: "r1b", Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Name: "r1b", Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "label", Value: "foo"}, }}, }), @@ -1363,12 +1363,12 @@ func TestFilterRules(t *testing.T) { Rules: []*rulespb.Rule{ rulespb.NewAlertingRule(&rulespb.Alert{ Name: "a1a", - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "label", Value: "foo"}, }}, }), rulespb.NewRecordingRule(&rulespb.RecordingRule{ - Name: "r1a", Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Name: "r1a", Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "label", Value: "foo"}, }}, }), @@ -1378,7 +1378,7 @@ func TestFilterRules(t *testing.T) { Name: "b", Rules: []*rulespb.Rule{ rulespb.NewRecordingRule(&rulespb.RecordingRule{ - Name: "r1b", Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Name: "r1b", Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "label", Value: "foo"}, }}, }), @@ -1395,18 +1395,18 @@ func TestFilterRules(t *testing.T) { Rules: []*rulespb.Rule{ rulespb.NewAlertingRule(&rulespb.Alert{ Name: "a1", - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "replica", Value: "1"}, {Name: "label", Value: "foo"}, }}, }), 
rulespb.NewRecordingRule(&rulespb.RecordingRule{ - Name: "r1", Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Name: "r1", Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "label", Value: "foo"}, }}, }), rulespb.NewRecordingRule(&rulespb.RecordingRule{ - Name: "r2", Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Name: "r2", Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "otherlabel", Value: "bar"}, }}, }), @@ -1419,7 +1419,7 @@ func TestFilterRules(t *testing.T) { Rules: []*rulespb.Rule{ rulespb.NewAlertingRule(&rulespb.Alert{ Name: "a1", - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "replica", Value: "1"}, {Name: "label", Value: "foo"}, }}, @@ -1650,18 +1650,18 @@ func TestFilterRules(t *testing.T) { Rules: []*rulespb.Rule{ rulespb.NewAlertingRule(&rulespb.Alert{ Name: "a1", - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "replica", Value: "1"}, {Name: "label", Value: "foo"}, }}, }), rulespb.NewRecordingRule(&rulespb.RecordingRule{ - Name: "r1", Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Name: "r1", Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "label", Value: "foo"}, }}, }), rulespb.NewRecordingRule(&rulespb.RecordingRule{ - Name: "r2", Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Name: "r2", Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "otherlabel", Value: "bar"}, }}, }), @@ -1674,7 +1674,7 @@ func TestFilterRules(t *testing.T) { Rules: []*rulespb.Rule{ rulespb.NewAlertingRule(&rulespb.Alert{ Name: "a1", - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "replica", Value: "1"}, {Name: "label", Value: "foo"}, }}, diff --git a/pkg/rules/rulespb/custom.go b/pkg/rules/rulespb/custom.go index 7e261ff328..bb1ee31996 100644 --- a/pkg/rules/rulespb/custom.go +++ b/pkg/rules/rulespb/custom.go @@ -21,50 +21,6 @@ const ( RuleAlertingType = "alerting" ) -func TimestampToTime(ts *Timestamp) time.Time { - var tm time.Time - if ts == nil { - tm = time.Unix(0, 0).UTC() // treat nil like the empty Timestamp - } else { - tm = time.Unix(ts.Seconds, int64(ts.Nanos)).UTC() - } - return tm -} - -func TimeToTimestamp(t time.Time) *Timestamp { - if t.IsZero() { - ts := &Timestamp{} - ts.Seconds = time.Time{}.Unix() - return ts - } - ts := &Timestamp{ - Seconds: t.Unix(), - Nanos: int32(t.Nanosecond()), - } - - return ts -} - -func (m *Timestamp) MarshalJSON() ([]byte, error) { - ret := TimestampToTime(m) - return json.Marshal(ret) -} - -func (m *Timestamp) UnmarshalJSON(data []byte) error { - ret := time.Time{} - err := json.Unmarshal(data, &ret) - if err != nil { - return err - } - - actualTimestamp := TimeToTimestamp(ret) - - m.Seconds = actualTimestamp.Seconds - m.Nanos = actualTimestamp.Nanos - - return nil -} - func NewRuleGroupRulesResponse(rg *RuleGroup) *RulesResponse { return &RulesResponse{ Result: &RulesResponse_Group{ @@ -99,11 +55,11 @@ func NewRecordingRule(r *RecordingRule) *Rule { // // Note: This method assumes r1 and r2 are logically equal as per Rule#Compare. 
func (r1 *RecordingRule) Compare(r2 *RecordingRule) int { - if TimestampToTime(r1.LastEvaluation).Before(TimestampToTime(r2.LastEvaluation)) { + if r1.LastEvaluation.Before(r2.LastEvaluation) { return 1 } - if TimestampToTime(r1.LastEvaluation).After(TimestampToTime(r2.LastEvaluation)) { + if r1.LastEvaluation.After(r2.LastEvaluation) { return -1 } @@ -128,10 +84,10 @@ func (r *Rule) GetLabels() labels.Labels { } func (r *Rule) SetLabels(ls labels.Labels) { - var result *labelpb.LabelSet + var result labelpb.LabelSet if !ls.IsEmpty() { - result = &labelpb.LabelSet{Labels: labelpb.PromLabelsToLabelpbLabels(ls)} + result = labelpb.LabelSet{Labels: labelpb.PromLabelsToLabelpbLabels(ls)} } switch { @@ -164,14 +120,14 @@ func (r *Rule) GetQuery() string { } } -func (r *Rule) GetLastEvaluation() *Timestamp { +func (r *Rule) GetLastEvaluation() time.Time { switch { case r.GetRecording() != nil: return r.GetRecording().LastEvaluation case r.GetAlert() != nil: return r.GetAlert().LastEvaluation default: - return &Timestamp{} + return time.Time{} } } @@ -295,12 +251,6 @@ func (m *Rule) MarshalJSON() ([]byte, error) { // Ensure that empty slices are marshaled as '[]' and not 'null'. a.Alerts = make([]*AlertInstance, 0) } - - for _, ai := range a.Alerts { - if ai.ActiveAt == nil { - ai.ActiveAt = &Timestamp{} - } - } return json.Marshal(struct { *Alert Type string `json:"type"` @@ -315,13 +265,6 @@ func (r *RuleGroup) MarshalJSON() ([]byte, error) { // Ensure that empty slices are marshaled as '[]' and not 'null'. r.Rules = make([]*Rule, 0) } - for _, r := range r.Rules { - if r.GetAlert() != nil { - if r.GetAlert().Annotations == nil { - r.GetAlert().Annotations = &labelpb.LabelSet{} - } - } - } type plain RuleGroup return json.Marshal((*plain)(r)) } @@ -376,11 +319,11 @@ func (a1 *Alert) Compare(a2 *Alert) int { return d } - if TimestampToTime(a1.LastEvaluation).Before(TimestampToTime(a2.LastEvaluation)) { + if a1.LastEvaluation.Before(a2.LastEvaluation) { return 1 } - if TimestampToTime(a1.LastEvaluation).After(TimestampToTime(a2.LastEvaluation)) { + if a1.LastEvaluation.After(a2.LastEvaluation) { return -1 } diff --git a/pkg/rules/rulespb/custom_test.go b/pkg/rules/rulespb/custom_test.go index 860d565bb4..358258b469 100644 --- a/pkg/rules/rulespb/custom_test.go +++ b/pkg/rules/rulespb/custom_test.go @@ -5,6 +5,7 @@ package rulespb import ( "encoding/json" + "fmt" "testing" "time" @@ -18,9 +19,8 @@ import ( ) func TestJSONUnmarshalMarshal(t *testing.T) { - ntime := time.Now() - now := TimeToTimestamp(ntime) - twoHoursAgo := TimeToTimestamp(ntime.Add(2 * time.Hour)) + now := time.Now() + twoHoursAgo := now.Add(2 * time.Hour) for _, tcase := range []struct { name string @@ -44,7 +44,7 @@ func TestJSONUnmarshalMarshal(t *testing.T) { Name: "group1", File: "file1.yml", Interval: 2442, - LastEvaluation: TimestampToTime(now), + LastEvaluation: now, EvaluationTime: 2.1, PartialResponseStrategy: "ABORT", }, @@ -95,7 +95,7 @@ func TestJSONUnmarshalMarshal(t *testing.T) { }, File: "file1.yml", Interval: 2442, - LastEvaluation: TimestampToTime(now), + LastEvaluation: now, EvaluationTime: 2.1, PartialResponseStrategy: "ABORT", }, @@ -117,7 +117,7 @@ func TestJSONUnmarshalMarshal(t *testing.T) { }, File: "file1.yml", Interval: 2442, - LastEvaluation: TimestampToTime(now), + LastEvaluation: now, EvaluationTime: 2.1, PartialResponseStrategy: "ABORT", }, @@ -140,7 +140,7 @@ func TestJSONUnmarshalMarshal(t *testing.T) { }, File: "file1.yml", Interval: 2442, - LastEvaluation: TimestampToTime(now), + LastEvaluation: 
now, EvaluationTime: 2.1, PartialResponseStrategy: "ABORT", }, @@ -156,7 +156,7 @@ func TestJSONUnmarshalMarshal(t *testing.T) { Name: "group1", File: "file1.yml", Interval: 2442, - LastEvaluation: TimestampToTime(now), + LastEvaluation: now, EvaluationTime: 2.1, PartialResponseStrategy: "asdfsdfsdfsd", }, @@ -200,14 +200,14 @@ func TestJSONUnmarshalMarshal(t *testing.T) { NewAlertingRule(&Alert{ Name: "alert1", Query: "up == 0", - Labels: &labelpb.LabelSet{ - Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{ + Labels: []labelpb.Label{ {Name: "a2", Value: "b2"}, {Name: "c2", Value: "d2"}, }, }, - Annotations: &labelpb.LabelSet{ - Labels: []*labelpb.Label{ + Annotations: labelpb.LabelSet{ + Labels: []labelpb.Label{ {Name: "ann1", Value: "ann44"}, {Name: "ann2", Value: "ann33"}, }, @@ -248,7 +248,7 @@ func TestJSONUnmarshalMarshal(t *testing.T) { ), LastError: "2", Health: "health", - LastEvaluation: TimestampToTime(now).Add(-2 * time.Minute), + LastEvaluation: now.Add(-2 * time.Minute), EvaluationTime: 2.6, }, testpromcompatibility.AlertingRule{ @@ -269,7 +269,7 @@ func TestJSONUnmarshalMarshal(t *testing.T) { Labels: labels.FromStrings("instance1", "1"), Annotations: labels.FromStrings("annotation1", "2"), State: "inactive", - ActiveAt: time.Time{}, + ActiveAt: nil, Value: "1", PartialResponseStrategy: "WARN", }, @@ -277,7 +277,7 @@ func TestJSONUnmarshalMarshal(t *testing.T) { Labels: labels.EmptyLabels(), Annotations: labels.EmptyLabels(), State: "firing", - ActiveAt: TimestampToTime(twoHoursAgo), + ActiveAt: &twoHoursAgo, Value: "2143", PartialResponseStrategy: "ABORT", }, @@ -285,13 +285,13 @@ func TestJSONUnmarshalMarshal(t *testing.T) { LastError: "1", Duration: 60, State: "pending", - LastEvaluation: TimestampToTime(now).Add(-1 * time.Minute), + LastEvaluation: now.Add(-1 * time.Minute), EvaluationTime: 1.1, }, }, File: "file1.yml", Interval: 2442, - LastEvaluation: TimestampToTime(now), + LastEvaluation: now, EvaluationTime: 2.1, PartialResponseStrategy: "ABORT", }, @@ -299,7 +299,7 @@ func TestJSONUnmarshalMarshal(t *testing.T) { Name: "group2", File: "file2.yml", Interval: 242342442, - LastEvaluation: TimestampToTime(now).Add(40 * time.Hour), + LastEvaluation: now.Add(40 * time.Hour), EvaluationTime: 21244.1, PartialResponseStrategy: "ABORT", }, @@ -313,41 +313,41 @@ func TestJSONUnmarshalMarshal(t *testing.T) { NewRecordingRule(&RecordingRule{ Query: "up", Name: "recording1", - Labels: &labelpb.LabelSet{ - Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{ + Labels: []labelpb.Label{ {Name: "a", Value: "b"}, {Name: "c", Value: "d"}, }, }, LastError: "2", Health: "health", - LastEvaluation: TimeToTimestamp(TimestampToTime(now).Add(-2 * time.Minute)), + LastEvaluation: now.Add(-2 * time.Minute), EvaluationDurationSeconds: 2.6, }), NewAlertingRule(&Alert{ Name: "alert1", Query: "up == 0", - Labels: &labelpb.LabelSet{ - Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{ + Labels: []labelpb.Label{ {Name: "a2", Value: "b2"}, {Name: "c2", Value: "d2"}, }, }, - Annotations: &labelpb.LabelSet{ - Labels: []*labelpb.Label{ + Annotations: labelpb.LabelSet{ + Labels: []labelpb.Label{ {Name: "ann1", Value: "ann44"}, {Name: "ann2", Value: "ann33"}, }, }, Alerts: []*AlertInstance{ { - Labels: &labelpb.LabelSet{ - Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{ + Labels: []labelpb.Label{ {Name: "instance1", Value: "1"}, }, }, - Annotations: &labelpb.LabelSet{ - Labels: []*labelpb.Label{ + Annotations: labelpb.LabelSet{ + Labels: []labelpb.Label{ {Name: "annotation1", Value: "2"}, 
}, }, @@ -358,7 +358,7 @@ func TestJSONUnmarshalMarshal(t *testing.T) { }, { State: AlertState_FIRING, - ActiveAt: twoHoursAgo, + ActiveAt: &twoHoursAgo, Value: "2143", PartialResponseStrategy: storepb.PartialResponseStrategy_ABORT, }, @@ -367,7 +367,7 @@ func TestJSONUnmarshalMarshal(t *testing.T) { State: AlertState_PENDING, LastError: "1", Health: "health2", - LastEvaluation: TimeToTimestamp(TimestampToTime(now).Add(-1 * time.Minute)), + LastEvaluation: now.Add(-1 * time.Minute), EvaluationDurationSeconds: 1.1, }), }, @@ -381,7 +381,7 @@ func TestJSONUnmarshalMarshal(t *testing.T) { Name: "group2", File: "file2.yml", Interval: 242342442, - LastEvaluation: TimeToTimestamp(TimestampToTime(now).Add(40 * time.Hour)), + LastEvaluation: now.Add(40 * time.Hour), EvaluationDurationSeconds: 21244.1, PartialResponseStrategy: storepb.PartialResponseStrategy_ABORT, Rules: []*Rule{}, @@ -402,6 +402,8 @@ func TestJSONUnmarshalMarshal(t *testing.T) { return } testutil.Ok(t, err) + fmt.Println(proto.String()) + testutil.Equals(t, tcase.expectedProto.String(), proto.String()) jsonProto, err := json.Marshal(proto) testutil.Ok(t, err) @@ -449,7 +451,7 @@ func TestRulesComparator(t *testing.T) { r1: NewAlertingRule(&Alert{Name: "a"}), r2: NewAlertingRule(&Alert{ Name: "a", - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "a", Value: "1"}, }}}), want: -1, @@ -458,12 +460,12 @@ func TestRulesComparator(t *testing.T) { name: "label ordering", r1: NewAlertingRule(&Alert{ Name: "a", - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "a", Value: "1"}, }}}), r2: NewAlertingRule(&Alert{ Name: "a", - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "a", Value: "2"}, }}}), want: -1, @@ -472,12 +474,12 @@ func TestRulesComparator(t *testing.T) { name: "multiple label ordering", r1: NewAlertingRule(&Alert{ Name: "a", - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "a", Value: "1"}, }}}), r2: NewAlertingRule(&Alert{ Name: "a", - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "a", Value: "1"}, {Name: "b", Value: "1"}, }}}), @@ -488,13 +490,13 @@ func TestRulesComparator(t *testing.T) { r1: NewAlertingRule(&Alert{ Name: "a", DurationSeconds: 0.0, - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "a", Value: "1"}, }}}), r2: NewAlertingRule(&Alert{ Name: "a", DurationSeconds: 1.0, - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "a", Value: "1"}, }}}), want: -1, diff --git a/pkg/rules/rulespb/rpc.pb.go b/pkg/rules/rulespb/rpc.pb.go index 7547383355..bb5ba32189 100644 --- a/pkg/rules/rulespb/rpc.pb.go +++ b/pkg/rules/rulespb/rpc.pb.go @@ -7,22 +7,28 @@ import ( context "context" encoding_binary "encoding/binary" fmt "fmt" + + _ "github.com/gogo/protobuf/gogoproto" + proto "github.com/gogo/protobuf/proto" + github_com_gogo_protobuf_types "github.com/gogo/protobuf/types" + grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" + io "io" math "math" math_bits "math/bits" + time "time" - proto "github.com/gogo/protobuf/proto" labelpb "github.com/thanos-io/thanos/pkg/store/labelpb" storepb "github.com/thanos-io/thanos/pkg/store/storepb" - 
grpc "google.golang.org/grpc" - codes "google.golang.org/grpc/codes" - status "google.golang.org/grpc/status" ) // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal var _ = fmt.Errorf var _ = math.Inf +var _ = time.Kitchen // This is a compile-time assertion to ensure that this generated file // is compatible with the proto package it is being compiled against. @@ -106,9 +112,6 @@ type RulesRequest struct { RuleName []string `protobuf:"bytes,4,rep,name=rule_name,json=ruleName,proto3" json:"rule_name,omitempty"` RuleGroup []string `protobuf:"bytes,5,rep,name=rule_group,json=ruleGroup,proto3" json:"rule_group,omitempty"` File []string `protobuf:"bytes,6,rep,name=file,proto3" json:"file,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` } func (m *RulesRequest) Reset() { *m = RulesRequest{} } @@ -144,56 +147,11 @@ func (m *RulesRequest) XXX_DiscardUnknown() { var xxx_messageInfo_RulesRequest proto.InternalMessageInfo -func (m *RulesRequest) GetType() RulesRequest_Type { - if m != nil { - return m.Type - } - return RulesRequest_ALL -} - -func (m *RulesRequest) GetPartialResponseStrategy() storepb.PartialResponseStrategy { - if m != nil { - return m.PartialResponseStrategy - } - return storepb.PartialResponseStrategy_WARN -} - -func (m *RulesRequest) GetMatcherString() []string { - if m != nil { - return m.MatcherString - } - return nil -} - -func (m *RulesRequest) GetRuleName() []string { - if m != nil { - return m.RuleName - } - return nil -} - -func (m *RulesRequest) GetRuleGroup() []string { - if m != nil { - return m.RuleGroup - } - return nil -} - -func (m *RulesRequest) GetFile() []string { - if m != nil { - return m.File - } - return nil -} - type RulesResponse struct { // Types that are valid to be assigned to Result: // *RulesResponse_Group // *RulesResponse_Warning - Result isRulesResponse_Result `protobuf_oneof:"result"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Result isRulesResponse_Result `protobuf_oneof:"result"` } func (m *RulesResponse) Reset() { *m = RulesResponse{} } @@ -281,11 +239,7 @@ func (*RulesResponse) XXX_OneofWrappers() []interface{} { // / // / For rule parsing from YAML configuration other struct is used: https://github.com/prometheus/prometheus/blob/20b1f596f6fb16107ef0c244d240b0ad6da36829/pkg/rulefmt/rulefmt.go#L105 type RuleGroups struct { - // @gotags: json:"groups" - Groups []*RuleGroup `protobuf:"bytes,1,rep,name=groups,proto3" json:"groups"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Groups []*RuleGroup `protobuf:"bytes,1,rep,name=groups,proto3" json:"groups"` } func (m *RuleGroups) Reset() { *m = RuleGroups{} } @@ -321,35 +275,17 @@ func (m *RuleGroups) XXX_DiscardUnknown() { var xxx_messageInfo_RuleGroups proto.InternalMessageInfo -func (m *RuleGroups) GetGroups() []*RuleGroup { - if m != nil { - return m.Groups - } - return nil -} - // / RuleGroup has info for rules which are part of a group. 
type RuleGroup struct { - // @gotags: json:"name" - Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name"` - // @gotags: json:"file" - File string `protobuf:"bytes,2,opt,name=file,proto3" json:"file"` - // @gotags: json:"rules" - Rules []*Rule `protobuf:"bytes,3,rep,name=rules,proto3" json:"rules"` - // @gotags: json:"interval" - Interval float64 `protobuf:"fixed64,4,opt,name=interval,proto3" json:"interval"` - // @gotags: json:"evaluationTime" - EvaluationDurationSeconds float64 `protobuf:"fixed64,5,opt,name=evaluation_duration_seconds,json=evaluationDurationSeconds,proto3" json:"evaluationTime"` - // @gotags: json:"lastEvaluation" - LastEvaluation *Timestamp `protobuf:"bytes,6,opt,name=last_evaluation,json=lastEvaluation,proto3" json:"lastEvaluation"` - // @gotags: json:"limit" - Limit int64 `protobuf:"varint,9,opt,name=limit,proto3" json:"limit"` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name"` + File string `protobuf:"bytes,2,opt,name=file,proto3" json:"file"` + Rules []*Rule `protobuf:"bytes,3,rep,name=rules,proto3" json:"rules"` + Interval float64 `protobuf:"fixed64,4,opt,name=interval,proto3" json:"interval"` + EvaluationDurationSeconds float64 `protobuf:"fixed64,5,opt,name=evaluation_duration_seconds,json=evaluationDurationSeconds,proto3" json:"evaluationTime"` + LastEvaluation time.Time `protobuf:"bytes,6,opt,name=last_evaluation,json=lastEvaluation,proto3,stdtime" json:"lastEvaluation"` + Limit int64 `protobuf:"varint,9,opt,name=limit,proto3" json:"limit"` // Thanos specific. - // @gotags: json:"partialResponseStrategy" PartialResponseStrategy storepb.PartialResponseStrategy `protobuf:"varint,8,opt,name=PartialResponseStrategy,proto3,enum=thanos.PartialResponseStrategy" json:"partialResponseStrategy"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` } func (m *RuleGroup) Reset() { *m = RuleGroup{} } @@ -385,71 +321,11 @@ func (m *RuleGroup) XXX_DiscardUnknown() { var xxx_messageInfo_RuleGroup proto.InternalMessageInfo -func (m *RuleGroup) GetName() string { - if m != nil { - return m.Name - } - return "" -} - -func (m *RuleGroup) GetFile() string { - if m != nil { - return m.File - } - return "" -} - -func (m *RuleGroup) GetRules() []*Rule { - if m != nil { - return m.Rules - } - return nil -} - -func (m *RuleGroup) GetInterval() float64 { - if m != nil { - return m.Interval - } - return 0 -} - -func (m *RuleGroup) GetEvaluationDurationSeconds() float64 { - if m != nil { - return m.EvaluationDurationSeconds - } - return 0 -} - -func (m *RuleGroup) GetLastEvaluation() *Timestamp { - if m != nil { - return m.LastEvaluation - } - return nil -} - -func (m *RuleGroup) GetLimit() int64 { - if m != nil { - return m.Limit - } - return 0 -} - -func (m *RuleGroup) GetPartialResponseStrategy() storepb.PartialResponseStrategy { - if m != nil { - return m.PartialResponseStrategy - } - return storepb.PartialResponseStrategy_WARN -} - type Rule struct { // Types that are valid to be assigned to Result: - // // *Rule_Recording // *Rule_Alert - Result isRule_Result `protobuf_oneof:"result"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Result isRule_Result `protobuf_oneof:"result"` } func (m *Rule) Reset() { *m = Rule{} } @@ -531,22 +407,13 @@ func (*Rule) XXX_OneofWrappers() []interface{} { } type AlertInstance struct { - // @gotags: json:"labels" - Labels *labelpb.LabelSet `protobuf:"bytes,1,opt,name=labels,proto3" json:"labels"` - // 
@gotags: json:"annotations" - Annotations *labelpb.LabelSet `protobuf:"bytes,2,opt,name=annotations,proto3" json:"annotations"` - // @gotags: json:"state" - State AlertState `protobuf:"varint,3,opt,name=state,proto3,enum=thanos.AlertState" json:"state"` - // @gotags: json:"activeAt,omitempty" - ActiveAt *Timestamp `protobuf:"bytes,4,opt,name=active_at,json=activeAt,proto3" json:"activeAt,omitempty"` - // @gotags: json:"value" - Value string `protobuf:"bytes,5,opt,name=value,proto3" json:"value"` + Labels labelpb.LabelSet `protobuf:"bytes,1,opt,name=labels,proto3" json:"labels"` + Annotations labelpb.LabelSet `protobuf:"bytes,2,opt,name=annotations,proto3" json:"annotations"` + State AlertState `protobuf:"varint,3,opt,name=state,proto3,enum=thanos.AlertState" json:"state"` + ActiveAt *time.Time `protobuf:"bytes,4,opt,name=active_at,json=activeAt,proto3,stdtime" json:"activeAt,omitempty"` + Value string `protobuf:"bytes,5,opt,name=value,proto3" json:"value"` // Thanos specific. Used mainly for alert API purposes. - // @gotags: json:"partialResponseStrategy" PartialResponseStrategy storepb.PartialResponseStrategy `protobuf:"varint,6,opt,name=PartialResponseStrategy,proto3,enum=thanos.PartialResponseStrategy" json:"partialResponseStrategy"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` } func (m *AlertInstance) Reset() { *m = AlertInstance{} } @@ -582,77 +449,20 @@ func (m *AlertInstance) XXX_DiscardUnknown() { var xxx_messageInfo_AlertInstance proto.InternalMessageInfo -func (m *AlertInstance) GetLabels() *labelpb.LabelSet { - if m != nil { - return m.Labels - } - return nil -} - -func (m *AlertInstance) GetAnnotations() *labelpb.LabelSet { - if m != nil { - return m.Annotations - } - return nil -} - -func (m *AlertInstance) GetState() AlertState { - if m != nil { - return m.State - } - return AlertState_INACTIVE -} - -func (m *AlertInstance) GetActiveAt() *Timestamp { - if m != nil { - return m.ActiveAt - } - return nil -} - -func (m *AlertInstance) GetValue() string { - if m != nil { - return m.Value - } - return "" -} - -func (m *AlertInstance) GetPartialResponseStrategy() storepb.PartialResponseStrategy { - if m != nil { - return m.PartialResponseStrategy - } - return storepb.PartialResponseStrategy_WARN -} - type Alert struct { - // / state returns the maximum state of alert instances for this rule. 
- // @gotags: json:"state" - State AlertState `protobuf:"varint,1,opt,name=state,proto3,enum=thanos.AlertState" json:"state"` - // @gotags: json:"name" - Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name"` - // @gotags: json:"query" - Query string `protobuf:"bytes,3,opt,name=query,proto3" json:"query"` - // @gotags: json:"duration" - DurationSeconds float64 `protobuf:"fixed64,4,opt,name=duration_seconds,json=durationSeconds,proto3" json:"duration"` - // @gotags: json:"labels" - Labels *labelpb.LabelSet `protobuf:"bytes,5,opt,name=labels,proto3" json:"labels"` - // @gotags: json:"annotations" - Annotations *labelpb.LabelSet `protobuf:"bytes,6,opt,name=annotations,proto3" json:"annotations"` - // @gotags: json:"alerts" - Alerts []*AlertInstance `protobuf:"bytes,7,rep,name=alerts,proto3" json:"alerts"` - // @gotags: json:"health" - Health string `protobuf:"bytes,8,opt,name=health,proto3" json:"health"` - // @gotags: json:"lastError,omitempty" - LastError string `protobuf:"bytes,9,opt,name=last_error,json=lastError,proto3" json:"lastError,omitempty"` - // @gotags: json:"evaluationTime" - EvaluationDurationSeconds float64 `protobuf:"fixed64,10,opt,name=evaluation_duration_seconds,json=evaluationDurationSeconds,proto3" json:"evaluationTime"` - // @gotags: json:"lastEvaluation,omitempty" - LastEvaluation *Timestamp `protobuf:"bytes,11,opt,name=last_evaluation,json=lastEvaluation,proto3" json:"lastEvaluation,omitempty"` - // @gotags: json:"keepFiringFor" - KeepFiringForSeconds float64 `protobuf:"fixed64,12,opt,name=keep_firing_for_seconds,json=keepFiringForSeconds,proto3" json:"keepFiringFor"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + /// state returns the maximum state of alert instances for this rule. 
+ State AlertState `protobuf:"varint,1,opt,name=state,proto3,enum=thanos.AlertState" json:"state"` + Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name"` + Query string `protobuf:"bytes,3,opt,name=query,proto3" json:"query"` + DurationSeconds float64 `protobuf:"fixed64,4,opt,name=duration_seconds,json=durationSeconds,proto3" json:"duration"` + Labels labelpb.LabelSet `protobuf:"bytes,5,opt,name=labels,proto3" json:"labels"` + Annotations labelpb.LabelSet `protobuf:"bytes,6,opt,name=annotations,proto3" json:"annotations"` + Alerts []*AlertInstance `protobuf:"bytes,7,rep,name=alerts,proto3" json:"alerts"` + Health string `protobuf:"bytes,8,opt,name=health,proto3" json:"health"` + LastError string `protobuf:"bytes,9,opt,name=last_error,json=lastError,proto3" json:"lastError,omitempty"` + EvaluationDurationSeconds float64 `protobuf:"fixed64,10,opt,name=evaluation_duration_seconds,json=evaluationDurationSeconds,proto3" json:"evaluationTime"` + LastEvaluation time.Time `protobuf:"bytes,11,opt,name=last_evaluation,json=lastEvaluation,proto3,stdtime" json:"lastEvaluation"` + KeepFiringForSeconds float64 `protobuf:"fixed64,12,opt,name=keep_firing_for_seconds,json=keepFiringForSeconds,proto3" json:"keepFiringFor"` } func (m *Alert) Reset() { *m = Alert{} } @@ -688,108 +498,14 @@ func (m *Alert) XXX_DiscardUnknown() { var xxx_messageInfo_Alert proto.InternalMessageInfo -func (m *Alert) GetState() AlertState { - if m != nil { - return m.State - } - return AlertState_INACTIVE -} - -func (m *Alert) GetName() string { - if m != nil { - return m.Name - } - return "" -} - -func (m *Alert) GetQuery() string { - if m != nil { - return m.Query - } - return "" -} - -func (m *Alert) GetDurationSeconds() float64 { - if m != nil { - return m.DurationSeconds - } - return 0 -} - -func (m *Alert) GetLabels() *labelpb.LabelSet { - if m != nil { - return m.Labels - } - return nil -} - -func (m *Alert) GetAnnotations() *labelpb.LabelSet { - if m != nil { - return m.Annotations - } - return nil -} - -func (m *Alert) GetAlerts() []*AlertInstance { - if m != nil { - return m.Alerts - } - return nil -} - -func (m *Alert) GetHealth() string { - if m != nil { - return m.Health - } - return "" -} - -func (m *Alert) GetLastError() string { - if m != nil { - return m.LastError - } - return "" -} - -func (m *Alert) GetEvaluationDurationSeconds() float64 { - if m != nil { - return m.EvaluationDurationSeconds - } - return 0 -} - -func (m *Alert) GetLastEvaluation() *Timestamp { - if m != nil { - return m.LastEvaluation - } - return nil -} - -func (m *Alert) GetKeepFiringForSeconds() float64 { - if m != nil { - return m.KeepFiringForSeconds - } - return 0 -} - type RecordingRule struct { - // @gotags: json:"name" - Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name"` - // @gotags: json:"query" - Query string `protobuf:"bytes,2,opt,name=query,proto3" json:"query"` - // @gotags: json:"labels" - Labels *labelpb.LabelSet `protobuf:"bytes,3,opt,name=labels,proto3" json:"labels"` - // @gotags: json:"health" - Health string `protobuf:"bytes,4,opt,name=health,proto3" json:"health"` - // @gotags: json:"lastError,omitempty" - LastError string `protobuf:"bytes,5,opt,name=last_error,json=lastError,proto3" json:"lastError,omitempty"` - // @gotags: json:"evaluationTime" - EvaluationDurationSeconds float64 `protobuf:"fixed64,6,opt,name=evaluation_duration_seconds,json=evaluationDurationSeconds,proto3" json:"evaluationTime"` - // @gotags: json:"lastEvaluation" - LastEvaluation *Timestamp 
`protobuf:"bytes,7,opt,name=last_evaluation,json=lastEvaluation,proto3" json:"lastEvaluation"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name"` + Query string `protobuf:"bytes,2,opt,name=query,proto3" json:"query"` + Labels labelpb.LabelSet `protobuf:"bytes,3,opt,name=labels,proto3" json:"labels"` + Health string `protobuf:"bytes,4,opt,name=health,proto3" json:"health"` + LastError string `protobuf:"bytes,5,opt,name=last_error,json=lastError,proto3" json:"lastError,omitempty"` + EvaluationDurationSeconds float64 `protobuf:"fixed64,6,opt,name=evaluation_duration_seconds,json=evaluationDurationSeconds,proto3" json:"evaluationTime"` + LastEvaluation time.Time `protobuf:"bytes,7,opt,name=last_evaluation,json=lastEvaluation,proto3,stdtime" json:"lastEvaluation"` } func (m *RecordingRule) Reset() { *m = RecordingRule{} } @@ -825,117 +541,6 @@ func (m *RecordingRule) XXX_DiscardUnknown() { var xxx_messageInfo_RecordingRule proto.InternalMessageInfo -func (m *RecordingRule) GetName() string { - if m != nil { - return m.Name - } - return "" -} - -func (m *RecordingRule) GetQuery() string { - if m != nil { - return m.Query - } - return "" -} - -func (m *RecordingRule) GetLabels() *labelpb.LabelSet { - if m != nil { - return m.Labels - } - return nil -} - -func (m *RecordingRule) GetHealth() string { - if m != nil { - return m.Health - } - return "" -} - -func (m *RecordingRule) GetLastError() string { - if m != nil { - return m.LastError - } - return "" -} - -func (m *RecordingRule) GetEvaluationDurationSeconds() float64 { - if m != nil { - return m.EvaluationDurationSeconds - } - return 0 -} - -func (m *RecordingRule) GetLastEvaluation() *Timestamp { - if m != nil { - return m.LastEvaluation - } - return nil -} - -type Timestamp struct { - // Represents seconds of UTC time since Unix epoch - // 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to - // 9999-12-31T23:59:59Z inclusive. - Seconds int64 `protobuf:"varint,1,opt,name=seconds,proto3" json:"seconds,omitempty"` - // Non-negative fractions of a second at nanosecond resolution. Negative - // second values with fractions must still have non-negative nanos values - // that count forward in time. Must be from 0 to 999,999,999 - // inclusive. 
- Nanos int32 `protobuf:"varint,2,opt,name=nanos,proto3" json:"nanos,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *Timestamp) Reset() { *m = Timestamp{} } -func (m *Timestamp) String() string { return proto.CompactTextString(m) } -func (*Timestamp) ProtoMessage() {} -func (*Timestamp) Descriptor() ([]byte, []int) { - return fileDescriptor_91b1d28f30eb5efb, []int{8} -} -func (m *Timestamp) XXX_Unmarshal(b []byte) error { - return m.Unmarshal(b) -} -func (m *Timestamp) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_Timestamp.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalToSizedBuffer(b) - if err != nil { - return nil, err - } - return b[:n], nil - } -} -func (m *Timestamp) XXX_Merge(src proto.Message) { - xxx_messageInfo_Timestamp.Merge(m, src) -} -func (m *Timestamp) XXX_Size() int { - return m.Size() -} -func (m *Timestamp) XXX_DiscardUnknown() { - xxx_messageInfo_Timestamp.DiscardUnknown(m) -} - -var xxx_messageInfo_Timestamp proto.InternalMessageInfo - -func (m *Timestamp) GetSeconds() int64 { - if m != nil { - return m.Seconds - } - return 0 -} - -func (m *Timestamp) GetNanos() int32 { - if m != nil { - return m.Nanos - } - return 0 -} - func init() { proto.RegisterEnum("thanos.AlertState", AlertState_name, AlertState_value) proto.RegisterEnum("thanos.RulesRequest_Type", RulesRequest_Type_name, RulesRequest_Type_value) @@ -947,72 +552,81 @@ func init() { proto.RegisterType((*AlertInstance)(nil), "thanos.AlertInstance") proto.RegisterType((*Alert)(nil), "thanos.Alert") proto.RegisterType((*RecordingRule)(nil), "thanos.RecordingRule") - proto.RegisterType((*Timestamp)(nil), "thanos.Timestamp") } func init() { proto.RegisterFile("rules/rulespb/rpc.proto", fileDescriptor_91b1d28f30eb5efb) } var fileDescriptor_91b1d28f30eb5efb = []byte{ - // 930 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x56, 0xe1, 0x6e, 0xdb, 0x36, - 0x10, 0xb6, 0x64, 0x4b, 0xb6, 0xce, 0x71, 0xea, 0x11, 0xc9, 0xa2, 0xa4, 0x58, 0x66, 0x08, 0xe8, - 0xe0, 0x0c, 0xa8, 0x33, 0xb8, 0xe8, 0x06, 0x6c, 0xc0, 0x06, 0xb7, 0x71, 0x1a, 0x03, 0x41, 0x56, - 0xd0, 0xc1, 0x80, 0x6d, 0x3f, 0x3c, 0xc6, 0x61, 0x12, 0x61, 0xb2, 0xa4, 0x92, 0x74, 0x86, 0xbc, - 0xc8, 0x9e, 0x65, 0x7b, 0x83, 0xfd, 0xdc, 0x23, 0x14, 0x79, 0x8f, 0x01, 0x03, 0x8f, 0x92, 0x2d, - 0xb7, 0x6e, 0xe3, 0xa6, 0x7f, 0x0c, 0xf2, 0xbe, 0x4f, 0x77, 0xe2, 0x77, 0xf7, 0xd1, 0x82, 0x2d, - 0x31, 0x8d, 0xb8, 0xdc, 0xc7, 0xdf, 0xf4, 0x6c, 0x5f, 0xa4, 0xe3, 0x4e, 0x2a, 0x12, 0x95, 0x10, - 0x57, 0x5d, 0xb1, 0x38, 0x91, 0x3b, 0xdb, 0x52, 0x25, 0x82, 0xef, 0xe3, 0x6f, 0x7a, 0xb6, 0xaf, - 0x6e, 0x52, 0x2e, 0x0d, 0x25, 0x87, 0x22, 0x76, 0xc6, 0xa3, 0x45, 0x28, 0xf8, 0xdb, 0x86, 0x35, - 0xaa, 0x73, 0x52, 0xfe, 0x6a, 0xca, 0xa5, 0x22, 0x8f, 0xa1, 0xa2, 0x71, 0xdf, 0x6a, 0x59, 0xed, - 0xf5, 0xee, 0x76, 0xc7, 0x64, 0xef, 0x14, 0x39, 0x9d, 0xd3, 0x9b, 0x94, 0x53, 0xa4, 0x91, 0x5f, - 0x61, 0x3b, 0x65, 0x42, 0x85, 0x2c, 0x1a, 0x09, 0x2e, 0xd3, 0x24, 0x96, 0x7c, 0x24, 0x95, 0x60, - 0x8a, 0x5f, 0xde, 0xf8, 0x36, 0xe6, 0xf8, 0x3c, 0xcf, 0xf1, 0xd2, 0x10, 0x69, 0xc6, 0x1b, 0x66, - 0x34, 0xba, 0x95, 0x2e, 0x07, 0xc8, 0x23, 0x58, 0x9f, 0x30, 0x35, 0xbe, 0xe2, 0x42, 0xe7, 0x0c, - 0xe3, 0x4b, 0xbf, 0xdc, 0x2a, 0xb7, 0x3d, 0xda, 0xc8, 0xa2, 0x43, 0x0c, 0x92, 0x87, 0xe0, 0x69, - 0x59, 0x46, 0x31, 0x9b, 0x70, 0xbf, 0x82, 0x8c, 0x9a, 0x0e, 0x9c, 0xb0, 0x09, 0x27, 
0x9f, 0x01, - 0x20, 0x78, 0x29, 0x92, 0x69, 0xea, 0x3b, 0x88, 0x22, 0xfd, 0x85, 0x0e, 0x10, 0x02, 0x95, 0x8b, - 0x30, 0xe2, 0xbe, 0x8b, 0x00, 0xae, 0x83, 0x2f, 0xa0, 0xa2, 0x4f, 0x48, 0xaa, 0x50, 0xee, 0x1d, - 0x1f, 0x37, 0x4b, 0xc4, 0x03, 0xa7, 0x77, 0xdc, 0xa7, 0xa7, 0x4d, 0x8b, 0x00, 0xb8, 0xb4, 0xff, - 0xfc, 0x47, 0x7a, 0xd0, 0xb4, 0x83, 0xdf, 0xa0, 0x91, 0xc9, 0x62, 0xde, 0x9b, 0xec, 0x81, 0x63, - 0xca, 0x68, 0xf1, 0xea, 0xdd, 0x4f, 0x8a, 0xe2, 0x61, 0xb9, 0xa3, 0x12, 0x35, 0x0c, 0xb2, 0x03, - 0xd5, 0x3f, 0x98, 0x88, 0xf5, 0x99, 0xb4, 0x4a, 0xde, 0x51, 0x89, 0xe6, 0x81, 0x67, 0x35, 0x70, - 0x05, 0x97, 0xd3, 0x48, 0x05, 0xdf, 0x00, 0xcc, 0x9e, 0x95, 0x64, 0x0f, 0x5c, 0x7c, 0x58, 0xfa, - 0x56, 0xab, 0xbc, 0x34, 0x3f, 0xcd, 0x08, 0xc1, 0x6b, 0x1b, 0x3c, 0x5a, 0x3c, 0x24, 0x6a, 0xa3, - 0x5f, 0xcb, 0xa3, 0xb8, 0x9e, 0x1d, 0xdc, 0x36, 0x31, 0xbd, 0x26, 0x01, 0x38, 0x38, 0x5f, 0x28, - 0x73, 0xbd, 0xbb, 0x56, 0xcc, 0x4f, 0x0d, 0x44, 0x76, 0xa0, 0x16, 0xc6, 0x8a, 0x8b, 0x6b, 0x16, - 0xf9, 0x95, 0x96, 0xd5, 0xb6, 0xe8, 0x6c, 0x4f, 0xbe, 0x87, 0x87, 0xfc, 0x9a, 0x45, 0x53, 0xa6, - 0xc2, 0x24, 0x1e, 0x9d, 0x4f, 0x85, 0x59, 0x48, 0x3e, 0x4e, 0xe2, 0x73, 0xe9, 0x3b, 0x48, 0xdf, - 0x9e, 0x53, 0x0e, 0x32, 0xc6, 0xd0, 0x10, 0xc8, 0xb7, 0xf0, 0x20, 0x62, 0x52, 0x8d, 0xe6, 0x0c, - 0xdf, 0x5d, 0x54, 0xf2, 0x34, 0x9c, 0x70, 0xa9, 0xd8, 0x24, 0xa5, 0xeb, 0x9a, 0xd9, 0x9f, 0x11, - 0xc9, 0x06, 0x38, 0x51, 0x38, 0x09, 0x95, 0xef, 0xb5, 0xac, 0x76, 0x99, 0x9a, 0x0d, 0xf9, 0x19, - 0xb6, 0xde, 0x31, 0x75, 0x7e, 0x6d, 0xc5, 0xe1, 0x7c, 0x07, 0x10, 0xc4, 0x50, 0xd1, 0xba, 0x90, - 0xa7, 0xe0, 0x09, 0x3e, 0x4e, 0xc4, 0xb9, 0xee, 0xa5, 0x69, 0xfc, 0xe6, 0x4c, 0xb8, 0x1c, 0xd0, - 0xcc, 0xa3, 0x12, 0x9d, 0x33, 0xc9, 0x23, 0x70, 0x58, 0xc4, 0x85, 0xc2, 0x06, 0xd4, 0xbb, 0x8d, - 0xfc, 0x91, 0x9e, 0x0e, 0xea, 0x39, 0x41, 0xb4, 0x30, 0x0b, 0x7f, 0xd9, 0xd0, 0x40, 0x70, 0x10, - 0x4b, 0xc5, 0xe2, 0x31, 0x27, 0x6d, 0x70, 0xd1, 0xd2, 0x32, 0x2b, 0xdb, 0xcc, 0x73, 0x1c, 0xeb, - 0xe8, 0x90, 0x2b, 0x9a, 0xe1, 0xa4, 0x0b, 0x75, 0x16, 0xc7, 0x89, 0x42, 0xa9, 0x64, 0x56, 0xf2, - 0x6d, 0x7a, 0x91, 0x44, 0xda, 0xe0, 0x48, 0xc5, 0x14, 0xf7, 0xcb, 0x28, 0x14, 0x59, 0x78, 0xc1, - 0xa1, 0x46, 0xa8, 0x21, 0x90, 0x0e, 0x78, 0x6c, 0xac, 0xc2, 0x6b, 0x3e, 0x62, 0x0a, 0x67, 0x62, - 0x69, 0xc3, 0x6a, 0x86, 0xd3, 0x53, 0xba, 0x55, 0xba, 0x6f, 0x1c, 0x07, 0xc2, 0xa3, 0x66, 0xf3, - 0xbe, 0x56, 0xb9, 0x1f, 0xd9, 0xaa, 0xff, 0xca, 0xe0, 0xe0, 0x6b, 0xcf, 0x0f, 0x65, 0xdd, 0x75, - 0xa8, 0xdc, 0x33, 0x76, 0xc1, 0x33, 0x1b, 0xe0, 0xbc, 0x9a, 0x72, 0x71, 0x83, 0x92, 0x78, 0xd4, - 0x6c, 0xc8, 0x1e, 0x34, 0xdf, 0x1a, 0x75, 0xe3, 0x8c, 0x07, 0xe7, 0x6f, 0x0c, 0xf8, 0xbc, 0x63, - 0xce, 0x87, 0x75, 0xcc, 0x5d, 0xa5, 0x63, 0x8f, 0xc1, 0xc5, 0xa1, 0x91, 0x7e, 0x15, 0xfd, 0xbb, - 0xb9, 0x70, 0xba, 0x7c, 0x6c, 0x68, 0x46, 0x22, 0x9f, 0x82, 0x7b, 0xc5, 0x59, 0xa4, 0xae, 0xd0, - 0x0a, 0x1e, 0xcd, 0x76, 0xfa, 0xc6, 0x34, 0x2e, 0x14, 0x22, 0x11, 0x68, 0x27, 0x8f, 0x7a, 0xe8, - 0x36, 0x1d, 0xb8, 0xcb, 0xe4, 0x70, 0x0f, 0x93, 0xd7, 0x57, 0x35, 0xf9, 0x53, 0xd8, 0xfa, 0x9d, - 0xf3, 0x74, 0x74, 0x11, 0xea, 0x8b, 0x7f, 0x74, 0x91, 0x88, 0x59, 0xdd, 0x35, 0xac, 0xbb, 0xa1, - 0xe1, 0x43, 0x44, 0x0f, 0x13, 0x91, 0x95, 0x0c, 0xfe, 0xb4, 0xa1, 0xb1, 0x60, 0xc5, 0xa5, 0x37, - 0xe2, 0xac, 0xbb, 0x76, 0xb1, 0xbb, 0xf3, 0x96, 0x95, 0xef, 0x68, 0xd9, 0x5c, 0xcf, 0xca, 0x7b, - 0xf4, 0x74, 0x3e, 0x50, 0x4f, 0xf7, 0x1e, 0x7a, 0x56, 0x57, 0xd4, 0x33, 0xf8, 0x0e, 0xbc, 0x19, - 0x48, 0x7c, 0xa8, 0xe6, 0x45, 0x2d, 0xbc, 0x43, 0xf3, 0xad, 0x56, 0x26, 0xd6, 0x99, 0x50, 0x19, - 0x87, 0x9a, 
0xcd, 0x97, 0x4f, 0x00, 0xe6, 0xb6, 0x21, 0x6b, 0x50, 0x1b, 0x9c, 0xf4, 0x9e, 0x9f, - 0x0e, 0x7e, 0xea, 0x37, 0x4b, 0xa4, 0x0e, 0xd5, 0x97, 0xfd, 0x93, 0x83, 0xc1, 0xc9, 0x0b, 0xf3, - 0x9f, 0x79, 0x38, 0xa0, 0x7a, 0x6d, 0x77, 0x7f, 0x00, 0x07, 0xff, 0x33, 0xc9, 0xd7, 0xf9, 0x62, - 0x63, 0xd9, 0x27, 0xc6, 0xce, 0xe6, 0x1b, 0x51, 0xe3, 0xe8, 0xaf, 0xac, 0x67, 0x9b, 0xff, 0xdc, - 0xee, 0x5a, 0xff, 0xde, 0xee, 0x5a, 0xaf, 0x6f, 0x77, 0xad, 0x5f, 0xaa, 0xd9, 0xf7, 0xd0, 0x99, - 0x8b, 0x9f, 0x33, 0x4f, 0xfe, 0x0f, 0x00, 0x00, 0xff, 0xff, 0x02, 0xfc, 0x7c, 0x3f, 0x27, 0x09, - 0x00, 0x00, + // 1098 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x56, 0xcd, 0x6e, 0x23, 0x45, + 0x10, 0xf6, 0xd8, 0x9e, 0xb1, 0xa7, 0x1c, 0x67, 0xbd, 0xbd, 0xbb, 0xca, 0x24, 0x0b, 0x9e, 0xc8, + 0x52, 0x50, 0x40, 0xac, 0x8d, 0x12, 0xed, 0x02, 0x27, 0x14, 0xe7, 0x5f, 0x8a, 0xc2, 0xaa, 0x1d, + 0x71, 0x80, 0x83, 0xe9, 0x38, 0x1d, 0x67, 0xc4, 0x78, 0x66, 0xb6, 0xa7, 0x1d, 0x94, 0xb7, 0xd8, + 0x2b, 0x0f, 0xc2, 0x81, 0x37, 0xc8, 0x05, 0x69, 0x8f, 0x9c, 0x0c, 0x24, 0x37, 0x1f, 0x78, 0x06, + 0xd4, 0xd5, 0x33, 0x1e, 0x27, 0x9b, 0x90, 0x5d, 0x08, 0x17, 0x77, 0xf7, 0x57, 0x5f, 0xf5, 0x4f, + 0xd5, 0x57, 0xe5, 0x81, 0x39, 0x31, 0xf4, 0x79, 0xdc, 0xc2, 0xdf, 0xe8, 0xb0, 0x25, 0xa2, 0x5e, + 0x33, 0x12, 0xa1, 0x0c, 0x89, 0x25, 0x4f, 0x58, 0x10, 0xc6, 0x0b, 0xf3, 0xb1, 0x0c, 0x05, 0x6f, + 0xe1, 0x6f, 0x74, 0xd8, 0x92, 0x67, 0x11, 0x8f, 0x35, 0x25, 0x35, 0xf9, 0xec, 0x90, 0xfb, 0xd7, + 0x4c, 0x8f, 0xfb, 0x61, 0x3f, 0xc4, 0x69, 0x4b, 0xcd, 0x12, 0xd4, 0xed, 0x87, 0x61, 0xdf, 0xe7, + 0x2d, 0x5c, 0x1d, 0x0e, 0x8f, 0x5b, 0xd2, 0x1b, 0xf0, 0x58, 0xb2, 0x41, 0xa4, 0x09, 0x8d, 0x5f, + 0xf2, 0x30, 0x43, 0xd5, 0x55, 0x28, 0x7f, 0x35, 0xe4, 0xb1, 0x24, 0xcf, 0xa0, 0xa8, 0xb6, 0x75, + 0x8c, 0x45, 0x63, 0x79, 0x76, 0x65, 0xbe, 0xa9, 0x2f, 0xd5, 0x9c, 0xe6, 0x34, 0x0f, 0xce, 0x22, + 0x4e, 0x91, 0x46, 0xbe, 0x83, 0xf9, 0x88, 0x09, 0xe9, 0x31, 0xbf, 0x2b, 0x78, 0x1c, 0x85, 0x41, + 0xcc, 0xbb, 0xb1, 0x14, 0x4c, 0xf2, 0xfe, 0x99, 0x93, 0xc7, 0x3d, 0xdc, 0x74, 0x8f, 0x97, 0x9a, + 0x48, 0x13, 0x5e, 0x27, 0xa1, 0xd1, 0xb9, 0xe8, 0x66, 0x03, 0x59, 0x82, 0xd9, 0x01, 0x93, 0xbd, + 0x13, 0x2e, 0xd4, 0x9e, 0x5e, 0xd0, 0x77, 0x0a, 0x8b, 0x85, 0x65, 0x9b, 0x56, 0x13, 0xb4, 0x83, + 0x20, 0x79, 0x0a, 0xb6, 0x8a, 0x66, 0x37, 0x60, 0x03, 0xee, 0x14, 0x91, 0x51, 0x56, 0xc0, 0x3e, + 0x1b, 0x70, 0xf2, 0x21, 0x00, 0x1a, 0xfb, 0x22, 0x1c, 0x46, 0x8e, 0x89, 0x56, 0xa4, 0x6f, 0x2b, + 0x80, 0x10, 0x28, 0x1e, 0x7b, 0x3e, 0x77, 0x2c, 0x34, 0xe0, 0xbc, 0xf1, 0x11, 0x14, 0xd5, 0x0b, + 0x49, 0x09, 0x0a, 0x6b, 0x7b, 0x7b, 0xb5, 0x1c, 0xb1, 0xc1, 0x5c, 0xdb, 0xdb, 0xa4, 0x07, 0x35, + 0x83, 0x00, 0x58, 0x74, 0x73, 0xfd, 0x6b, 0xba, 0x51, 0xcb, 0x37, 0xbe, 0x87, 0x6a, 0x12, 0x16, + 0x7d, 0x6f, 0xf2, 0x31, 0x98, 0xfa, 0x18, 0x15, 0xbc, 0xca, 0xca, 0xc3, 0xe9, 0xe0, 0xe1, 0x71, + 0x3b, 0x39, 0xaa, 0x19, 0x64, 0x01, 0x4a, 0x3f, 0x32, 0x11, 0xa8, 0x37, 0xa9, 0x28, 0xd9, 0x3b, + 0x39, 0x9a, 0x02, 0xed, 0x32, 0x58, 0x82, 0xc7, 0x43, 0x5f, 0x36, 0xd6, 0x01, 0x26, 0xbe, 0x31, + 0x79, 0x0e, 0x16, 0x3a, 0xc7, 0x8e, 0xb1, 0x58, 0xb8, 0x71, 0xff, 0x36, 0x8c, 0x47, 0x6e, 0x42, + 0xa2, 0xc9, 0xd8, 0xf8, 0xab, 0x00, 0xf6, 0x84, 0x41, 0x3e, 0x80, 0x22, 0xc6, 0x49, 0x5d, 0xd1, + 0x6e, 0x97, 0xc7, 0x23, 0x17, 0xd7, 0x14, 0x7f, 0x95, 0x15, 0xc3, 0x91, 0xcf, 0xac, 0x6a, 0xad, + 0x03, 0x43, 0x9e, 0x81, 0x89, 0xb2, 0xc5, 0x34, 0x54, 0x56, 0x66, 0xa6, 0xcf, 0x6f, 0xdb, 0xe3, + 0x91, 0xab, 0xcd, 0x54, 0x0f, 0x64, 0x19, 0xca, 0x5e, 0x20, 0xb9, 0x38, 
0x65, 0xbe, 0x53, 0x5c, + 0x34, 0x96, 0x8d, 0xf6, 0xcc, 0x78, 0xe4, 0x4e, 0x30, 0x3a, 0x99, 0x11, 0x0a, 0x4f, 0xf9, 0x29, + 0xf3, 0x87, 0x4c, 0x7a, 0x61, 0xd0, 0x3d, 0x1a, 0x0a, 0x3d, 0x89, 0x79, 0x2f, 0x0c, 0x8e, 0x62, + 0xc7, 0x44, 0x67, 0x32, 0x1e, 0xb9, 0xb3, 0x19, 0xed, 0xc0, 0x1b, 0x70, 0x3a, 0x9f, 0xad, 0x37, + 0x12, 0xaf, 0x8e, 0x76, 0x22, 0x5d, 0x78, 0xe0, 0xb3, 0x58, 0x76, 0x33, 0x86, 0x63, 0x61, 0x5a, + 0x16, 0x9a, 0xba, 0x28, 0x9a, 0x69, 0x51, 0x34, 0x0f, 0xd2, 0xa2, 0x68, 0x2f, 0x9c, 0x8f, 0xdc, + 0x9c, 0x3a, 0x47, 0xb9, 0x6e, 0x4e, 0x3c, 0x5f, 0xff, 0xee, 0x1a, 0xf4, 0x1a, 0x46, 0x5c, 0x30, + 0x7d, 0x6f, 0xe0, 0x49, 0xc7, 0x5e, 0x34, 0x96, 0x0b, 0xfa, 0xfd, 0x08, 0x50, 0x3d, 0x90, 0x53, + 0x98, 0xbb, 0x45, 0xf2, 0x4e, 0xf9, 0x9d, 0x2a, 0xa3, 0xfd, 0x74, 0x3c, 0x72, 0x6f, 0xab, 0x0e, + 0x7a, 0xdb, 0xe6, 0x8d, 0x00, 0x8a, 0x2a, 0x23, 0xe4, 0x39, 0xd8, 0x82, 0xf7, 0x42, 0x71, 0xa4, + 0x54, 0xa6, 0x25, 0xf9, 0x64, 0x92, 0xb2, 0xd4, 0xa0, 0x98, 0x3b, 0x39, 0x9a, 0x31, 0xc9, 0x12, + 0x98, 0xcc, 0xe7, 0x42, 0xa2, 0x08, 0x2a, 0x2b, 0xd5, 0xd4, 0x65, 0x4d, 0x81, 0x4a, 0xc1, 0x68, + 0x9d, 0x52, 0xe9, 0xcf, 0x05, 0xa8, 0xa2, 0x71, 0x37, 0x88, 0x25, 0x0b, 0x7a, 0x9c, 0x7c, 0x01, + 0x16, 0xf6, 0xa8, 0x38, 0x39, 0xb6, 0x96, 0xee, 0xb1, 0xa7, 0xd0, 0x0e, 0x97, 0xed, 0xd9, 0x24, + 0xd0, 0x09, 0x8f, 0x26, 0x23, 0xd9, 0x86, 0x0a, 0x0b, 0x82, 0x50, 0x62, 0x88, 0xe3, 0xe4, 0x0a, + 0x6f, 0xbb, 0x3f, 0x4a, 0xdc, 0xa7, 0xc9, 0x74, 0x7a, 0x41, 0x56, 0xc1, 0x8c, 0x25, 0x93, 0xdc, + 0x29, 0x60, 0xa8, 0xc9, 0x95, 0x57, 0x74, 0x94, 0x45, 0x67, 0x0c, 0x49, 0x54, 0x0f, 0xa4, 0x03, + 0x36, 0xeb, 0x49, 0xef, 0x94, 0x77, 0x99, 0x44, 0xc9, 0xde, 0xa1, 0x96, 0xf1, 0xc8, 0x25, 0xda, + 0x61, 0x4d, 0x7e, 0x1a, 0x0e, 0x3c, 0xc9, 0x07, 0x91, 0x3c, 0x43, 0xb5, 0x94, 0x53, 0x5c, 0xe9, + 0x44, 0x89, 0x86, 0xa3, 0x8c, 0x6d, 0x7d, 0x2a, 0x02, 0x54, 0x0f, 0xff, 0xa4, 0x13, 0xeb, 0xff, + 0xd4, 0xc9, 0xaf, 0x26, 0x98, 0x18, 0x8e, 0x2c, 0x58, 0xc6, 0x7b, 0x04, 0x2b, 0xed, 0x24, 0xf9, + 0x1b, 0x3b, 0x89, 0x0b, 0xe6, 0xab, 0x21, 0x17, 0x67, 0x18, 0xff, 0xe4, 0xd5, 0x08, 0x50, 0x3d, + 0x90, 0xcf, 0xa1, 0xf6, 0x56, 0xa1, 0x4f, 0x75, 0x89, 0xd4, 0x46, 0x1f, 0x1c, 0x5d, 0x2b, 0xec, + 0x4c, 0x5c, 0xe6, 0x7f, 0x13, 0x97, 0xf5, 0xaf, 0xc5, 0xf5, 0x25, 0x58, 0x58, 0x04, 0xb1, 0x53, + 0xc2, 0x4e, 0xf8, 0xe4, 0x4a, 0xc0, 0xd2, 0x32, 0xd0, 0xdd, 0x58, 0x13, 0x69, 0x32, 0x92, 0x06, + 0x58, 0x27, 0x9c, 0xf9, 0xf2, 0x04, 0x7b, 0x80, 0xad, 0x39, 0x1a, 0xa1, 0xc9, 0x48, 0x5e, 0x00, + 0xe8, 0xd6, 0x25, 0x44, 0x28, 0xb0, 0xbd, 0xd8, 0xed, 0xb9, 0xf1, 0xc8, 0x7d, 0x84, 0x1d, 0x48, + 0x81, 0x99, 0xd8, 0xa8, 0x3d, 0x01, 0xef, 0x6a, 0xa3, 0x70, 0x4f, 0x6d, 0xb4, 0x72, 0xaf, 0x6d, + 0x74, 0x07, 0xe6, 0x7e, 0xe0, 0x3c, 0xea, 0x1e, 0x7b, 0xea, 0xcf, 0xbc, 0x7b, 0x1c, 0x8a, 0xc9, + 0x85, 0x67, 0xf0, 0xc2, 0x0f, 0xc7, 0x23, 0xb7, 0xaa, 0x28, 0x5b, 0xc8, 0xd8, 0x0a, 0x05, 0x7d, + 0x7c, 0x65, 0x99, 0x5c, 0xb5, 0xf1, 0x53, 0x01, 0xaa, 0x57, 0xfa, 0xda, 0x1d, 0x7f, 0x76, 0x13, + 0x89, 0xe6, 0x6f, 0x91, 0x68, 0xa6, 0xb4, 0xc2, 0x7b, 0x2a, 0x2d, 0xcb, 0x72, 0xf1, 0x1d, 0xb3, + 0x6c, 0xde, 0x57, 0x96, 0xad, 0x7b, 0xca, 0x72, 0xe9, 0x3e, 0xb3, 0xfc, 0xc9, 0x2a, 0x40, 0xd6, + 0x4c, 0xc8, 0x0c, 0x94, 0x77, 0xf7, 0xd7, 0xd6, 0x0f, 0x76, 0xbf, 0xd9, 0xac, 0xe5, 0x48, 0x05, + 0x4a, 0x2f, 0x37, 0xf7, 0x37, 0x76, 0xf7, 0xb7, 0xf5, 0x07, 0xd6, 0xd6, 0x2e, 0x55, 0xf3, 0xfc, + 0xca, 0x57, 0x60, 0xe2, 0x07, 0x16, 0x79, 0x91, 0x4e, 0x1e, 0xdf, 0xf4, 0x3d, 0xba, 0xf0, 0xe4, + 0x1a, 0xaa, 0xfb, 0xdc, 0x67, 0x46, 0x7b, 0xe9, 0xfc, 0xcf, 0x7a, 0xee, 0xfc, 0xa2, 0x6e, 0xbc, + 
0xb9, 0xa8, 0x1b, 0x7f, 0x5c, 0xd4, 0x8d, 0xd7, 0x97, 0xf5, 0xdc, 0x9b, 0xcb, 0x7a, 0xee, 0xb7, + 0xcb, 0x7a, 0xee, 0xdb, 0x52, 0xf2, 0x0d, 0x7e, 0x68, 0xe1, 0xe3, 0x56, 0xff, 0x0e, 0x00, 0x00, + 0xff, 0xff, 0xdc, 0x00, 0x20, 0x9a, 0x9b, 0x0b, 0x00, 0x00, } // Reference imports to suppress errors if they are not otherwise used. @@ -1027,8 +641,8 @@ const _ = grpc.SupportPackageIsVersion4 // // For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. type RulesClient interface { - // / Rules has info for all rules. - // / Returned rules are expected to include external labels. + /// Rules has info for all rules. + /// Returned rules are expected to include external labels. Rules(ctx context.Context, in *RulesRequest, opts ...grpc.CallOption) (Rules_RulesClient, error) } @@ -1074,8 +688,8 @@ func (x *rulesRulesClient) Recv() (*RulesResponse, error) { // RulesServer is the server API for Rules service. type RulesServer interface { - // / Rules has info for all rules. - // / Returned rules are expected to include external labels. + /// Rules has info for all rules. + /// Returned rules are expected to include external labels. Rules(*RulesRequest, Rules_RulesServer) error } @@ -1146,10 +760,6 @@ func (m *RulesRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if len(m.File) > 0 { for iNdEx := len(m.File) - 1; iNdEx >= 0; iNdEx-- { i -= len(m.File[iNdEx]) @@ -1219,10 +829,6 @@ func (m *RulesResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if m.Result != nil { { size := m.Result.Size() @@ -1290,10 +896,6 @@ func (m *RuleGroups) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if len(m.Groups) > 0 { for iNdEx := len(m.Groups) - 1; iNdEx >= 0; iNdEx-- { { @@ -1331,10 +933,6 @@ func (m *RuleGroup) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if m.Limit != 0 { i = encodeVarintRpc(dAtA, i, uint64(m.Limit)) i-- @@ -1345,18 +943,14 @@ func (m *RuleGroup) MarshalToSizedBuffer(dAtA []byte) (int, error) { i-- dAtA[i] = 0x40 } - if m.LastEvaluation != nil { - { - size, err := m.LastEvaluation.MarshalToSizedBuffer(dAtA[:i]) - if err != nil { - return 0, err - } - i -= size - i = encodeVarintRpc(dAtA, i, uint64(size)) - } - i-- - dAtA[i] = 0x32 + n2, err2 := github_com_gogo_protobuf_types.StdTimeMarshalTo(m.LastEvaluation, dAtA[i-github_com_gogo_protobuf_types.SizeOfStdTime(m.LastEvaluation):]) + if err2 != nil { + return 0, err2 } + i -= n2 + i = encodeVarintRpc(dAtA, i, uint64(n2)) + i-- + dAtA[i] = 0x32 if m.EvaluationDurationSeconds != 0 { i -= 8 encoding_binary.LittleEndian.PutUint64(dAtA[i:], uint64(math.Float64bits(float64(m.EvaluationDurationSeconds)))) @@ -1420,10 +1014,6 @@ func (m *Rule) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if m.Result != nil { { size := m.Result.Size() @@ -1498,10 +1088,6 @@ func (m *AlertInstance) MarshalToSizedBuffer(dAtA []byte) (int, 
error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if m.PartialResponseStrategy != 0 { i = encodeVarintRpc(dAtA, i, uint64(m.PartialResponseStrategy)) i-- @@ -1515,14 +1101,12 @@ func (m *AlertInstance) MarshalToSizedBuffer(dAtA []byte) (int, error) { dAtA[i] = 0x2a } if m.ActiveAt != nil { - { - size, err := m.ActiveAt.MarshalToSizedBuffer(dAtA[:i]) - if err != nil { - return 0, err - } - i -= size - i = encodeVarintRpc(dAtA, i, uint64(size)) + n5, err5 := github_com_gogo_protobuf_types.StdTimeMarshalTo(*m.ActiveAt, dAtA[i-github_com_gogo_protobuf_types.SizeOfStdTime(*m.ActiveAt):]) + if err5 != nil { + return 0, err5 } + i -= n5 + i = encodeVarintRpc(dAtA, i, uint64(n5)) i-- dAtA[i] = 0x22 } @@ -1531,30 +1115,26 @@ func (m *AlertInstance) MarshalToSizedBuffer(dAtA []byte) (int, error) { i-- dAtA[i] = 0x18 } - if m.Annotations != nil { - { - size, err := m.Annotations.MarshalToSizedBuffer(dAtA[:i]) - if err != nil { - return 0, err - } - i -= size - i = encodeVarintRpc(dAtA, i, uint64(size)) + { + size, err := m.Annotations.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err } - i-- - dAtA[i] = 0x12 + i -= size + i = encodeVarintRpc(dAtA, i, uint64(size)) } - if m.Labels != nil { - { - size, err := m.Labels.MarshalToSizedBuffer(dAtA[:i]) - if err != nil { - return 0, err - } - i -= size - i = encodeVarintRpc(dAtA, i, uint64(size)) + i-- + dAtA[i] = 0x12 + { + size, err := m.Labels.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err } - i-- - dAtA[i] = 0xa + i -= size + i = encodeVarintRpc(dAtA, i, uint64(size)) } + i-- + dAtA[i] = 0xa return len(dAtA) - i, nil } @@ -1578,28 +1158,20 @@ func (m *Alert) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if m.KeepFiringForSeconds != 0 { i -= 8 encoding_binary.LittleEndian.PutUint64(dAtA[i:], uint64(math.Float64bits(float64(m.KeepFiringForSeconds)))) i-- dAtA[i] = 0x61 } - if m.LastEvaluation != nil { - { - size, err := m.LastEvaluation.MarshalToSizedBuffer(dAtA[:i]) - if err != nil { - return 0, err - } - i -= size - i = encodeVarintRpc(dAtA, i, uint64(size)) - } - i-- - dAtA[i] = 0x5a + n8, err8 := github_com_gogo_protobuf_types.StdTimeMarshalTo(m.LastEvaluation, dAtA[i-github_com_gogo_protobuf_types.SizeOfStdTime(m.LastEvaluation):]) + if err8 != nil { + return 0, err8 } + i -= n8 + i = encodeVarintRpc(dAtA, i, uint64(n8)) + i-- + dAtA[i] = 0x5a if m.EvaluationDurationSeconds != 0 { i -= 8 encoding_binary.LittleEndian.PutUint64(dAtA[i:], uint64(math.Float64bits(float64(m.EvaluationDurationSeconds)))) @@ -1634,30 +1206,26 @@ func (m *Alert) MarshalToSizedBuffer(dAtA []byte) (int, error) { dAtA[i] = 0x3a } } - if m.Annotations != nil { - { - size, err := m.Annotations.MarshalToSizedBuffer(dAtA[:i]) - if err != nil { - return 0, err - } - i -= size - i = encodeVarintRpc(dAtA, i, uint64(size)) + { + size, err := m.Annotations.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err } - i-- - dAtA[i] = 0x32 + i -= size + i = encodeVarintRpc(dAtA, i, uint64(size)) } - if m.Labels != nil { - { - size, err := m.Labels.MarshalToSizedBuffer(dAtA[:i]) - if err != nil { - return 0, err - } - i -= size - i = encodeVarintRpc(dAtA, i, uint64(size)) + i-- + dAtA[i] = 0x32 + { + size, err := m.Labels.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err } - i-- - dAtA[i] = 0x2a + i -= size + i = 
encodeVarintRpc(dAtA, i, uint64(size)) } + i-- + dAtA[i] = 0x2a if m.DurationSeconds != 0 { i -= 8 encoding_binary.LittleEndian.PutUint64(dAtA[i:], uint64(math.Float64bits(float64(m.DurationSeconds)))) @@ -1706,22 +1274,14 @@ func (m *RecordingRule) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } - if m.LastEvaluation != nil { - { - size, err := m.LastEvaluation.MarshalToSizedBuffer(dAtA[:i]) - if err != nil { - return 0, err - } - i -= size - i = encodeVarintRpc(dAtA, i, uint64(size)) - } - i-- - dAtA[i] = 0x3a + n11, err11 := github_com_gogo_protobuf_types.StdTimeMarshalTo(m.LastEvaluation, dAtA[i-github_com_gogo_protobuf_types.SizeOfStdTime(m.LastEvaluation):]) + if err11 != nil { + return 0, err11 } + i -= n11 + i = encodeVarintRpc(dAtA, i, uint64(n11)) + i-- + dAtA[i] = 0x3a if m.EvaluationDurationSeconds != 0 { i -= 8 encoding_binary.LittleEndian.PutUint64(dAtA[i:], uint64(math.Float64bits(float64(m.EvaluationDurationSeconds)))) @@ -1742,18 +1302,16 @@ func (m *RecordingRule) MarshalToSizedBuffer(dAtA []byte) (int, error) { i-- dAtA[i] = 0x22 } - if m.Labels != nil { - { - size, err := m.Labels.MarshalToSizedBuffer(dAtA[:i]) - if err != nil { - return 0, err - } - i -= size - i = encodeVarintRpc(dAtA, i, uint64(size)) + { + size, err := m.Labels.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err } - i-- - dAtA[i] = 0x1a + i -= size + i = encodeVarintRpc(dAtA, i, uint64(size)) } + i-- + dAtA[i] = 0x1a if len(m.Query) > 0 { i -= len(m.Query) copy(dAtA[i:], m.Query) @@ -1771,43 +1329,6 @@ func (m *RecordingRule) MarshalToSizedBuffer(dAtA []byte) (int, error) { return len(dAtA) - i, nil } -func (m *Timestamp) Marshal() (dAtA []byte, err error) { - size := m.Size() - dAtA = make([]byte, size) - n, err := m.MarshalToSizedBuffer(dAtA[:size]) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *Timestamp) MarshalTo(dAtA []byte) (int, error) { - size := m.Size() - return m.MarshalToSizedBuffer(dAtA[:size]) -} - -func (m *Timestamp) MarshalToSizedBuffer(dAtA []byte) (int, error) { - i := len(dAtA) - _ = i - var l int - _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } - if m.Nanos != 0 { - i = encodeVarintRpc(dAtA, i, uint64(m.Nanos)) - i-- - dAtA[i] = 0x10 - } - if m.Seconds != 0 { - i = encodeVarintRpc(dAtA, i, uint64(m.Seconds)) - i-- - dAtA[i] = 0x8 - } - return len(dAtA) - i, nil -} - func encodeVarintRpc(dAtA []byte, offset int, v uint64) int { offset -= sovRpc(v) base := offset @@ -1855,9 +1376,6 @@ func (m *RulesRequest) Size() (n int) { n += 1 + l + sovRpc(uint64(l)) } } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -1870,9 +1388,6 @@ func (m *RulesResponse) Size() (n int) { if m.Result != nil { n += m.Result.Size() } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -1910,9 +1425,6 @@ func (m *RuleGroups) Size() (n int) { n += 1 + l + sovRpc(uint64(l)) } } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -1942,19 +1454,14 @@ func (m *RuleGroup) Size() (n int) { if m.EvaluationDurationSeconds != 0 { n += 9 } - if m.LastEvaluation != nil { - l = m.LastEvaluation.Size() - n += 1 + l + sovRpc(uint64(l)) - } + l = github_com_gogo_protobuf_types.SizeOfStdTime(m.LastEvaluation) + n += 1 + l + sovRpc(uint64(l)) if m.PartialResponseStrategy != 0 { n += 1 + 
sovRpc(uint64(m.PartialResponseStrategy)) } if m.Limit != 0 { n += 1 + sovRpc(uint64(m.Limit)) } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -1967,9 +1474,6 @@ func (m *Rule) Size() (n int) { if m.Result != nil { n += m.Result.Size() } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -2003,19 +1507,15 @@ func (m *AlertInstance) Size() (n int) { } var l int _ = l - if m.Labels != nil { - l = m.Labels.Size() - n += 1 + l + sovRpc(uint64(l)) - } - if m.Annotations != nil { - l = m.Annotations.Size() - n += 1 + l + sovRpc(uint64(l)) - } + l = m.Labels.Size() + n += 1 + l + sovRpc(uint64(l)) + l = m.Annotations.Size() + n += 1 + l + sovRpc(uint64(l)) if m.State != 0 { n += 1 + sovRpc(uint64(m.State)) } if m.ActiveAt != nil { - l = m.ActiveAt.Size() + l = github_com_gogo_protobuf_types.SizeOfStdTime(*m.ActiveAt) n += 1 + l + sovRpc(uint64(l)) } l = len(m.Value) @@ -2025,9 +1525,6 @@ func (m *AlertInstance) Size() (n int) { if m.PartialResponseStrategy != 0 { n += 1 + sovRpc(uint64(m.PartialResponseStrategy)) } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -2051,14 +1548,10 @@ func (m *Alert) Size() (n int) { if m.DurationSeconds != 0 { n += 9 } - if m.Labels != nil { - l = m.Labels.Size() - n += 1 + l + sovRpc(uint64(l)) - } - if m.Annotations != nil { - l = m.Annotations.Size() - n += 1 + l + sovRpc(uint64(l)) - } + l = m.Labels.Size() + n += 1 + l + sovRpc(uint64(l)) + l = m.Annotations.Size() + n += 1 + l + sovRpc(uint64(l)) if len(m.Alerts) > 0 { for _, e := range m.Alerts { l = e.Size() @@ -2076,16 +1569,11 @@ func (m *Alert) Size() (n int) { if m.EvaluationDurationSeconds != 0 { n += 9 } - if m.LastEvaluation != nil { - l = m.LastEvaluation.Size() - n += 1 + l + sovRpc(uint64(l)) - } + l = github_com_gogo_protobuf_types.SizeOfStdTime(m.LastEvaluation) + n += 1 + l + sovRpc(uint64(l)) if m.KeepFiringForSeconds != 0 { n += 9 } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -2103,10 +1591,8 @@ func (m *RecordingRule) Size() (n int) { if l > 0 { n += 1 + l + sovRpc(uint64(l)) } - if m.Labels != nil { - l = m.Labels.Size() - n += 1 + l + sovRpc(uint64(l)) - } + l = m.Labels.Size() + n += 1 + l + sovRpc(uint64(l)) l = len(m.Health) if l > 0 { n += 1 + l + sovRpc(uint64(l)) @@ -2118,31 +1604,8 @@ func (m *RecordingRule) Size() (n int) { if m.EvaluationDurationSeconds != 0 { n += 9 } - if m.LastEvaluation != nil { - l = m.LastEvaluation.Size() - n += 1 + l + sovRpc(uint64(l)) - } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } - return n -} - -func (m *Timestamp) Size() (n int) { - if m == nil { - return 0 - } - var l int - _ = l - if m.Seconds != 0 { - n += 1 + sovRpc(uint64(m.Seconds)) - } - if m.Nanos != 0 { - n += 1 + sovRpc(uint64(m.Nanos)) - } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } + l = github_com_gogo_protobuf_types.SizeOfStdTime(m.LastEvaluation) + n += 1 + l + sovRpc(uint64(l)) return n } @@ -2359,7 +1822,6 @@ func (m *RulesRequest) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } @@ -2477,7 +1939,6 @@ func (m *RulesResponse) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) 
iNdEx += skippy } } @@ -2562,7 +2023,6 @@ func (m *RuleGroups) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } @@ -2750,10 +2210,7 @@ func (m *RuleGroup) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - if m.LastEvaluation == nil { - m.LastEvaluation = &Timestamp{} - } - if err := m.LastEvaluation.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + if err := github_com_gogo_protobuf_types.StdTimeUnmarshal(&m.LastEvaluation, dAtA[iNdEx:postIndex]); err != nil { return err } iNdEx = postIndex @@ -2807,7 +2264,6 @@ func (m *RuleGroup) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } @@ -2928,7 +2384,6 @@ func (m *Rule) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } @@ -2996,9 +2451,6 @@ func (m *AlertInstance) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - if m.Labels == nil { - m.Labels = &labelpb.LabelSet{} - } if err := m.Labels.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -3032,9 +2484,6 @@ func (m *AlertInstance) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - if m.Annotations == nil { - m.Annotations = &labelpb.LabelSet{} - } if err := m.Annotations.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -3088,9 +2537,9 @@ func (m *AlertInstance) Unmarshal(dAtA []byte) error { return io.ErrUnexpectedEOF } if m.ActiveAt == nil { - m.ActiveAt = &Timestamp{} + m.ActiveAt = new(time.Time) } - if err := m.ActiveAt.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + if err := github_com_gogo_protobuf_types.StdTimeUnmarshal(m.ActiveAt, dAtA[iNdEx:postIndex]); err != nil { return err } iNdEx = postIndex @@ -3157,7 +2606,6 @@ func (m *AlertInstance) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } @@ -3319,9 +2767,6 @@ func (m *Alert) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - if m.Labels == nil { - m.Labels = &labelpb.LabelSet{} - } if err := m.Labels.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -3355,9 +2800,6 @@ func (m *Alert) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - if m.Annotations == nil { - m.Annotations = &labelpb.LabelSet{} - } if err := m.Annotations.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -3500,10 +2942,7 @@ func (m *Alert) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - if m.LastEvaluation == nil { - m.LastEvaluation = &Timestamp{} - } - if err := m.LastEvaluation.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + if err := github_com_gogo_protobuf_types.StdTimeUnmarshal(&m.LastEvaluation, dAtA[iNdEx:postIndex]); err != nil { return err } iNdEx = postIndex @@ -3530,7 +2969,6 @@ func (m *Alert) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) 
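Editorial note, not part of the patch: the `LastEvaluation` and `ActiveAt` handling above now goes through gogo's stdtime helpers because rpc.proto (below) declares these fields as `google.protobuf.Timestamp` with `(gogoproto.stdtime) = true`, so the generated structs hold plain `time.Time` (or `*time.Time` for the nullable `active_at`) instead of a custom `Timestamp` message. A small round-trip sketch using the same helpers the generated code calls; nothing here is Thanos-specific:

```go
package main

import (
    "fmt"
    "time"

    gogotypes "github.com/gogo/protobuf/types"
)

func main() {
    in := time.Unix(1_700_000_000, 42).UTC()

    // Marshal into the wire format of google.protobuf.Timestamp.
    buf := make([]byte, gogotypes.SizeOfStdTime(in))
    if _, err := gogotypes.StdTimeMarshalTo(in, buf); err != nil {
        panic(err)
    }

    // Unmarshal straight back into a time.Time, no intermediate struct.
    var out time.Time
    if err := gogotypes.StdTimeUnmarshal(&out, buf); err != nil {
        panic(err)
    }
    fmt.Println(in.Equal(out)) // true
}
```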
iNdEx += skippy } } @@ -3662,9 +3100,6 @@ func (m *RecordingRule) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - if m.Labels == nil { - m.Labels = &labelpb.LabelSet{} - } if err := m.Labels.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -3773,10 +3208,7 @@ func (m *RecordingRule) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - if m.LastEvaluation == nil { - m.LastEvaluation = &Timestamp{} - } - if err := m.LastEvaluation.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + if err := github_com_gogo_protobuf_types.StdTimeUnmarshal(&m.LastEvaluation, dAtA[iNdEx:postIndex]); err != nil { return err } iNdEx = postIndex @@ -3792,96 +3224,6 @@ func (m *RecordingRule) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) - iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *Timestamp) Unmarshal(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowRpc - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: Timestamp: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: Timestamp: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 0 { - return fmt.Errorf("proto: wrong wireType = %d for field Seconds", wireType) - } - m.Seconds = 0 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowRpc - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - m.Seconds |= int64(b&0x7F) << shift - if b < 0x80 { - break - } - } - case 2: - if wireType != 0 { - return fmt.Errorf("proto: wrong wireType = %d for field Nanos", wireType) - } - m.Nanos = 0 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowRpc - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - m.Nanos |= int32(b&0x7F) << shift - if b < 0x80 { - break - } - } - default: - iNdEx = preIndex - skippy, err := skipRpc(dAtA[iNdEx:]) - if err != nil { - return err - } - if (skippy < 0) || (iNdEx+skippy) < 0 { - return ErrInvalidLengthRpc - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } diff --git a/pkg/rules/rulespb/rpc.proto b/pkg/rules/rulespb/rpc.proto index f0cb6f2926..be154201d1 100644 --- a/pkg/rules/rulespb/rpc.proto +++ b/pkg/rules/rulespb/rpc.proto @@ -6,9 +6,22 @@ package thanos; import "store/storepb/types.proto"; import "store/labelpb/types.proto"; +import "gogoproto/gogo.proto"; +import "google/protobuf/timestamp.proto"; option go_package = "rulespb"; +option (gogoproto.sizer_all) = true; +option (gogoproto.marshaler_all) = true; +option (gogoproto.unmarshaler_all) = true; +option (gogoproto.goproto_getters_all) = false; + +// Do not generate XXX fields to reduce memory footprint and opening a door +// for zero-copy casts to/from prometheus data types. 
+option (gogoproto.goproto_unkeyed_all) = false; +option (gogoproto.goproto_unrecognized_all) = false; +option (gogoproto.goproto_sizecache_all) = false; + /// Rules represents API that is responsible for gathering rules and their statuses. service Rules { /// Rules has info for all rules. @@ -52,30 +65,21 @@ message RulesResponse { /// /// For rule parsing from YAML configuration other struct is used: https://github.com/prometheus/prometheus/blob/20b1f596f6fb16107ef0c244d240b0ad6da36829/pkg/rulefmt/rulefmt.go#L105 message RuleGroups { - // @gotags: json:"groups" - repeated RuleGroup groups = 1; + repeated RuleGroup groups = 1 [(gogoproto.jsontag) = "groups" ]; } /// RuleGroup has info for rules which are part of a group. message RuleGroup { - // @gotags: json:"name" - string name = 1; - // @gotags: json:"file" - string file = 2; - // @gotags: json:"rules" - repeated Rule rules = 3; - // @gotags: json:"interval" - double interval = 4; - // @gotags: json:"evaluationTime" - double evaluation_duration_seconds = 5; // TODO: Is it really second? - // @gotags: json:"lastEvaluation" - Timestamp last_evaluation = 6; - // @gotags: json:"limit" - int64 limit = 9; + string name = 1 [(gogoproto.jsontag) = "name" ]; + string file = 2 [(gogoproto.jsontag) = "file" ]; + repeated Rule rules = 3 [(gogoproto.jsontag) = "rules" ]; + double interval = 4 [(gogoproto.jsontag) = "interval" ]; + double evaluation_duration_seconds = 5 [(gogoproto.jsontag) = "evaluationTime" ]; // TODO: Is it really second? + google.protobuf.Timestamp last_evaluation = 6 [(gogoproto.jsontag) = "lastEvaluation", (gogoproto.stdtime) = true, (gogoproto.nullable) = false ]; + int64 limit = 9 [(gogoproto.jsontag) = "limit" ]; // Thanos specific. - // @gotags: json:"partialResponseStrategy" - PartialResponseStrategy PartialResponseStrategy = 8; + PartialResponseStrategy PartialResponseStrategy = 8 [(gogoproto.jsontag) = "partialResponseStrategy" ]; } message Rule { @@ -102,76 +106,38 @@ enum AlertState { } message AlertInstance { - // @gotags: json:"labels" - LabelSet labels = 1; - // @gotags: json:"annotations" - LabelSet annotations = 2; - // @gotags: json:"state" - AlertState state = 3; - // @gotags: json:"activeAt,omitempty" - Timestamp active_at = 4; - // @gotags: json:"value" - string value = 5; + LabelSet labels = 1 [(gogoproto.jsontag) = "labels", (gogoproto.nullable) = false ]; + LabelSet annotations = 2 [(gogoproto.jsontag) = "annotations", (gogoproto.nullable) = false ]; + AlertState state = 3 [(gogoproto.jsontag) = "state" ]; + google.protobuf.Timestamp active_at = 4 [(gogoproto.jsontag) = "activeAt,omitempty", (gogoproto.stdtime) = true]; + string value = 5 [(gogoproto.jsontag) = "value" ]; // Thanos specific. Used mainly for alert API purposes. - // @gotags: json:"partialResponseStrategy" - PartialResponseStrategy PartialResponseStrategy = 6; + PartialResponseStrategy PartialResponseStrategy = 6 [(gogoproto.jsontag) = "partialResponseStrategy" ]; } message Alert { /// state returns the maximum state of alert instances for this rule. 
- // @gotags: json:"state" - AlertState state = 1; - // @gotags: json:"name" - string name = 2; - // @gotags: json:"query" - string query = 3; - // @gotags: json:"duration" - double duration_seconds = 4; - // @gotags: json:"labels" - LabelSet labels = 5; - // @gotags: json:"annotations" - LabelSet annotations = 6; - // @gotags: json:"alerts" - repeated AlertInstance alerts = 7; - // @gotags: json:"health" - string health = 8; - // @gotags: json:"lastError,omitempty" - string last_error = 9; - // @gotags: json:"evaluationTime" - double evaluation_duration_seconds = 10; - // @gotags: json:"lastEvaluation,omitempty" - Timestamp last_evaluation = 11; - // @gotags: json:"keepFiringFor" - double keep_firing_for_seconds = 12; + AlertState state = 1 [(gogoproto.jsontag) = "state" ]; + string name = 2 [(gogoproto.jsontag) = "name" ]; + string query = 3 [(gogoproto.jsontag) = "query" ]; + double duration_seconds = 4 [(gogoproto.jsontag) = "duration" ]; + LabelSet labels = 5 [(gogoproto.jsontag) = "labels", (gogoproto.nullable) = false ]; + LabelSet annotations = 6 [(gogoproto.jsontag) = "annotations", (gogoproto.nullable) = false ]; + repeated AlertInstance alerts = 7 [(gogoproto.jsontag) = "alerts" ]; + string health = 8 [(gogoproto.jsontag) = "health" ]; + string last_error = 9 [(gogoproto.jsontag) = "lastError,omitempty" ]; + double evaluation_duration_seconds = 10 [(gogoproto.jsontag) = "evaluationTime" ]; + google.protobuf.Timestamp last_evaluation = 11 [(gogoproto.jsontag) = "lastEvaluation", (gogoproto.stdtime) = true, (gogoproto.nullable) = false ]; + double keep_firing_for_seconds = 12 [(gogoproto.jsontag) = "keepFiringFor" ]; } message RecordingRule { - // @gotags: json:"name" - string name = 1; - // @gotags: json:"query" - string query = 2; - // @gotags: json:"labels" - LabelSet labels = 3; - // @gotags: json:"health" - string health = 4; - // @gotags: json:"lastError,omitempty" - string last_error = 5; - // @gotags: json:"evaluationTime" - double evaluation_duration_seconds = 6; - // @gotags: json:"lastEvaluation" - Timestamp last_evaluation = 7; + string name = 1 [(gogoproto.jsontag) = "name" ]; + string query = 2 [(gogoproto.jsontag) = "query" ]; + LabelSet labels = 3 [(gogoproto.jsontag) = "labels", (gogoproto.nullable) = false ]; + string health = 4 [(gogoproto.jsontag) = "health" ]; + string last_error = 5 [(gogoproto.jsontag) = "lastError,omitempty" ]; + double evaluation_duration_seconds = 6 [(gogoproto.jsontag) = "evaluationTime" ]; + google.protobuf.Timestamp last_evaluation = 7 [(gogoproto.jsontag) = "lastEvaluation", (gogoproto.stdtime) = true, (gogoproto.nullable) = false ]; } - -message Timestamp { - // Represents seconds of UTC time since Unix epoch - // 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to - // 9999-12-31T23:59:59Z inclusive. - int64 seconds = 1; - - // Non-negative fractions of a second at nanosecond resolution. Negative - // second values with fractions must still have non-negative nanos values - // that count forward in time. Must be from 0 to 999,999,999 - // inclusive. 
- int32 nanos = 2; -} \ No newline at end of file diff --git a/pkg/store/acceptance_test.go b/pkg/store/acceptance_test.go index 0deabebf33..274f32386c 100644 --- a/pkg/store/acceptance_test.go +++ b/pkg/store/acceptance_test.go @@ -42,7 +42,7 @@ import ( ) type labelNameCallCase struct { - matchers []*storepb.LabelMatcher + matchers []storepb.LabelMatcher start int64 end int64 @@ -53,7 +53,7 @@ type labelNameCallCase struct { type labelValuesCallCase struct { label string - matchers []*storepb.LabelMatcher + matchers []storepb.LabelMatcher start int64 end int64 @@ -62,7 +62,7 @@ type labelValuesCallCase struct { } type seriesCallCase struct { - matchers []*storepb.LabelMatcher + matchers []storepb.LabelMatcher start int64 end int64 skipChunks bool @@ -149,30 +149,30 @@ func testStoreAPIsAcceptance(t *testing.T, startStore startStoreFn) { start: timestamp.FromTime(minTime), end: timestamp.FromTime(maxTime), expectedNames: []string{"bar", "foo", "region"}, - matchers: []*storepb.LabelMatcher{{Type: storepb.LabelMatcher_EQ, Name: "bar", Value: "barvalue1"}}, + matchers: []storepb.LabelMatcher{{Type: storepb.LabelMatcher_EQ, Name: "bar", Value: "barvalue1"}}, }, { start: timestamp.FromTime(minTime), end: timestamp.FromTime(maxTime), expectedNames: []string{"foo", "region"}, - matchers: []*storepb.LabelMatcher{{Type: storepb.LabelMatcher_EQ, Name: "foo", Value: "foovalue2"}}, + matchers: []storepb.LabelMatcher{{Type: storepb.LabelMatcher_EQ, Name: "foo", Value: "foovalue2"}}, }, { start: timestamp.FromTime(minTime), end: timestamp.FromTime(maxTime), - matchers: []*storepb.LabelMatcher{{Type: storepb.LabelMatcher_EQ, Name: "bar", Value: "different"}}, + matchers: []storepb.LabelMatcher{{Type: storepb.LabelMatcher_EQ, Name: "bar", Value: "different"}}, }, // Matchers on external labels. { start: timestamp.FromTime(minTime), end: timestamp.FromTime(maxTime), expectedNames: []string{"bar", "foo", "region"}, - matchers: []*storepb.LabelMatcher{{Type: storepb.LabelMatcher_EQ, Name: "region", Value: "eu-west"}}, + matchers: []storepb.LabelMatcher{{Type: storepb.LabelMatcher_EQ, Name: "region", Value: "eu-west"}}, }, { start: timestamp.FromTime(minTime), end: timestamp.FromTime(maxTime), - matchers: []*storepb.LabelMatcher{{Type: storepb.LabelMatcher_EQ, Name: "region", Value: "different"}}, + matchers: []storepb.LabelMatcher{{Type: storepb.LabelMatcher_EQ, Name: "region", Value: "different"}}, }, }, labelValuesCalls: []labelValuesCallCase{ @@ -188,13 +188,13 @@ func testStoreAPIsAcceptance(t *testing.T, startStore startStoreFn) { end: timestamp.FromTime(maxTime), label: "foo", expectedValues: []string{"foovalue1"}, - matchers: []*storepb.LabelMatcher{{Type: storepb.LabelMatcher_EQ, Name: "bar", Value: "barvalue1"}}, + matchers: []storepb.LabelMatcher{{Type: storepb.LabelMatcher_EQ, Name: "bar", Value: "barvalue1"}}, }, { start: timestamp.FromTime(minTime), end: timestamp.FromTime(maxTime), label: "foo", - matchers: []*storepb.LabelMatcher{{Type: storepb.LabelMatcher_EQ, Name: "bar", Value: "different"}}, + matchers: []storepb.LabelMatcher{{Type: storepb.LabelMatcher_EQ, Name: "bar", Value: "different"}}, }, // Matchers on external labels. 
{ @@ -202,40 +202,40 @@ func testStoreAPIsAcceptance(t *testing.T, startStore startStoreFn) { end: timestamp.FromTime(maxTime), label: "region", expectedValues: []string(nil), - matchers: []*storepb.LabelMatcher{{Type: storepb.LabelMatcher_EQ, Name: "__name__", Value: "nonexistent"}}, + matchers: []storepb.LabelMatcher{{Type: storepb.LabelMatcher_EQ, Name: "__name__", Value: "nonexistent"}}, }, { start: timestamp.FromTime(minTime), end: timestamp.FromTime(maxTime), label: "region", expectedValues: []string(nil), - matchers: []*storepb.LabelMatcher{{Type: storepb.LabelMatcher_EQ, Name: "region", Value: "eu-east"}}, + matchers: []storepb.LabelMatcher{{Type: storepb.LabelMatcher_EQ, Name: "region", Value: "eu-east"}}, }, { start: timestamp.FromTime(minTime), end: timestamp.FromTime(maxTime), label: "foo", expectedValues: []string{"foovalue1", "foovalue2"}, - matchers: []*storepb.LabelMatcher{{Type: storepb.LabelMatcher_EQ, Name: "region", Value: "eu-west"}}, + matchers: []storepb.LabelMatcher{{Type: storepb.LabelMatcher_EQ, Name: "region", Value: "eu-west"}}, }, { start: timestamp.FromTime(minTime), end: timestamp.FromTime(maxTime), label: "bar", expectedValues: []string{"barvalue1"}, - matchers: []*storepb.LabelMatcher{{Type: storepb.LabelMatcher_EQ, Name: "region", Value: "eu-west"}}, + matchers: []storepb.LabelMatcher{{Type: storepb.LabelMatcher_EQ, Name: "region", Value: "eu-west"}}, }, { start: timestamp.FromTime(minTime), end: timestamp.FromTime(maxTime), label: "foo", - matchers: []*storepb.LabelMatcher{{Type: storepb.LabelMatcher_EQ, Name: "region", Value: "different"}}, + matchers: []storepb.LabelMatcher{{Type: storepb.LabelMatcher_EQ, Name: "region", Value: "different"}}, }, { start: timestamp.FromTime(minTime), end: timestamp.FromTime(maxTime), label: "bar", - matchers: []*storepb.LabelMatcher{{Type: storepb.LabelMatcher_EQ, Name: "region", Value: "different"}}, + matchers: []storepb.LabelMatcher{{Type: storepb.LabelMatcher_EQ, Name: "region", Value: "different"}}, }, }, }, @@ -250,7 +250,7 @@ func testStoreAPIsAcceptance(t *testing.T, startStore startStoreFn) { { start: timestamp.FromTime(minTime), end: timestamp.FromTime(maxTime), - matchers: []*storepb.LabelMatcher{ + matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "foo", Value: "bar"}, }, skipChunks: true, @@ -275,7 +275,7 @@ func testStoreAPIsAcceptance(t *testing.T, startStore startStoreFn) { start: timestamp.FromTime(minTime), end: timestamp.FromTime(maxTime), label: "region", - matchers: []*storepb.LabelMatcher{ + matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "__name__", Value: "up"}, {Type: storepb.LabelMatcher_EQ, Name: "job", Value: "C"}, }, @@ -304,7 +304,7 @@ func testStoreAPIsAcceptance(t *testing.T, startStore startStoreFn) { { start: timestamp.FromTime(minTime), end: timestamp.FromTime(maxTime), - matchers: []*storepb.LabelMatcher{ + matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "n", Value: "1"}, }, expectedLabels: []labels.Labels{ @@ -316,7 +316,7 @@ func testStoreAPIsAcceptance(t *testing.T, startStore startStoreFn) { { start: timestamp.FromTime(minTime), end: timestamp.FromTime(maxTime), - matchers: []*storepb.LabelMatcher{ + matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "n", Value: "1"}, {Type: storepb.LabelMatcher_EQ, Name: "i", Value: "a"}, }, @@ -327,7 +327,7 @@ func testStoreAPIsAcceptance(t *testing.T, startStore startStoreFn) { { start: timestamp.FromTime(minTime), end: timestamp.FromTime(maxTime), - 
matchers: []*storepb.LabelMatcher{ + matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "n", Value: "1"}, {Type: storepb.LabelMatcher_EQ, Name: "i", Value: "missing"}, }, @@ -336,7 +336,7 @@ func testStoreAPIsAcceptance(t *testing.T, startStore startStoreFn) { { start: timestamp.FromTime(minTime), end: timestamp.FromTime(maxTime), - matchers: []*storepb.LabelMatcher{ + matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "missing", Value: ""}, }, expectedLabels: []labels.Labels{ @@ -350,7 +350,7 @@ func testStoreAPIsAcceptance(t *testing.T, startStore startStoreFn) { { start: timestamp.FromTime(minTime), end: timestamp.FromTime(maxTime), - matchers: []*storepb.LabelMatcher{ + matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_NEQ, Name: "n", Value: "1"}, }, expectedLabels: []labels.Labels{ @@ -361,7 +361,7 @@ func testStoreAPIsAcceptance(t *testing.T, startStore startStoreFn) { { start: timestamp.FromTime(minTime), end: timestamp.FromTime(maxTime), - matchers: []*storepb.LabelMatcher{ + matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_RE, Name: "i", Value: ".+"}, }, expectedLabels: []labels.Labels{ @@ -372,7 +372,7 @@ func testStoreAPIsAcceptance(t *testing.T, startStore startStoreFn) { { start: timestamp.FromTime(minTime), end: timestamp.FromTime(maxTime), - matchers: []*storepb.LabelMatcher{ + matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_RE, Name: "i", Value: ".*"}, }, expectedLabels: []labels.Labels{ @@ -386,7 +386,7 @@ func testStoreAPIsAcceptance(t *testing.T, startStore startStoreFn) { { start: timestamp.FromTime(minTime), end: timestamp.FromTime(maxTime), - matchers: []*storepb.LabelMatcher{ + matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "i", Value: ""}, }, expectedLabels: []labels.Labels{ @@ -398,7 +398,7 @@ func testStoreAPIsAcceptance(t *testing.T, startStore startStoreFn) { { start: timestamp.FromTime(minTime), end: timestamp.FromTime(maxTime), - matchers: []*storepb.LabelMatcher{ + matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_NEQ, Name: "i", Value: ""}, }, expectedLabels: []labels.Labels{ @@ -409,7 +409,7 @@ func testStoreAPIsAcceptance(t *testing.T, startStore startStoreFn) { { start: timestamp.FromTime(minTime), end: timestamp.FromTime(maxTime), - matchers: []*storepb.LabelMatcher{ + matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_NEQ, Name: "missing", Value: ""}, }, expectedLabels: []labels.Labels{}, @@ -417,7 +417,7 @@ func testStoreAPIsAcceptance(t *testing.T, startStore startStoreFn) { { start: timestamp.FromTime(minTime), end: timestamp.FromTime(maxTime), - matchers: []*storepb.LabelMatcher{ + matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "n", Value: "1"}, {Type: storepb.LabelMatcher_NEQ, Name: "i", Value: "a"}, }, @@ -429,7 +429,7 @@ func testStoreAPIsAcceptance(t *testing.T, startStore startStoreFn) { { start: timestamp.FromTime(minTime), end: timestamp.FromTime(maxTime), - matchers: []*storepb.LabelMatcher{ + matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_RE, Name: "n", Value: "^1$"}, }, expectedLabels: []labels.Labels{ @@ -441,7 +441,7 @@ func testStoreAPIsAcceptance(t *testing.T, startStore startStoreFn) { { start: timestamp.FromTime(minTime), end: timestamp.FromTime(maxTime), - matchers: []*storepb.LabelMatcher{ + matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "n", Value: "1"}, {Type: storepb.LabelMatcher_RE, Name: "i", Value: "^a$"}, }, @@ -452,7 +452,7 @@ func 
testStoreAPIsAcceptance(t *testing.T, startStore startStoreFn) { { start: timestamp.FromTime(minTime), end: timestamp.FromTime(maxTime), - matchers: []*storepb.LabelMatcher{ + matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "n", Value: "1"}, {Type: storepb.LabelMatcher_RE, Name: "i", Value: "^a?$"}, }, @@ -464,7 +464,7 @@ func testStoreAPIsAcceptance(t *testing.T, startStore startStoreFn) { { start: timestamp.FromTime(minTime), end: timestamp.FromTime(maxTime), - matchers: []*storepb.LabelMatcher{ + matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_RE, Name: "i", Value: "^$"}, }, expectedLabels: []labels.Labels{ @@ -476,7 +476,7 @@ func testStoreAPIsAcceptance(t *testing.T, startStore startStoreFn) { { start: timestamp.FromTime(minTime), end: timestamp.FromTime(maxTime), - matchers: []*storepb.LabelMatcher{ + matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "n", Value: "1"}, {Type: storepb.LabelMatcher_RE, Name: "i", Value: "^$"}, }, @@ -487,7 +487,7 @@ func testStoreAPIsAcceptance(t *testing.T, startStore startStoreFn) { { start: timestamp.FromTime(minTime), end: timestamp.FromTime(maxTime), - matchers: []*storepb.LabelMatcher{ + matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "n", Value: "1"}, {Type: storepb.LabelMatcher_RE, Name: "i", Value: "^.*$"}, }, @@ -500,7 +500,7 @@ func testStoreAPIsAcceptance(t *testing.T, startStore startStoreFn) { { start: timestamp.FromTime(minTime), end: timestamp.FromTime(maxTime), - matchers: []*storepb.LabelMatcher{ + matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "n", Value: "1"}, {Type: storepb.LabelMatcher_RE, Name: "i", Value: "^.+$"}, }, @@ -512,7 +512,7 @@ func testStoreAPIsAcceptance(t *testing.T, startStore startStoreFn) { { start: timestamp.FromTime(minTime), end: timestamp.FromTime(maxTime), - matchers: []*storepb.LabelMatcher{ + matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_NRE, Name: "n", Value: "^1$"}, }, expectedLabels: []labels.Labels{ @@ -523,7 +523,7 @@ func testStoreAPIsAcceptance(t *testing.T, startStore startStoreFn) { { start: timestamp.FromTime(minTime), end: timestamp.FromTime(maxTime), - matchers: []*storepb.LabelMatcher{ + matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_NRE, Name: "n", Value: "1"}, }, expectedLabels: []labels.Labels{ @@ -534,7 +534,7 @@ func testStoreAPIsAcceptance(t *testing.T, startStore startStoreFn) { { start: timestamp.FromTime(minTime), end: timestamp.FromTime(maxTime), - matchers: []*storepb.LabelMatcher{ + matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_NRE, Name: "n", Value: "1|2.5"}, }, expectedLabels: []labels.Labels{ @@ -544,7 +544,7 @@ func testStoreAPIsAcceptance(t *testing.T, startStore startStoreFn) { { start: timestamp.FromTime(minTime), end: timestamp.FromTime(maxTime), - matchers: []*storepb.LabelMatcher{ + matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_NRE, Name: "n", Value: "(1|2.5)"}, }, expectedLabels: []labels.Labels{ @@ -554,7 +554,7 @@ func testStoreAPIsAcceptance(t *testing.T, startStore startStoreFn) { { start: timestamp.FromTime(minTime), end: timestamp.FromTime(maxTime), - matchers: []*storepb.LabelMatcher{ + matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "n", Value: "1"}, {Type: storepb.LabelMatcher_NRE, Name: "i", Value: "^a$"}, }, @@ -566,7 +566,7 @@ func testStoreAPIsAcceptance(t *testing.T, startStore startStoreFn) { { start: timestamp.FromTime(minTime), end: timestamp.FromTime(maxTime), - 
matchers: []*storepb.LabelMatcher{ + matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "n", Value: "1"}, {Type: storepb.LabelMatcher_NRE, Name: "i", Value: "^a?$"}, }, @@ -577,7 +577,7 @@ func testStoreAPIsAcceptance(t *testing.T, startStore startStoreFn) { { start: timestamp.FromTime(minTime), end: timestamp.FromTime(maxTime), - matchers: []*storepb.LabelMatcher{ + matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "n", Value: "1"}, {Type: storepb.LabelMatcher_NRE, Name: "i", Value: "^$"}, }, @@ -589,7 +589,7 @@ func testStoreAPIsAcceptance(t *testing.T, startStore startStoreFn) { { start: timestamp.FromTime(minTime), end: timestamp.FromTime(maxTime), - matchers: []*storepb.LabelMatcher{ + matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "n", Value: "1"}, {Type: storepb.LabelMatcher_NRE, Name: "i", Value: "^.*$"}, }, @@ -598,7 +598,7 @@ func testStoreAPIsAcceptance(t *testing.T, startStore startStoreFn) { { start: timestamp.FromTime(minTime), end: timestamp.FromTime(maxTime), - matchers: []*storepb.LabelMatcher{ + matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "n", Value: "1"}, {Type: storepb.LabelMatcher_NRE, Name: "i", Value: "^.+$"}, }, @@ -609,7 +609,7 @@ func testStoreAPIsAcceptance(t *testing.T, startStore startStoreFn) { { start: timestamp.FromTime(minTime), end: timestamp.FromTime(maxTime), - matchers: []*storepb.LabelMatcher{ + matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "n", Value: "1"}, {Type: storepb.LabelMatcher_NEQ, Name: "i", Value: ""}, {Type: storepb.LabelMatcher_EQ, Name: "i", Value: "a"}, @@ -621,7 +621,7 @@ func testStoreAPIsAcceptance(t *testing.T, startStore startStoreFn) { { start: timestamp.FromTime(minTime), end: timestamp.FromTime(maxTime), - matchers: []*storepb.LabelMatcher{ + matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "n", Value: "1"}, {Type: storepb.LabelMatcher_NEQ, Name: "i", Value: "b"}, {Type: storepb.LabelMatcher_RE, Name: "i", Value: "^(b|a).*$"}, @@ -633,7 +633,7 @@ func testStoreAPIsAcceptance(t *testing.T, startStore startStoreFn) { { start: timestamp.FromTime(minTime), end: timestamp.FromTime(maxTime), - matchers: []*storepb.LabelMatcher{ + matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_RE, Name: "n", Value: "(1|2)"}, }, expectedLabels: []labels.Labels{ @@ -646,7 +646,7 @@ func testStoreAPIsAcceptance(t *testing.T, startStore startStoreFn) { { start: timestamp.FromTime(minTime), end: timestamp.FromTime(maxTime), - matchers: []*storepb.LabelMatcher{ + matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_RE, Name: "i", Value: "a|b"}, }, expectedLabels: []labels.Labels{ @@ -657,7 +657,7 @@ func testStoreAPIsAcceptance(t *testing.T, startStore startStoreFn) { { start: timestamp.FromTime(minTime), end: timestamp.FromTime(maxTime), - matchers: []*storepb.LabelMatcher{ + matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_RE, Name: "i", Value: "(a|b)"}, }, expectedLabels: []labels.Labels{ @@ -668,7 +668,7 @@ func testStoreAPIsAcceptance(t *testing.T, startStore startStoreFn) { { start: timestamp.FromTime(minTime), end: timestamp.FromTime(maxTime), - matchers: []*storepb.LabelMatcher{ + matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_RE, Name: "n", Value: "x1|2"}, }, expectedLabels: []labels.Labels{ @@ -678,7 +678,7 @@ func testStoreAPIsAcceptance(t *testing.T, startStore startStoreFn) { { start: timestamp.FromTime(minTime), end: timestamp.FromTime(maxTime), - 
matchers: []*storepb.LabelMatcher{ + matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_RE, Name: "n", Value: "2|2\\.5"}, }, expectedLabels: []labels.Labels{ @@ -689,7 +689,7 @@ func testStoreAPIsAcceptance(t *testing.T, startStore startStoreFn) { { start: timestamp.FromTime(minTime), end: timestamp.FromTime(maxTime), - matchers: []*storepb.LabelMatcher{ + matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_RE, Name: "i", Value: "c||d"}, }, expectedLabels: []labels.Labels{ @@ -701,7 +701,7 @@ func testStoreAPIsAcceptance(t *testing.T, startStore startStoreFn) { { start: timestamp.FromTime(minTime), end: timestamp.FromTime(maxTime), - matchers: []*storepb.LabelMatcher{ + matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_RE, Name: "i", Value: "(c||d)"}, }, expectedLabels: []labels.Labels{ @@ -724,7 +724,7 @@ func testStoreAPIsAcceptance(t *testing.T, startStore startStoreFn) { start: timestamp.FromTime(minTime), end: timestamp.FromTime(maxTime), expectedNames: []string{"__name__", "foo", "region"}, - matchers: []*storepb.LabelMatcher{{Type: storepb.LabelMatcher_RE, Name: "region", Value: ".*"}}, + matchers: []storepb.LabelMatcher{{Type: storepb.LabelMatcher_RE, Name: "region", Value: ".*"}}, }, }, labelValuesCalls: []labelValuesCallCase{ @@ -732,7 +732,7 @@ func testStoreAPIsAcceptance(t *testing.T, startStore startStoreFn) { start: timestamp.FromTime(minTime), end: timestamp.FromTime(maxTime), label: "region", - matchers: []*storepb.LabelMatcher{ + matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "__name__", Value: "up"}, {Type: storepb.LabelMatcher_RE, Name: "region", Value: ".*"}, }, @@ -854,7 +854,7 @@ func testStoreAPIsSeriesSplitSamplesIntoChunksWithMaxSizeOf120(t *testing.T, sta testutil.Ok(t, client.Series(&storepb.SeriesRequest{ MinTime: baseT, MaxTime: baseT + offset, - Matchers: []*storepb.LabelMatcher{ + Matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "a", Value: "b"}, {Type: storepb.LabelMatcher_EQ, Name: "region", Value: "eu-west"}, }, @@ -864,7 +864,7 @@ func testStoreAPIsSeriesSplitSamplesIntoChunksWithMaxSizeOf120(t *testing.T, sta firstSeries := srv.SeriesSet[0] - testutil.Equals(t, []*labelpb.Label{ + testutil.Equals(t, []labelpb.Label{ {Name: "a", Value: "b"}, {Name: "region", Value: "eu-west"}, }, firstSeries.Labels) @@ -1144,7 +1144,7 @@ func TestProxyStoreWithTSDBSelector_Acceptance(t *testing.T) { testutil.Ok(t, client.Series(&storepb.SeriesRequest{ MinTime: minTime.Unix(), MaxTime: maxTime.Unix(), - Matchers: []*storepb.LabelMatcher{ + Matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "a", Value: "b"}, }, }, srv)) diff --git a/pkg/store/bucket.go b/pkg/store/bucket.go index 73f5c337ba..644cea2db8 100644 --- a/pkg/store/bucket.go +++ b/pkg/store/bucket.go @@ -408,7 +408,7 @@ type BucketStore struct { partitioner Partitioner filterConfig *FilterConfig - advLabelSets []*labelpb.LabelSet + advLabelSets []labelpb.LabelSet enableCompatibilityLabel bool // Every how many posting offset entry we pool in heap memory. Default in Prometheus is 32. @@ -709,9 +709,9 @@ func (s *BucketStore) SyncBlocks(ctx context.Context) error { // Sync advertise labels. 
s.mtx.Lock() - s.advLabelSets = make([]*labelpb.LabelSet, 0, len(s.advLabelSets)) + s.advLabelSets = make([]labelpb.LabelSet, 0, len(s.advLabelSets)) for _, bs := range s.blockSets { - s.advLabelSets = append(s.advLabelSets, &labelpb.LabelSet{Labels: labelpb.PromLabelsToLabelpbLabels(bs.labels.Copy())}) + s.advLabelSets = append(s.advLabelSets, labelpb.LabelSet{Labels: labelpb.PromLabelsToLabelpbLabels(bs.labels.Copy())}) } sort.Slice(s.advLabelSets, func(i, j int) bool { return strings.Compare(s.advLabelSets[i].String(), s.advLabelSets[j].String()) < 0 @@ -899,16 +899,16 @@ func (s *BucketStore) TimeRange() (mint, maxt int64) { } // TSDBInfos returns a list of infopb.TSDBInfos for blocks in the bucket store. -func (s *BucketStore) TSDBInfos() []*infopb.TSDBInfo { +func (s *BucketStore) TSDBInfos() []infopb.TSDBInfo { s.mtx.RLock() defer s.mtx.RUnlock() - infoMap := make(map[uint64][]*infopb.TSDBInfo, len(s.blocks)) + infoMap := make(map[uint64][]infopb.TSDBInfo, len(s.blocks)) for _, b := range s.blocks { lbls := labels.FromMap(b.meta.Thanos.Labels) hash := lbls.Hash() - infoMap[hash] = append(infoMap[hash], &infopb.TSDBInfo{ - Labels: &labelpb.LabelSet{ + infoMap[hash] = append(infoMap[hash], infopb.TSDBInfo{ + Labels: labelpb.LabelSet{ Labels: labelpb.PromLabelsToLabelpbLabels(lbls), }, MinTime: b.meta.MinTime, @@ -917,7 +917,7 @@ func (s *BucketStore) TSDBInfos() []*infopb.TSDBInfo { } // join adjacent blocks so we emit less TSDBInfos - res := make([]*infopb.TSDBInfo, 0, len(s.blocks)) + res := make([]infopb.TSDBInfo, 0, len(s.blocks)) for _, infos := range infoMap { sort.Slice(infos, func(i, j int) bool { return infos[i].MinTime < infos[j].MinTime }) @@ -938,13 +938,13 @@ func (s *BucketStore) TSDBInfos() []*infopb.TSDBInfo { return res } -func (s *BucketStore) LabelSet() []*labelpb.LabelSet { +func (s *BucketStore) LabelSet() []labelpb.LabelSet { s.mtx.RLock() labelSets := s.advLabelSets s.mtx.RUnlock() if s.enableCompatibilityLabel && len(labelSets) > 0 { - labelSets = append(labelSets, &labelpb.LabelSet{Labels: []*labelpb.Label{{Name: CompatibilityTypeLabelName, Value: "store"}}}) + labelSets = append(labelSets, labelpb.LabelSet{Labels: []labelpb.Label{{Name: CompatibilityTypeLabelName, Value: "store"}}}) } return labelSets @@ -981,7 +981,7 @@ func (s *BucketStore) limitMaxTime(maxt int64) int64 { type seriesEntry struct { lset labels.Labels refs []chunks.ChunkRef - chks []*storepb.AggrChunk + chks []storepb.AggrChunk } // blockSeriesClient is a storepb.Store_SeriesClient for a @@ -1299,13 +1299,13 @@ OUTER: // Schedule loading chunks. s.refs = make([]chunks.ChunkRef, 0, len(b.chkMetas)) - s.chks = make([]*storepb.AggrChunk, 0, len(b.chkMetas)) + s.chks = make([]storepb.AggrChunk, 0, len(b.chkMetas)) for j, meta := range b.chkMetas { if err := b.chunkr.addLoad(meta.Ref, len(b.entries), j); err != nil { return errors.Wrap(err, "add chunk load") } - s.chks = append(s.chks, &storepb.AggrChunk{ + s.chks = append(s.chks, storepb.AggrChunk{ MinTime: meta.MinTime, MaxTime: meta.MaxTime, }) @@ -1755,7 +1755,7 @@ func (s *BucketStore) Series(req *storepb.SeriesRequest, seriesSrv storepb.Store return srv.Flush() } -func chunksSize(chks []*storepb.AggrChunk) (size int) { +func chunksSize(chks []storepb.AggrChunk) (size int) { for _, chk := range chks { size += chk.Size() // This gets the encoded proto size. 
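Editorial note, not part of the patch: because `labelpb.LabelSet`, `storepb.AggrChunk` and similar fields are now generated with `(gogoproto.nullable) = false`, the store code here appends struct values instead of pointers and must take the address of a slice element when it needs to mutate one in place (hence the `populateChunk(&(res[pIdx.seriesEntry].chks[pIdx.chunk]), ...)` change just below). A tiny sketch of the value-slice pattern, assuming only the `labelpb.Label{Name, Value}` and `labelpb.LabelSet{Labels}` shapes visible in this diff:

```go
package main

import (
    "fmt"

    "github.com/thanos-io/thanos/pkg/store/labelpb"
)

func main() {
    // Value elements: no nil checks needed, the zero value is usable.
    sets := []labelpb.LabelSet{
        {Labels: []labelpb.Label{{Name: "cluster", Value: "a"}}},
    }

    // Mutating through a copy would be lost; take the element's address instead.
    s := &sets[0]
    s.Labels = append(s.Labels, labelpb.Label{Name: "region", Value: "r1"})

    fmt.Println(sets[0].Labels) // both labels are visible through the original slice
}
```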
} @@ -3627,7 +3627,7 @@ func (r *bucketChunkReader) loadChunks(ctx context.Context, res []seriesEntry, a chunkLen = n + 1 + int(chunkDataLen) if chunkLen <= len(cb) { c := rawChunk(cb[n:chunkLen]) - err = populateChunk(res[pIdx.seriesEntry].chks[pIdx.chunk], &c, aggrs, r.save, calculateChunkChecksum) + err = populateChunk(&(res[pIdx.seriesEntry].chks[pIdx.chunk]), &c, aggrs, r.save, calculateChunkChecksum) if err != nil { return errors.Wrap(err, "populate chunk") } @@ -3654,7 +3654,7 @@ func (r *bucketChunkReader) loadChunks(ctx context.Context, res []seriesEntry, a stats.add(ChunksFetched, 1, len(*nb)) c := rawChunk((*nb)[n:]) - err = populateChunk(res[pIdx.seriesEntry].chks[pIdx.chunk], &c, aggrs, r.save, calculateChunkChecksum) + err = populateChunk(&(res[pIdx.seriesEntry].chks[pIdx.chunk]), &c, aggrs, r.save, calculateChunkChecksum) if err != nil { r.block.chunkPool.Put(nb) return errors.Wrap(err, "populate chunk") @@ -3859,14 +3859,8 @@ func (s *queryStats) toHints() *hintspb.QueryStats { MergedSeriesCount: int64(s.mergedSeriesCount), MergedChunksCount: int64(s.mergedChunksCount), DataDownloadedSizeSum: int64(s.DataDownloadedSizeSum), - GetAllDuration: &types.Duration{ - Seconds: int64(s.GetAllDuration / time.Second), - Nanos: int32(s.GetAllDuration % time.Second), - }, - MergeDuration: &types.Duration{ - Seconds: int64(s.MergeDuration / time.Second), - Nanos: int32(s.MergeDuration % time.Second), - }, + GetAllDuration: s.GetAllDuration, + MergeDuration: s.MergeDuration, } } diff --git a/pkg/store/bucket_e2e_test.go b/pkg/store/bucket_e2e_test.go index 9135f40611..051328f12c 100644 --- a/pkg/store/bucket_e2e_test.go +++ b/pkg/store/bucket_e2e_test.go @@ -242,19 +242,19 @@ func testBucketStore_e2e(t *testing.T, ctx context.Context, s *storeSuite) { // TODO(bwplotka): Add those test cases to TSDB querier_test.go as well, there are no tests for matching. 
for i, tcase := range []struct { req *storepb.SeriesRequest - expected [][]*labelpb.Label + expected [][]labelpb.Label expectedChunkLen int }{ { req: &storepb.SeriesRequest{ - Matchers: []*storepb.LabelMatcher{ + Matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_RE, Name: "a", Value: "1|2"}, }, MinTime: mint, MaxTime: maxt, }, expectedChunkLen: 3, - expected: [][]*labelpb.Label{ + expected: [][]labelpb.Label{ {{Name: "a", Value: "1"}, {Name: "b", Value: "1"}, {Name: "ext1", Value: "value1"}}, {{Name: "a", Value: "1"}, {Name: "b", Value: "2"}, {Name: "ext1", Value: "value1"}}, {{Name: "a", Value: "1"}, {Name: "c", Value: "1"}, {Name: "ext2", Value: "value2"}}, @@ -267,7 +267,7 @@ func testBucketStore_e2e(t *testing.T, ctx context.Context, s *storeSuite) { }, { req: &storepb.SeriesRequest{ - Matchers: []*storepb.LabelMatcher{ + Matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_RE, Name: "a", Value: "1|2"}, }, MinTime: mint, @@ -275,7 +275,7 @@ func testBucketStore_e2e(t *testing.T, ctx context.Context, s *storeSuite) { WithoutReplicaLabels: []string{"ext1", "ext2"}, }, expectedChunkLen: 3, - expected: [][]*labelpb.Label{ + expected: [][]labelpb.Label{ {{Name: "a", Value: "1"}, {Name: "b", Value: "1"}}, {{Name: "a", Value: "1"}, {Name: "b", Value: "2"}}, {{Name: "a", Value: "1"}, {Name: "c", Value: "1"}}, @@ -288,14 +288,14 @@ func testBucketStore_e2e(t *testing.T, ctx context.Context, s *storeSuite) { }, { req: &storepb.SeriesRequest{ - Matchers: []*storepb.LabelMatcher{ + Matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_RE, Name: "a", Value: "1"}, }, MinTime: mint, MaxTime: maxt, }, expectedChunkLen: 3, - expected: [][]*labelpb.Label{ + expected: [][]labelpb.Label{ {{Name: "a", Value: "1"}, {Name: "b", Value: "1"}, {Name: "ext1", Value: "value1"}}, {{Name: "a", Value: "1"}, {Name: "b", Value: "2"}, {Name: "ext1", Value: "value1"}}, {{Name: "a", Value: "1"}, {Name: "c", Value: "1"}, {Name: "ext2", Value: "value2"}}, @@ -304,14 +304,14 @@ func testBucketStore_e2e(t *testing.T, ctx context.Context, s *storeSuite) { }, { req: &storepb.SeriesRequest{ - Matchers: []*storepb.LabelMatcher{ + Matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_NRE, Name: "a", Value: "2"}, }, MinTime: mint, MaxTime: maxt, }, expectedChunkLen: 3, - expected: [][]*labelpb.Label{ + expected: [][]labelpb.Label{ {{Name: "a", Value: "1"}, {Name: "b", Value: "1"}, {Name: "ext1", Value: "value1"}}, {{Name: "a", Value: "1"}, {Name: "b", Value: "2"}, {Name: "ext1", Value: "value1"}}, {{Name: "a", Value: "1"}, {Name: "c", Value: "1"}, {Name: "ext2", Value: "value2"}}, @@ -320,14 +320,14 @@ func testBucketStore_e2e(t *testing.T, ctx context.Context, s *storeSuite) { }, { req: &storepb.SeriesRequest{ - Matchers: []*storepb.LabelMatcher{ + Matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_NRE, Name: "a", Value: "not_existing"}, }, MinTime: mint, MaxTime: maxt, }, expectedChunkLen: 3, - expected: [][]*labelpb.Label{ + expected: [][]labelpb.Label{ {{Name: "a", Value: "1"}, {Name: "b", Value: "1"}, {Name: "ext1", Value: "value1"}}, {{Name: "a", Value: "1"}, {Name: "b", Value: "2"}, {Name: "ext1", Value: "value1"}}, {{Name: "a", Value: "1"}, {Name: "c", Value: "1"}, {Name: "ext2", Value: "value2"}}, @@ -340,14 +340,14 @@ func testBucketStore_e2e(t *testing.T, ctx context.Context, s *storeSuite) { }, { req: &storepb.SeriesRequest{ - Matchers: []*storepb.LabelMatcher{ + Matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_NRE, Name: "not_existing", Value: "1"}, }, 
MinTime: mint, MaxTime: maxt, }, expectedChunkLen: 3, - expected: [][]*labelpb.Label{ + expected: [][]labelpb.Label{ {{Name: "a", Value: "1"}, {Name: "b", Value: "1"}, {Name: "ext1", Value: "value1"}}, {{Name: "a", Value: "1"}, {Name: "b", Value: "2"}, {Name: "ext1", Value: "value1"}}, {{Name: "a", Value: "1"}, {Name: "c", Value: "1"}, {Name: "ext2", Value: "value2"}}, @@ -360,14 +360,14 @@ func testBucketStore_e2e(t *testing.T, ctx context.Context, s *storeSuite) { }, { req: &storepb.SeriesRequest{ - Matchers: []*storepb.LabelMatcher{ + Matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "b", Value: "2"}, }, MinTime: mint, MaxTime: maxt, }, expectedChunkLen: 3, - expected: [][]*labelpb.Label{ + expected: [][]labelpb.Label{ {{Name: "a", Value: "1"}, {Name: "b", Value: "2"}, {Name: "ext1", Value: "value1"}}, {{Name: "a", Value: "2"}, {Name: "b", Value: "2"}, {Name: "ext1", Value: "value1"}}, }, @@ -375,7 +375,7 @@ func testBucketStore_e2e(t *testing.T, ctx context.Context, s *storeSuite) { { // Matching by external label should work as well. req: &storepb.SeriesRequest{ - Matchers: []*storepb.LabelMatcher{ + Matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "a", Value: "1"}, {Type: storepb.LabelMatcher_EQ, Name: "ext2", Value: "value2"}, }, @@ -383,14 +383,14 @@ func testBucketStore_e2e(t *testing.T, ctx context.Context, s *storeSuite) { MaxTime: maxt, }, expectedChunkLen: 3, - expected: [][]*labelpb.Label{ + expected: [][]labelpb.Label{ {{Name: "a", Value: "1"}, {Name: "c", Value: "1"}, {Name: "ext2", Value: "value2"}}, {{Name: "a", Value: "1"}, {Name: "c", Value: "2"}, {Name: "ext2", Value: "value2"}}, }, }, { req: &storepb.SeriesRequest{ - Matchers: []*storepb.LabelMatcher{ + Matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "a", Value: "1"}, {Type: storepb.LabelMatcher_EQ, Name: "ext2", Value: "wrong-value"}, }, @@ -400,14 +400,14 @@ func testBucketStore_e2e(t *testing.T, ctx context.Context, s *storeSuite) { }, { req: &storepb.SeriesRequest{ - Matchers: []*storepb.LabelMatcher{ + Matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_NEQ, Name: "a", Value: "2"}, }, MinTime: mint, MaxTime: maxt, }, expectedChunkLen: 3, - expected: [][]*labelpb.Label{ + expected: [][]labelpb.Label{ {{Name: "a", Value: "1"}, {Name: "b", Value: "1"}, {Name: "ext1", Value: "value1"}}, {{Name: "a", Value: "1"}, {Name: "b", Value: "2"}, {Name: "ext1", Value: "value1"}}, {{Name: "a", Value: "1"}, {Name: "c", Value: "1"}, {Name: "ext2", Value: "value2"}}, @@ -416,14 +416,14 @@ func testBucketStore_e2e(t *testing.T, ctx context.Context, s *storeSuite) { }, { req: &storepb.SeriesRequest{ - Matchers: []*storepb.LabelMatcher{ + Matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_NEQ, Name: "a", Value: "not_existing"}, }, MinTime: mint, MaxTime: maxt, }, expectedChunkLen: 3, - expected: [][]*labelpb.Label{ + expected: [][]labelpb.Label{ {{Name: "a", Value: "1"}, {Name: "b", Value: "1"}, {Name: "ext1", Value: "value1"}}, {{Name: "a", Value: "1"}, {Name: "b", Value: "2"}, {Name: "ext1", Value: "value1"}}, {{Name: "a", Value: "1"}, {Name: "c", Value: "1"}, {Name: "ext2", Value: "value2"}}, @@ -438,7 +438,7 @@ func testBucketStore_e2e(t *testing.T, ctx context.Context, s *storeSuite) { // Problem: Matcher that was selecting NO series, was ignored instead of passed as emptyPosting to Intersect. 
{ req: &storepb.SeriesRequest{ - Matchers: []*storepb.LabelMatcher{ + Matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "a", Value: "1"}, {Type: storepb.LabelMatcher_RE, Name: "non_existing", Value: "something"}, }, @@ -449,7 +449,7 @@ func testBucketStore_e2e(t *testing.T, ctx context.Context, s *storeSuite) { // Test skip-chunk option. { req: &storepb.SeriesRequest{ - Matchers: []*storepb.LabelMatcher{ + Matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "a", Value: "1"}, }, MinTime: mint, @@ -457,7 +457,7 @@ func testBucketStore_e2e(t *testing.T, ctx context.Context, s *storeSuite) { SkipChunks: true, }, expectedChunkLen: 0, - expected: [][]*labelpb.Label{ + expected: [][]labelpb.Label{ {{Name: "a", Value: "1"}, {Name: "b", Value: "1"}, {Name: "ext1", Value: "value1"}}, {{Name: "a", Value: "1"}, {Name: "b", Value: "2"}, {Name: "ext1", Value: "value1"}}, {{Name: "a", Value: "1"}, {Name: "c", Value: "1"}, {Name: "ext2", Value: "value2"}}, @@ -578,14 +578,14 @@ func TestBucketStore_TimePartitioning_e2e(t *testing.T) { testutil.Equals(t, filterMaxTime.PrometheusTimestamp(), maxt) req := &storepb.SeriesRequest{ - Matchers: []*storepb.LabelMatcher{ + Matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "a", Value: "1"}, }, MinTime: mint, MaxTime: timestamp.FromTime(time.Now().AddDate(0, 0, 1)), } - expectedLabels := [][]*labelpb.Label{ + expectedLabels := [][]labelpb.Label{ {{Name: "a", Value: "1"}, {Name: "b", Value: "1"}, {Name: "ext1", Value: "value1"}}, {{Name: "a", Value: "1"}, {Name: "b", Value: "2"}, {Name: "ext1", Value: "value1"}}, {{Name: "a", Value: "1"}, {Name: "c", Value: "1"}, {Name: "ext2", Value: "value2"}}, @@ -653,7 +653,7 @@ func TestBucketStore_Series_ChunksLimiter_e2e(t *testing.T) { testutil.Ok(t, s.store.SyncBlocks(ctx)) req := &storepb.SeriesRequest{ - Matchers: []*storepb.LabelMatcher{ + Matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "a", Value: "1"}, }, MinTime: minTimeDuration.PrometheusTimestamp(), @@ -698,7 +698,7 @@ func TestBucketStore_Series_CustomBytesLimiters_e2e(t *testing.T) { testutil.Ok(t, s.store.SyncBlocks(ctx)) req := &storepb.SeriesRequest{ - Matchers: []*storepb.LabelMatcher{ + Matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "a", Value: "1"}, }, MinTime: minTimeDuration.PrometheusTimestamp(), @@ -753,7 +753,7 @@ func TestBucketStore_LabelNames_e2e(t *testing.T) { req: &storepb.LabelNamesRequest{ Start: timestamp.FromTime(minTime), End: timestamp.FromTime(maxTime), - Matchers: []*storepb.LabelMatcher{ + Matchers: []storepb.LabelMatcher{ { Type: storepb.LabelMatcher_EQ, Name: "a", @@ -767,7 +767,7 @@ func TestBucketStore_LabelNames_e2e(t *testing.T) { req: &storepb.LabelNamesRequest{ Start: timestamp.FromTime(minTime), End: timestamp.FromTime(maxTime), - Matchers: []*storepb.LabelMatcher{ + Matchers: []storepb.LabelMatcher{ { Type: storepb.LabelMatcher_EQ, Name: "b", @@ -782,7 +782,7 @@ func TestBucketStore_LabelNames_e2e(t *testing.T) { req: &storepb.LabelNamesRequest{ Start: timestamp.FromTime(minTime), End: timestamp.FromTime(maxTime), - Matchers: []*storepb.LabelMatcher{ + Matchers: []storepb.LabelMatcher{ { Type: storepb.LabelMatcher_EQ, Name: "b", @@ -796,7 +796,7 @@ func TestBucketStore_LabelNames_e2e(t *testing.T) { req: &storepb.LabelNamesRequest{ Start: timestamp.FromTime(time.Now().Add(-24 * time.Hour)), End: timestamp.FromTime(time.Now().Add(-23 * time.Hour)), - Matchers: []*storepb.LabelMatcher{ + Matchers: []storepb.LabelMatcher{ 
{ Type: storepb.LabelMatcher_EQ, Name: "a", @@ -847,7 +847,7 @@ func TestBucketStore_LabelNames_SeriesLimiter_e2e(t *testing.T) { s := prepareStoreWithTestBlocks(t, dir, bkt, false, NewChunksLimiterFactory(0), NewSeriesLimiterFactory(testData.maxSeriesLimit), NewBytesLimiterFactory(0), emptyRelabelConfig, allowAllFilterConf) testutil.Ok(t, s.store.SyncBlocks(ctx)) req := &storepb.LabelNamesRequest{ - Matchers: []*storepb.LabelMatcher{ + Matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "a", Value: "1"}, }, Start: minTimeDuration.PrometheusTimestamp(), @@ -911,7 +911,7 @@ func TestBucketStore_LabelValues_e2e(t *testing.T) { Label: "a", Start: timestamp.FromTime(minTime), End: timestamp.FromTime(maxTime), - Matchers: []*storepb.LabelMatcher{ + Matchers: []storepb.LabelMatcher{ { Type: storepb.LabelMatcher_EQ, Name: "a", @@ -926,7 +926,7 @@ func TestBucketStore_LabelValues_e2e(t *testing.T) { Label: "a", Start: timestamp.FromTime(minTime), End: timestamp.FromTime(maxTime), - Matchers: []*storepb.LabelMatcher{ + Matchers: []storepb.LabelMatcher{ { Type: storepb.LabelMatcher_EQ, Name: "a", @@ -954,7 +954,7 @@ func TestBucketStore_LabelValues_e2e(t *testing.T) { Label: "ext1", Start: timestamp.FromTime(minTime), End: timestamp.FromTime(maxTime), - Matchers: []*storepb.LabelMatcher{ + Matchers: []storepb.LabelMatcher{ { Type: storepb.LabelMatcher_EQ, Name: "c", @@ -1010,7 +1010,7 @@ func TestBucketStore_LabelValues_SeriesLimiter_e2e(t *testing.T) { Label: "a", Start: minTimeDuration.PrometheusTimestamp(), End: maxTimeDuration.PrometheusTimestamp(), - Matchers: []*storepb.LabelMatcher{ + Matchers: []storepb.LabelMatcher{ { Type: storepb.LabelMatcher_EQ, Name: "a", diff --git a/pkg/store/bucket_test.go b/pkg/store/bucket_test.go index 6aa584b553..e7247851bc 100644 --- a/pkg/store/bucket_test.go +++ b/pkg/store/bucket_test.go @@ -714,22 +714,22 @@ func TestBucketStore_TSDBInfo(t *testing.T) { testutil.Ok(t, bucketStore.SyncBlocks(ctx)) infos := bucketStore.TSDBInfos() - slices.SortFunc(infos, func(a, b *infopb.TSDBInfo) int { + slices.SortFunc(infos, func(a, b infopb.TSDBInfo) int { return strings.Compare(a.Labels.String(), b.Labels.String()) }) - testutil.Equals(t, infos, []*infopb.TSDBInfo{ + testutil.Equals(t, infos, []infopb.TSDBInfo{ { - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{{Name: "a", Value: "b"}}}, + Labels: labelpb.LabelSet{Labels: []labelpb.Label{{Name: "a", Value: "b"}}}, MinTime: 0, MaxTime: 2000, }, { - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{{Name: "a", Value: "b"}}}, + Labels: labelpb.LabelSet{Labels: []labelpb.Label{{Name: "a", Value: "b"}}}, MinTime: 3000, MaxTime: 5000, }, { - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{{Name: "a", Value: "c"}}}, + Labels: labelpb.LabelSet{Labels: []labelpb.Label{{Name: "a", Value: "c"}}}, MinTime: 0, MaxTime: 2000, }, @@ -806,32 +806,32 @@ func testSharding(t *testing.T, reuseDisk string, bkt objstore.Bucket, all ...ul name string relabel string expectedIDs []ulid.ULID - expectedAdvLabels []*labelpb.LabelSet + expectedAdvLabels []labelpb.LabelSet }{ { name: "no sharding", expectedIDs: all, - expectedAdvLabels: []*labelpb.LabelSet{ + expectedAdvLabels: []labelpb.LabelSet{ { - Labels: []*labelpb.Label{ + Labels: []labelpb.Label{ {Name: "cluster", Value: "a"}, {Name: "region", Value: "r1"}, }, }, { - Labels: []*labelpb.Label{ + Labels: []labelpb.Label{ {Name: "cluster", Value: "a"}, {Name: "region", Value: "r2"}, }, }, { - Labels: []*labelpb.Label{ + Labels: []labelpb.Label{ {Name: "cluster", 
Value: "b"}, {Name: "region", Value: "r1"}, }, }, { - Labels: []*labelpb.Label{ + Labels: []labelpb.Label{ {Name: CompatibilityTypeLabelName, Value: "store"}, }, }, @@ -846,15 +846,15 @@ func testSharding(t *testing.T, reuseDisk string, bkt objstore.Bucket, all ...ul - cluster `, expectedIDs: []ulid.ULID{all[2]}, - expectedAdvLabels: []*labelpb.LabelSet{ + expectedAdvLabels: []labelpb.LabelSet{ { - Labels: []*labelpb.Label{ + Labels: []labelpb.Label{ {Name: "cluster", Value: "b"}, {Name: "region", Value: "r1"}, }, }, { - Labels: []*labelpb.Label{ + Labels: []labelpb.Label{ {Name: CompatibilityTypeLabelName, Value: "store"}, }, }, @@ -869,21 +869,21 @@ func testSharding(t *testing.T, reuseDisk string, bkt objstore.Bucket, all ...ul - cluster `, expectedIDs: []ulid.ULID{all[0], all[1], all[3]}, - expectedAdvLabels: []*labelpb.LabelSet{ + expectedAdvLabels: []labelpb.LabelSet{ { - Labels: []*labelpb.Label{ + Labels: []labelpb.Label{ {Name: "cluster", Value: "a"}, {Name: "region", Value: "r1"}, }, }, { - Labels: []*labelpb.Label{ + Labels: []labelpb.Label{ {Name: "cluster", Value: "a"}, {Name: "region", Value: "r2"}, }, }, { - Labels: []*labelpb.Label{ + Labels: []labelpb.Label{ {Name: CompatibilityTypeLabelName, Value: "store"}, }, }, @@ -902,15 +902,15 @@ func testSharding(t *testing.T, reuseDisk string, bkt objstore.Bucket, all ...ul - region `, expectedIDs: []ulid.ULID{all[0], all[1]}, - expectedAdvLabels: []*labelpb.LabelSet{ + expectedAdvLabels: []labelpb.LabelSet{ { - Labels: []*labelpb.Label{ + Labels: []labelpb.Label{ {Name: "cluster", Value: "a"}, {Name: "region", Value: "r1"}, }, }, { - Labels: []*labelpb.Label{ + Labels: []labelpb.Label{ {Name: CompatibilityTypeLabelName, Value: "store"}, }, }, @@ -929,7 +929,7 @@ func testSharding(t *testing.T, reuseDisk string, bkt objstore.Bucket, all ...ul - region `, expectedIDs: []ulid.ULID{}, - expectedAdvLabels: []*labelpb.LabelSet{}, + expectedAdvLabels: []labelpb.LabelSet{}, }, } { t.Run(sc.name, func(t *testing.T) { @@ -1754,7 +1754,7 @@ func TestBucketSeries_OneBlock_InMemIndexCacheSegfault(t *testing.T) { testutil.Ok(t, store.Series(&storepb.SeriesRequest{ MinTime: 0, MaxTime: int64(numSeries) - 1, - Matchers: []*storepb.LabelMatcher{ + Matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "foo", Value: "bar"}, {Type: storepb.LabelMatcher_EQ, Name: "b", Value: "1"}, // This bug shows only when we use lot's of symbols for matching. @@ -1769,7 +1769,7 @@ func TestBucketSeries_OneBlock_InMemIndexCacheSegfault(t *testing.T) { testutil.Ok(t, store.Series(&storepb.SeriesRequest{ MinTime: 0, MaxTime: int64(numSeries) - 1, - Matchers: []*storepb.LabelMatcher{ + Matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "foo", Value: "bar"}, {Type: storepb.LabelMatcher_EQ, Name: "b", Value: "2"}, // This bug shows only when we use lot's of symbols for matching. @@ -1786,7 +1786,7 @@ func TestBucketSeries_OneBlock_InMemIndexCacheSegfault(t *testing.T) { testutil.Ok(t, store.Series(&storepb.SeriesRequest{ MinTime: 0, MaxTime: int64(numSeries) - 1, - Matchers: []*storepb.LabelMatcher{ + Matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "foo", Value: "bar"}, {Type: storepb.LabelMatcher_EQ, Name: "b", Value: "1"}, // This bug shows only when we use lot's of symbols for matching. 
@@ -1808,14 +1808,14 @@ func TestSeries_RequestAndResponseHints(t *testing.T) { Req: &storepb.SeriesRequest{ MinTime: 0, MaxTime: 1, - Matchers: []*storepb.LabelMatcher{ + Matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "foo", Value: "bar"}, }, }, ExpectedSeries: seriesSet1, ExpectedHints: []hintspb.SeriesResponseHints{ { - QueriedBlocks: []*hintspb.Block{ + QueriedBlocks: []hintspb.Block{ {Id: block1.String()}, }, }, @@ -1826,14 +1826,14 @@ func TestSeries_RequestAndResponseHints(t *testing.T) { Req: &storepb.SeriesRequest{ MinTime: 0, MaxTime: 3, - Matchers: []*storepb.LabelMatcher{ + Matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "foo", Value: "bar"}, }, }, ExpectedSeries: append(append([]*storepb.Series{}, seriesSet1...), seriesSet2...), ExpectedHints: []hintspb.SeriesResponseHints{ { - QueriedBlocks: []*hintspb.Block{ + QueriedBlocks: []hintspb.Block{ {Id: block1.String()}, {Id: block2.String()}, }, @@ -1845,11 +1845,11 @@ func TestSeries_RequestAndResponseHints(t *testing.T) { Req: &storepb.SeriesRequest{ MinTime: 0, MaxTime: 3, - Matchers: []*storepb.LabelMatcher{ + Matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "foo", Value: "bar"}, }, Hints: mustMarshalAny(&hintspb.SeriesRequestHints{ - BlockMatchers: []*storepb.LabelMatcher{ + BlockMatchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: block.BlockIDLabel, Value: block1.String()}, }, }), @@ -1857,7 +1857,7 @@ func TestSeries_RequestAndResponseHints(t *testing.T) { ExpectedSeries: seriesSet1, ExpectedHints: []hintspb.SeriesResponseHints{ { - QueriedBlocks: []*hintspb.Block{ + QueriedBlocks: []hintspb.Block{ {Id: block1.String()}, }, }, @@ -1868,11 +1868,11 @@ func TestSeries_RequestAndResponseHints(t *testing.T) { Req: &storepb.SeriesRequest{ MinTime: 0, MaxTime: 3, - Matchers: []*storepb.LabelMatcher{ + Matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "foo", Value: "bar"}, }, Hints: mustMarshalAny(&hintspb.SeriesRequestHints{ - BlockMatchers: []*storepb.LabelMatcher{ + BlockMatchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: block.BlockIDLabel, Value: block1.String()}, }, EnableQueryStats: true, @@ -1881,7 +1881,7 @@ func TestSeries_RequestAndResponseHints(t *testing.T) { ExpectedSeries: seriesSet1, ExpectedHints: []hintspb.SeriesResponseHints{ { - QueriedBlocks: []*hintspb.Block{ + QueriedBlocks: []hintspb.Block{ {Id: block1.String()}, }, QueryStats: &hintspb.QueryStats{ @@ -1964,7 +1964,7 @@ func TestSeries_ErrorUnmarshallingRequestHints(t *testing.T) { req := &storepb.SeriesRequest{ MinTime: 0, MaxTime: 3, - Matchers: []*storepb.LabelMatcher{ + Matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "foo", Value: "bar"}, }, Hints: mustMarshalAny(&hintspb.SeriesResponseHints{}), @@ -2082,7 +2082,7 @@ func TestSeries_BlockWithMultipleChunks(t *testing.T) { req := &storepb.SeriesRequest{ MinTime: testData.reqMinTime, MaxTime: testData.reqMaxTime, - Matchers: []*storepb.LabelMatcher{ + Matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "__name__", Value: "test"}, }, } @@ -2212,7 +2212,7 @@ func TestSeries_SeriesSortedWithoutReplicaLabels(t *testing.T) { req := &storepb.SeriesRequest{ MinTime: math.MinInt, MaxTime: math.MaxInt64, - Matchers: []*storepb.LabelMatcher{ + Matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_RE, Name: "a", Value: ".+"}, }, WithoutReplicaLabels: testData.replicaLabels, @@ -2431,7 +2431,7 @@ func TestLabelNamesAndValuesHints(t 
*testing.T) { }, expectedNames: labelNamesFromSeriesSet(seriesSet1), expectedNamesHints: hintspb.LabelNamesResponseHints{ - QueriedBlocks: []*hintspb.Block{ + QueriedBlocks: []hintspb.Block{ {Id: block1.String()}, }, }, @@ -2443,7 +2443,7 @@ func TestLabelNamesAndValuesHints(t *testing.T) { }, expectedValues: []string{"1"}, expectedValuesHints: hintspb.LabelValuesResponseHints{ - QueriedBlocks: []*hintspb.Block{ + QueriedBlocks: []hintspb.Block{ {Id: block1.String()}, }, }, @@ -2459,7 +2459,7 @@ func TestLabelNamesAndValuesHints(t *testing.T) { append(append([]*storepb.Series{}, seriesSet1...), seriesSet2...), ), expectedNamesHints: hintspb.LabelNamesResponseHints{ - QueriedBlocks: []*hintspb.Block{ + QueriedBlocks: []hintspb.Block{ {Id: block1.String()}, {Id: block2.String()}, }, @@ -2472,7 +2472,7 @@ func TestLabelNamesAndValuesHints(t *testing.T) { }, expectedValues: []string{"1"}, expectedValuesHints: hintspb.LabelValuesResponseHints{ - QueriedBlocks: []*hintspb.Block{ + QueriedBlocks: []hintspb.Block{ {Id: block1.String()}, {Id: block2.String()}, }, @@ -2484,14 +2484,14 @@ func TestLabelNamesAndValuesHints(t *testing.T) { Start: 0, End: 3, Hints: mustMarshalAny(&hintspb.LabelNamesRequestHints{ - BlockMatchers: []*storepb.LabelMatcher{ + BlockMatchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: block.BlockIDLabel, Value: block1.String()}, }, }), }, expectedNames: labelNamesFromSeriesSet(seriesSet1), expectedNamesHints: hintspb.LabelNamesResponseHints{ - QueriedBlocks: []*hintspb.Block{ + QueriedBlocks: []hintspb.Block{ {Id: block1.String()}, }, }, @@ -2501,14 +2501,14 @@ func TestLabelNamesAndValuesHints(t *testing.T) { Start: 0, End: 3, Hints: mustMarshalAny(&hintspb.LabelValuesRequestHints{ - BlockMatchers: []*storepb.LabelMatcher{ + BlockMatchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: block.BlockIDLabel, Value: block1.String()}, }, }), }, expectedValues: []string{"1"}, expectedValuesHints: hintspb.LabelValuesResponseHints{ - QueriedBlocks: []*hintspb.Block{ + QueriedBlocks: []hintspb.Block{ {Id: block1.String()}, }, }, @@ -2631,7 +2631,7 @@ func TestSeries_ChunksHaveHashRepresentation(t *testing.T) { req := &storepb.SeriesRequest{ MinTime: int64(reqMinTime), MaxTime: int64(reqMaxTime), - Matchers: []*storepb.LabelMatcher{ + Matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "__name__", Value: "test"}, }, } @@ -2833,7 +2833,7 @@ func benchmarkBlockSeriesWithConcurrency(b *testing.B, concurrency int, blockMet req := &storepb.SeriesRequest{ MinTime: blockMeta.MinTime, MaxTime: blockMeta.MaxTime, - Matchers: []*storepb.LabelMatcher{ + Matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_RE, Name: "i", Value: labelMatcher}, }, SkipChunks: false, @@ -3627,7 +3627,7 @@ func TestBucketStoreDedupOnBlockSeriesSet(t *testing.T) { WithoutReplicaLabels: []string{"replica"}, MinTime: timestamp.FromTime(minTime), MaxTime: timestamp.FromTime(maxTime), - Matchers: []*storepb.LabelMatcher{ + Matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_NEQ, Name: "z", Value: ""}, }, }, srv)) @@ -3867,7 +3867,7 @@ func TestBucketStoreStreamingSeriesLimit(t *testing.T) { req := &storepb.SeriesRequest{ MinTime: timestamp.FromTime(minTime), MaxTime: timestamp.FromTime(maxTime), - Matchers: []*storepb.LabelMatcher{ + Matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "a", Value: "1"}, {Type: storepb.LabelMatcher_RE, Name: "z", Value: "1|2"}, }, @@ -3961,7 +3961,7 @@ func TestBucketStoreMetadataLimit(t *testing.T) 
{ MinTime: timestamp.FromTime(minTime), MaxTime: timestamp.FromTime(maxTime), Limit: testData.limit, - Matchers: []*storepb.LabelMatcher{ + Matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "j", Value: "foo"}, }, } @@ -3992,7 +3992,7 @@ func TestBucketStoreMetadataLimit(t *testing.T) { Start: timestamp.FromTime(minTime), End: timestamp.FromTime(maxTime), Limit: testData.limit, - Matchers: []*storepb.LabelMatcher{ + Matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "j", Value: "foo"}, }, } @@ -4023,7 +4023,7 @@ func TestBucketStoreMetadataLimit(t *testing.T) { End: timestamp.FromTime(maxTime), Label: "j", Limit: testData.limit, - Matchers: []*storepb.LabelMatcher{ + Matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_RE, Name: "j", Value: "(foo|bar)"}, }, } diff --git a/pkg/store/hintspb/custom.go b/pkg/store/hintspb/custom.go index be02e41dd6..bf82d245e2 100644 --- a/pkg/store/hintspb/custom.go +++ b/pkg/store/hintspb/custom.go @@ -6,19 +6,19 @@ package hintspb import "github.com/oklog/ulid" func (m *SeriesResponseHints) AddQueriedBlock(id ulid.ULID) { - m.QueriedBlocks = append(m.QueriedBlocks, &Block{ + m.QueriedBlocks = append(m.QueriedBlocks, Block{ Id: id.String(), }) } func (m *LabelNamesResponseHints) AddQueriedBlock(id ulid.ULID) { - m.QueriedBlocks = append(m.QueriedBlocks, &Block{ + m.QueriedBlocks = append(m.QueriedBlocks, Block{ Id: id.String(), }) } func (m *LabelValuesResponseHints) AddQueriedBlock(id ulid.ULID) { - m.QueriedBlocks = append(m.QueriedBlocks, &Block{ + m.QueriedBlocks = append(m.QueriedBlocks, Block{ Id: id.String(), }) } @@ -48,9 +48,6 @@ func (m *QueryStats) Merge(other *QueryStats) { m.ChunksTouched += other.ChunksTouched m.ChunksTouchedSizeSum += other.ChunksTouchedSizeSum - m.GetAllDuration.Seconds += other.GetAllDuration.Seconds - m.GetAllDuration.Nanos += other.GetAllDuration.Nanos - - m.MergeDuration.Seconds += other.MergeDuration.Seconds - m.MergeDuration.Nanos += other.MergeDuration.Nanos + m.GetAllDuration += other.GetAllDuration + m.MergeDuration += other.MergeDuration } diff --git a/pkg/store/hintspb/custom_test.go b/pkg/store/hintspb/custom_test.go index b93a47dd79..3d9691f7c1 100644 --- a/pkg/store/hintspb/custom_test.go +++ b/pkg/store/hintspb/custom_test.go @@ -4,88 +4,30 @@ package hintspb import ( + "reflect" "testing" "github.com/efficientgo/core/testutil" - protobuf "github.com/gogo/protobuf/types" ) func TestQueryStatsMerge(t *testing.T) { - s := &QueryStats{ - BlocksQueried: 1, - MergedSeriesCount: 1, - MergedChunksCount: 1, - DataDownloadedSizeSum: 1, - PostingsFetched: 1, - PostingsToFetch: 1, - PostingsFetchCount: 1, - PostingsFetchedSizeSum: 1, - PostingsTouched: 1, - PostingsTouchedSizeSum: 1, - SeriesFetched: 1, - SeriesFetchCount: 1, - SeriesFetchedSizeSum: 1, - SeriesTouched: 1, - SeriesTouchedSizeSum: 1, - ChunksFetched: 1, - ChunksFetchCount: 1, - ChunksFetchedSizeSum: 1, - ChunksTouched: 1, - ChunksTouchedSizeSum: 1, - GetAllDuration: &protobuf.Duration{Seconds: 1, Nanos: 1}, - MergeDuration: &protobuf.Duration{Seconds: 1, Nanos: 1}, + s := &QueryStats{} + ps := reflect.Indirect(reflect.ValueOf(s)) + for i := 0; i < ps.NumField(); i++ { + ps.FieldByIndex([]int{i}).SetInt(int64(1)) } - o := &QueryStats{ - BlocksQueried: 1, - MergedSeriesCount: 1, - MergedChunksCount: 1, - DataDownloadedSizeSum: 1, - PostingsFetched: 1, - PostingsToFetch: 1, - PostingsFetchCount: 1, - PostingsFetchedSizeSum: 1, - PostingsTouched: 1, - PostingsTouchedSizeSum: 1, - SeriesFetched: 1, - 
SeriesFetchCount: 1, - SeriesFetchedSizeSum: 1, - SeriesTouched: 1, - SeriesTouchedSizeSum: 1, - ChunksFetched: 1, - ChunksFetchCount: 1, - ChunksFetchedSizeSum: 1, - ChunksTouched: 1, - ChunksTouchedSizeSum: 1, - GetAllDuration: &protobuf.Duration{Seconds: 1, Nanos: 1}, - MergeDuration: &protobuf.Duration{Seconds: 1, Nanos: 1}, + o := &QueryStats{} + po := reflect.Indirect(reflect.ValueOf(o)) + for i := 0; i < po.NumField(); i++ { + po.FieldByIndex([]int{i}).SetInt(int64(100)) } - s.Merge(o) // Expected stats. - e := &QueryStats{ - BlocksQueried: 2, - MergedSeriesCount: 2, - MergedChunksCount: 2, - DataDownloadedSizeSum: 2, - PostingsFetched: 2, - PostingsToFetch: 2, - PostingsFetchCount: 2, - PostingsFetchedSizeSum: 2, - PostingsTouched: 2, - PostingsTouchedSizeSum: 2, - SeriesFetched: 2, - SeriesFetchCount: 2, - SeriesFetchedSizeSum: 2, - SeriesTouched: 2, - SeriesTouchedSizeSum: 2, - ChunksFetched: 2, - ChunksFetchCount: 2, - ChunksFetchedSizeSum: 2, - ChunksTouched: 2, - ChunksTouchedSizeSum: 2, - GetAllDuration: &protobuf.Duration{Seconds: 2, Nanos: 2}, - MergeDuration: &protobuf.Duration{Seconds: 2, Nanos: 2}, + e := &QueryStats{} + pe := reflect.Indirect(reflect.ValueOf(e)) + for i := 0; i < pe.NumField(); i++ { + pe.FieldByIndex([]int{i}).SetInt(int64(101)) } testutil.Equals(t, e, s) } diff --git a/pkg/store/hintspb/hints.pb.go b/pkg/store/hintspb/hints.pb.go index 820e70277f..2098d7489c 100644 --- a/pkg/store/hintspb/hints.pb.go +++ b/pkg/store/hintspb/hints.pb.go @@ -5,12 +5,16 @@ package hintspb import ( fmt "fmt" + + _ "github.com/gogo/protobuf/gogoproto" + proto "github.com/gogo/protobuf/proto" + github_com_gogo_protobuf_types "github.com/gogo/protobuf/types" + io "io" math "math" math_bits "math/bits" + time "time" - proto "github.com/gogo/protobuf/proto" - protobuf "github.com/gogo/protobuf/types" storepb "github.com/thanos-io/thanos/pkg/store/storepb" ) @@ -18,6 +22,7 @@ import ( var _ = proto.Marshal var _ = fmt.Errorf var _ = math.Inf +var _ = time.Kitchen // This is a compile-time assertion to ensure that this generated file // is compatible with the proto package it is being compiled against. @@ -29,11 +34,8 @@ type SeriesRequestHints struct { /// block_matchers is a list of label matchers that are evaluated against each single block's /// labels to filter which blocks get queried. If the list is empty, no per-block filtering /// is applied. 
- BlockMatchers []*storepb.LabelMatcher `protobuf:"bytes,1,rep,name=block_matchers,json=blockMatchers,proto3" json:"block_matchers,omitempty"` - EnableQueryStats bool `protobuf:"varint,2,opt,name=enable_query_stats,json=enableQueryStats,proto3" json:"enable_query_stats,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + BlockMatchers []storepb.LabelMatcher `protobuf:"bytes,1,rep,name=block_matchers,json=blockMatchers,proto3" json:"block_matchers"` + EnableQueryStats bool `protobuf:"varint,2,opt,name=enable_query_stats,json=enableQueryStats,proto3" json:"enable_query_stats,omitempty"` } func (m *SeriesRequestHints) Reset() { *m = SeriesRequestHints{} } @@ -69,28 +71,11 @@ func (m *SeriesRequestHints) XXX_DiscardUnknown() { var xxx_messageInfo_SeriesRequestHints proto.InternalMessageInfo -func (m *SeriesRequestHints) GetBlockMatchers() []*storepb.LabelMatcher { - if m != nil { - return m.BlockMatchers - } - return nil -} - -func (m *SeriesRequestHints) GetEnableQueryStats() bool { - if m != nil { - return m.EnableQueryStats - } - return false -} - type SeriesResponseHints struct { /// queried_blocks is the list of blocks that have been queried. - QueriedBlocks []*Block `protobuf:"bytes,1,rep,name=queried_blocks,json=queriedBlocks,proto3" json:"queried_blocks,omitempty"` + QueriedBlocks []Block `protobuf:"bytes,1,rep,name=queried_blocks,json=queriedBlocks,proto3" json:"queried_blocks"` /// query_stats contains statistics of querying store gateway. - QueryStats *QueryStats `protobuf:"bytes,2,opt,name=query_stats,json=queryStats,proto3" json:"query_stats,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + QueryStats *QueryStats `protobuf:"bytes,2,opt,name=query_stats,json=queryStats,proto3" json:"query_stats,omitempty"` } func (m *SeriesResponseHints) Reset() { *m = SeriesResponseHints{} } @@ -126,25 +111,8 @@ func (m *SeriesResponseHints) XXX_DiscardUnknown() { var xxx_messageInfo_SeriesResponseHints proto.InternalMessageInfo -func (m *SeriesResponseHints) GetQueriedBlocks() []*Block { - if m != nil { - return m.QueriedBlocks - } - return nil -} - -func (m *SeriesResponseHints) GetQueryStats() *QueryStats { - if m != nil { - return m.QueryStats - } - return nil -} - type Block struct { - Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` } func (m *Block) Reset() { *m = Block{} } @@ -180,21 +148,11 @@ func (m *Block) XXX_DiscardUnknown() { var xxx_messageInfo_Block proto.InternalMessageInfo -func (m *Block) GetId() string { - if m != nil { - return m.Id - } - return "" -} - type LabelNamesRequestHints struct { /// block_matchers is a list of label matchers that are evaluated against each single block's /// labels to filter which blocks get queried. If the list is empty, no per-block filtering /// is applied. 
- BlockMatchers []*storepb.LabelMatcher `protobuf:"bytes,1,rep,name=block_matchers,json=blockMatchers,proto3" json:"block_matchers,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + BlockMatchers []storepb.LabelMatcher `protobuf:"bytes,1,rep,name=block_matchers,json=blockMatchers,proto3" json:"block_matchers"` } func (m *LabelNamesRequestHints) Reset() { *m = LabelNamesRequestHints{} } @@ -230,19 +188,9 @@ func (m *LabelNamesRequestHints) XXX_DiscardUnknown() { var xxx_messageInfo_LabelNamesRequestHints proto.InternalMessageInfo -func (m *LabelNamesRequestHints) GetBlockMatchers() []*storepb.LabelMatcher { - if m != nil { - return m.BlockMatchers - } - return nil -} - type LabelNamesResponseHints struct { /// queried_blocks is the list of blocks that have been queried. - QueriedBlocks []*Block `protobuf:"bytes,1,rep,name=queried_blocks,json=queriedBlocks,proto3" json:"queried_blocks,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + QueriedBlocks []Block `protobuf:"bytes,1,rep,name=queried_blocks,json=queriedBlocks,proto3" json:"queried_blocks"` } func (m *LabelNamesResponseHints) Reset() { *m = LabelNamesResponseHints{} } @@ -278,21 +226,11 @@ func (m *LabelNamesResponseHints) XXX_DiscardUnknown() { var xxx_messageInfo_LabelNamesResponseHints proto.InternalMessageInfo -func (m *LabelNamesResponseHints) GetQueriedBlocks() []*Block { - if m != nil { - return m.QueriedBlocks - } - return nil -} - type LabelValuesRequestHints struct { /// block_matchers is a list of label matchers that are evaluated against each single block's /// labels to filter which blocks get queried. If the list is empty, no per-block filtering /// is applied. - BlockMatchers []*storepb.LabelMatcher `protobuf:"bytes,1,rep,name=block_matchers,json=blockMatchers,proto3" json:"block_matchers,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + BlockMatchers []storepb.LabelMatcher `protobuf:"bytes,1,rep,name=block_matchers,json=blockMatchers,proto3" json:"block_matchers"` } func (m *LabelValuesRequestHints) Reset() { *m = LabelValuesRequestHints{} } @@ -328,19 +266,9 @@ func (m *LabelValuesRequestHints) XXX_DiscardUnknown() { var xxx_messageInfo_LabelValuesRequestHints proto.InternalMessageInfo -func (m *LabelValuesRequestHints) GetBlockMatchers() []*storepb.LabelMatcher { - if m != nil { - return m.BlockMatchers - } - return nil -} - type LabelValuesResponseHints struct { /// queried_blocks is the list of blocks that have been queried. - QueriedBlocks []*Block `protobuf:"bytes,1,rep,name=queried_blocks,json=queriedBlocks,proto3" json:"queried_blocks,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + QueriedBlocks []Block `protobuf:"bytes,1,rep,name=queried_blocks,json=queriedBlocks,proto3" json:"queried_blocks"` } func (m *LabelValuesResponseHints) Reset() { *m = LabelValuesResponseHints{} } @@ -376,40 +304,30 @@ func (m *LabelValuesResponseHints) XXX_DiscardUnknown() { var xxx_messageInfo_LabelValuesResponseHints proto.InternalMessageInfo -func (m *LabelValuesResponseHints) GetQueriedBlocks() []*Block { - if m != nil { - return m.QueriedBlocks - } - return nil -} - // / QueryStats fields are unstable and might change in the future. 
type QueryStats struct { - BlocksQueried int64 `protobuf:"varint,1,opt,name=blocks_queried,json=blocksQueried,proto3" json:"blocks_queried,omitempty"` - MergedSeriesCount int64 `protobuf:"varint,2,opt,name=merged_series_count,json=mergedSeriesCount,proto3" json:"merged_series_count,omitempty"` - MergedChunksCount int64 `protobuf:"varint,3,opt,name=merged_chunks_count,json=mergedChunksCount,proto3" json:"merged_chunks_count,omitempty"` - PostingsTouched int64 `protobuf:"varint,4,opt,name=postings_touched,json=postingsTouched,proto3" json:"postings_touched,omitempty"` - PostingsTouchedSizeSum int64 `protobuf:"varint,5,opt,name=postings_touched_size_sum,json=postingsTouchedSizeSum,proto3" json:"postings_touched_size_sum,omitempty"` - PostingsToFetch int64 `protobuf:"varint,6,opt,name=postings_to_fetch,json=postingsToFetch,proto3" json:"postings_to_fetch,omitempty"` - PostingsFetched int64 `protobuf:"varint,7,opt,name=postings_fetched,json=postingsFetched,proto3" json:"postings_fetched,omitempty"` - PostingsFetchedSizeSum int64 `protobuf:"varint,8,opt,name=postings_fetched_size_sum,json=postingsFetchedSizeSum,proto3" json:"postings_fetched_size_sum,omitempty"` - PostingsFetchCount int64 `protobuf:"varint,9,opt,name=postings_fetch_count,json=postingsFetchCount,proto3" json:"postings_fetch_count,omitempty"` - SeriesTouched int64 `protobuf:"varint,10,opt,name=series_touched,json=seriesTouched,proto3" json:"series_touched,omitempty"` - SeriesTouchedSizeSum int64 `protobuf:"varint,11,opt,name=series_touched_size_sum,json=seriesTouchedSizeSum,proto3" json:"series_touched_size_sum,omitempty"` - SeriesFetched int64 `protobuf:"varint,12,opt,name=series_fetched,json=seriesFetched,proto3" json:"series_fetched,omitempty"` - SeriesFetchedSizeSum int64 `protobuf:"varint,13,opt,name=series_fetched_size_sum,json=seriesFetchedSizeSum,proto3" json:"series_fetched_size_sum,omitempty"` - SeriesFetchCount int64 `protobuf:"varint,14,opt,name=series_fetch_count,json=seriesFetchCount,proto3" json:"series_fetch_count,omitempty"` - ChunksTouched int64 `protobuf:"varint,15,opt,name=chunks_touched,json=chunksTouched,proto3" json:"chunks_touched,omitempty"` - ChunksTouchedSizeSum int64 `protobuf:"varint,16,opt,name=chunks_touched_size_sum,json=chunksTouchedSizeSum,proto3" json:"chunks_touched_size_sum,omitempty"` - ChunksFetched int64 `protobuf:"varint,17,opt,name=chunks_fetched,json=chunksFetched,proto3" json:"chunks_fetched,omitempty"` - ChunksFetchedSizeSum int64 `protobuf:"varint,18,opt,name=chunks_fetched_size_sum,json=chunksFetchedSizeSum,proto3" json:"chunks_fetched_size_sum,omitempty"` - ChunksFetchCount int64 `protobuf:"varint,19,opt,name=chunks_fetch_count,json=chunksFetchCount,proto3" json:"chunks_fetch_count,omitempty"` - DataDownloadedSizeSum int64 `protobuf:"varint,20,opt,name=data_downloaded_size_sum,json=dataDownloadedSizeSum,proto3" json:"data_downloaded_size_sum,omitempty"` - GetAllDuration *protobuf.Duration `protobuf:"bytes,21,opt,name=get_all_duration,json=getAllDuration,proto3" json:"get_all_duration,omitempty"` - MergeDuration *protobuf.Duration `protobuf:"bytes,22,opt,name=merge_duration,json=mergeDuration,proto3" json:"merge_duration,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + BlocksQueried int64 `protobuf:"varint,1,opt,name=blocks_queried,json=blocksQueried,proto3" json:"blocks_queried,omitempty"` + MergedSeriesCount int64 `protobuf:"varint,2,opt,name=merged_series_count,json=mergedSeriesCount,proto3" 
json:"merged_series_count,omitempty"` + MergedChunksCount int64 `protobuf:"varint,3,opt,name=merged_chunks_count,json=mergedChunksCount,proto3" json:"merged_chunks_count,omitempty"` + PostingsTouched int64 `protobuf:"varint,4,opt,name=postings_touched,json=postingsTouched,proto3" json:"postings_touched,omitempty"` + PostingsTouchedSizeSum int64 `protobuf:"varint,5,opt,name=postings_touched_size_sum,json=postingsTouchedSizeSum,proto3" json:"postings_touched_size_sum,omitempty"` + PostingsToFetch int64 `protobuf:"varint,6,opt,name=postings_to_fetch,json=postingsToFetch,proto3" json:"postings_to_fetch,omitempty"` + PostingsFetched int64 `protobuf:"varint,7,opt,name=postings_fetched,json=postingsFetched,proto3" json:"postings_fetched,omitempty"` + PostingsFetchedSizeSum int64 `protobuf:"varint,8,opt,name=postings_fetched_size_sum,json=postingsFetchedSizeSum,proto3" json:"postings_fetched_size_sum,omitempty"` + PostingsFetchCount int64 `protobuf:"varint,9,opt,name=postings_fetch_count,json=postingsFetchCount,proto3" json:"postings_fetch_count,omitempty"` + SeriesTouched int64 `protobuf:"varint,10,opt,name=series_touched,json=seriesTouched,proto3" json:"series_touched,omitempty"` + SeriesTouchedSizeSum int64 `protobuf:"varint,11,opt,name=series_touched_size_sum,json=seriesTouchedSizeSum,proto3" json:"series_touched_size_sum,omitempty"` + SeriesFetched int64 `protobuf:"varint,12,opt,name=series_fetched,json=seriesFetched,proto3" json:"series_fetched,omitempty"` + SeriesFetchedSizeSum int64 `protobuf:"varint,13,opt,name=series_fetched_size_sum,json=seriesFetchedSizeSum,proto3" json:"series_fetched_size_sum,omitempty"` + SeriesFetchCount int64 `protobuf:"varint,14,opt,name=series_fetch_count,json=seriesFetchCount,proto3" json:"series_fetch_count,omitempty"` + ChunksTouched int64 `protobuf:"varint,15,opt,name=chunks_touched,json=chunksTouched,proto3" json:"chunks_touched,omitempty"` + ChunksTouchedSizeSum int64 `protobuf:"varint,16,opt,name=chunks_touched_size_sum,json=chunksTouchedSizeSum,proto3" json:"chunks_touched_size_sum,omitempty"` + ChunksFetched int64 `protobuf:"varint,17,opt,name=chunks_fetched,json=chunksFetched,proto3" json:"chunks_fetched,omitempty"` + ChunksFetchedSizeSum int64 `protobuf:"varint,18,opt,name=chunks_fetched_size_sum,json=chunksFetchedSizeSum,proto3" json:"chunks_fetched_size_sum,omitempty"` + ChunksFetchCount int64 `protobuf:"varint,19,opt,name=chunks_fetch_count,json=chunksFetchCount,proto3" json:"chunks_fetch_count,omitempty"` + DataDownloadedSizeSum int64 `protobuf:"varint,20,opt,name=data_downloaded_size_sum,json=dataDownloadedSizeSum,proto3" json:"data_downloaded_size_sum,omitempty"` + GetAllDuration time.Duration `protobuf:"bytes,21,opt,name=get_all_duration,json=getAllDuration,proto3,stdduration" json:"get_all_duration"` + MergeDuration time.Duration `protobuf:"bytes,22,opt,name=merge_duration,json=mergeDuration,proto3,stdduration" json:"merge_duration"` } func (m *QueryStats) Reset() { *m = QueryStats{} } @@ -445,160 +363,6 @@ func (m *QueryStats) XXX_DiscardUnknown() { var xxx_messageInfo_QueryStats proto.InternalMessageInfo -func (m *QueryStats) GetBlocksQueried() int64 { - if m != nil { - return m.BlocksQueried - } - return 0 -} - -func (m *QueryStats) GetMergedSeriesCount() int64 { - if m != nil { - return m.MergedSeriesCount - } - return 0 -} - -func (m *QueryStats) GetMergedChunksCount() int64 { - if m != nil { - return m.MergedChunksCount - } - return 0 -} - -func (m *QueryStats) GetPostingsTouched() int64 { - if m != nil { - return m.PostingsTouched - } - 
return 0 -} - -func (m *QueryStats) GetPostingsTouchedSizeSum() int64 { - if m != nil { - return m.PostingsTouchedSizeSum - } - return 0 -} - -func (m *QueryStats) GetPostingsToFetch() int64 { - if m != nil { - return m.PostingsToFetch - } - return 0 -} - -func (m *QueryStats) GetPostingsFetched() int64 { - if m != nil { - return m.PostingsFetched - } - return 0 -} - -func (m *QueryStats) GetPostingsFetchedSizeSum() int64 { - if m != nil { - return m.PostingsFetchedSizeSum - } - return 0 -} - -func (m *QueryStats) GetPostingsFetchCount() int64 { - if m != nil { - return m.PostingsFetchCount - } - return 0 -} - -func (m *QueryStats) GetSeriesTouched() int64 { - if m != nil { - return m.SeriesTouched - } - return 0 -} - -func (m *QueryStats) GetSeriesTouchedSizeSum() int64 { - if m != nil { - return m.SeriesTouchedSizeSum - } - return 0 -} - -func (m *QueryStats) GetSeriesFetched() int64 { - if m != nil { - return m.SeriesFetched - } - return 0 -} - -func (m *QueryStats) GetSeriesFetchedSizeSum() int64 { - if m != nil { - return m.SeriesFetchedSizeSum - } - return 0 -} - -func (m *QueryStats) GetSeriesFetchCount() int64 { - if m != nil { - return m.SeriesFetchCount - } - return 0 -} - -func (m *QueryStats) GetChunksTouched() int64 { - if m != nil { - return m.ChunksTouched - } - return 0 -} - -func (m *QueryStats) GetChunksTouchedSizeSum() int64 { - if m != nil { - return m.ChunksTouchedSizeSum - } - return 0 -} - -func (m *QueryStats) GetChunksFetched() int64 { - if m != nil { - return m.ChunksFetched - } - return 0 -} - -func (m *QueryStats) GetChunksFetchedSizeSum() int64 { - if m != nil { - return m.ChunksFetchedSizeSum - } - return 0 -} - -func (m *QueryStats) GetChunksFetchCount() int64 { - if m != nil { - return m.ChunksFetchCount - } - return 0 -} - -func (m *QueryStats) GetDataDownloadedSizeSum() int64 { - if m != nil { - return m.DataDownloadedSizeSum - } - return 0 -} - -func (m *QueryStats) GetGetAllDuration() *protobuf.Duration { - if m != nil { - return m.GetAllDuration - } - return nil -} - -func (m *QueryStats) GetMergeDuration() *protobuf.Duration { - if m != nil { - return m.MergeDuration - } - return nil -} - func init() { proto.RegisterType((*SeriesRequestHints)(nil), "hintspb.SeriesRequestHints") proto.RegisterType((*SeriesResponseHints)(nil), "hintspb.SeriesResponseHints") @@ -613,50 +377,53 @@ func init() { func init() { proto.RegisterFile("store/hintspb/hints.proto", fileDescriptor_b82aa23c4c11e83f) } var fileDescriptor_b82aa23c4c11e83f = []byte{ - // 682 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x55, 0xcf, 0x6f, 0xd3, 0x30, - 0x18, 0x55, 0x56, 0xf6, 0xeb, 0x2b, 0xcd, 0x3a, 0xb7, 0xdb, 0xb2, 0x1d, 0xaa, 0xa9, 0xd2, 0xa4, - 0x81, 0xa6, 0x14, 0x0d, 0x26, 0x84, 0xb8, 0xc0, 0x36, 0x4d, 0x1c, 0x00, 0xb1, 0x14, 0x76, 0xe0, - 0x62, 0x25, 0x8d, 0xd7, 0x46, 0x4b, 0xe3, 0x2c, 0x76, 0x84, 0xb6, 0x0b, 0x12, 0x7f, 0x1d, 0x47, - 0xf8, 0x0f, 0xd0, 0xfe, 0x12, 0x14, 0xff, 0x58, 0xec, 0xee, 0xc0, 0x81, 0x5e, 0x5a, 0xf9, 0xfb, - 0xde, 0x7b, 0x7e, 0xef, 0xb3, 0x5b, 0xc3, 0x36, 0xe3, 0xb4, 0x20, 0x83, 0x49, 0x92, 0x71, 0x96, - 0x47, 0xf2, 0xdb, 0xcf, 0x0b, 0xca, 0x29, 0x5a, 0x56, 0xc5, 0x9d, 0xde, 0x98, 0xd2, 0x71, 0x4a, - 0x06, 0xa2, 0x1c, 0x95, 0x97, 0x83, 0xb8, 0x2c, 0x42, 0x9e, 0xd0, 0x4c, 0x02, 0x77, 0x94, 0x86, - 0xf8, 0xcc, 0xa3, 0x01, 0xbf, 0xc9, 0x89, 0xd2, 0xe8, 0x7f, 0x07, 0x34, 0x24, 0x45, 0x42, 0x58, - 0x40, 0xae, 0x4b, 0xc2, 0xf8, 0xbb, 0x4a, 0x12, 0xbd, 0x06, 0x37, 0x4a, 0xe9, 0xe8, 0x0a, 0x4f, - 0x43, 0x3e, 
0x9a, 0x90, 0x82, 0x79, 0xce, 0x6e, 0x63, 0xbf, 0x79, 0xd8, 0xf5, 0xf9, 0x24, 0xcc, - 0x28, 0xf3, 0xdf, 0x87, 0x11, 0x49, 0x3f, 0xc8, 0x66, 0xd0, 0x12, 0x58, 0xb5, 0x62, 0xe8, 0x00, - 0x10, 0xc9, 0xc2, 0x28, 0x25, 0xf8, 0xba, 0x24, 0xc5, 0x0d, 0x66, 0x3c, 0xe4, 0xcc, 0x5b, 0xd8, - 0x75, 0xf6, 0x57, 0x82, 0xb6, 0xec, 0x9c, 0x57, 0x8d, 0x61, 0x55, 0xef, 0xff, 0x70, 0xa0, 0xa3, - 0x1d, 0xb0, 0x9c, 0x66, 0x8c, 0x48, 0x0b, 0x47, 0xe0, 0x56, 0xf4, 0x84, 0xc4, 0x58, 0xc8, 0x6b, - 0x0b, 0xae, 0xaf, 0x52, 0xfb, 0xc7, 0x55, 0x39, 0x68, 0x29, 0x94, 0x58, 0x31, 0xf4, 0x02, 0x9a, - 0xb3, 0xbb, 0x36, 0x0f, 0x3b, 0xf7, 0x9c, 0x7a, 0xe3, 0x00, 0xae, 0x6b, 0x13, 0x5b, 0xb0, 0x28, - 0xf8, 0xc8, 0x85, 0x85, 0x24, 0xf6, 0x9c, 0x5d, 0x67, 0x7f, 0x35, 0x58, 0x48, 0xe2, 0xfe, 0x17, - 0xd8, 0x14, 0x51, 0x3f, 0x86, 0xd3, 0x39, 0x8e, 0xa8, 0xff, 0x09, 0xb6, 0x4c, 0xd9, 0xff, 0xcf, - 0xdd, 0xbf, 0x50, 0x8a, 0x17, 0x61, 0x5a, 0xce, 0xd3, 0xe9, 0x39, 0x78, 0x96, 0xee, 0x1c, 0xac, - 0xfe, 0x5e, 0x01, 0xa8, 0xcf, 0x01, 0xed, 0x29, 0x7b, 0x0c, 0x2b, 0x98, 0x18, 0x7f, 0x43, 0x19, - 0x61, 0xe7, 0xb2, 0x88, 0x7c, 0xe8, 0x4c, 0x49, 0x31, 0x26, 0x31, 0x66, 0xe2, 0xb6, 0xe0, 0x11, - 0x2d, 0x33, 0x2e, 0x0e, 0xb8, 0x11, 0xac, 0xcb, 0x96, 0xbc, 0x47, 0x27, 0x55, 0xc3, 0xc0, 0x8f, - 0x26, 0x65, 0x76, 0xa5, 0xf1, 0x0d, 0x13, 0x7f, 0x22, 0x3a, 0x12, 0xff, 0x04, 0xda, 0x39, 0x65, - 0x3c, 0xc9, 0xc6, 0x0c, 0x73, 0x5a, 0x8e, 0x26, 0x24, 0xf6, 0x1e, 0x09, 0xf0, 0x9a, 0xae, 0x7f, - 0x96, 0x65, 0xf4, 0x0a, 0xb6, 0x67, 0xa1, 0x98, 0x25, 0xb7, 0x04, 0xb3, 0x72, 0xea, 0x2d, 0x0a, - 0xce, 0xe6, 0x0c, 0x67, 0x98, 0xdc, 0x92, 0x61, 0x39, 0x45, 0x4f, 0x61, 0xdd, 0xa0, 0xe2, 0x4b, - 0xc2, 0x47, 0x13, 0x6f, 0x69, 0x76, 0x9b, 0xb3, 0xaa, 0x6c, 0x39, 0x12, 0x40, 0x12, 0x7b, 0xcb, - 0x36, 0xf4, 0x4c, 0x96, 0x2d, 0x47, 0x0a, 0x5a, 0x3b, 0x5a, 0xb1, 0x1d, 0x29, 0x8e, 0x76, 0xf4, - 0x0c, 0xba, 0x36, 0x55, 0x0d, 0x6a, 0x55, 0xb0, 0x90, 0xc5, 0x92, 0x93, 0xda, 0x03, 0x57, 0x1d, - 0x81, 0x9e, 0x13, 0xc8, 0x03, 0x93, 0x55, 0x3d, 0xa5, 0x23, 0xd8, 0xb2, 0x61, 0xb5, 0xa3, 0xa6, - 0xc0, 0x77, 0x2d, 0xbc, 0xf6, 0x53, 0xab, 0xeb, 0xcc, 0x8f, 0x4d, 0x75, 0x9d, 0xb8, 0x56, 0x7f, - 0x90, 0xb7, 0x65, 0xaa, 0xcf, 0xa4, 0x3d, 0x00, 0x64, 0xd2, 0x54, 0x56, 0x57, 0x30, 0xda, 0x06, - 0xe3, 0x3e, 0xa9, 0xba, 0x3c, 0x3a, 0xe9, 0x9a, 0xf4, 0x22, 0xab, 0x46, 0x52, 0x1b, 0x56, 0x7b, - 0x69, 0x4b, 0x2f, 0x16, 0xde, 0x48, 0xaa, 0x68, 0x3a, 0xe9, 0xba, 0xa9, 0x6e, 0x24, 0xb5, 0x61, - 0xb5, 0x3a, 0x32, 0xd5, 0x1f, 0x26, 0x35, 0x69, 0x2a, 0x69, 0x47, 0x26, 0x35, 0x18, 0x32, 0xe9, - 0x4b, 0xf0, 0xe2, 0x90, 0x87, 0x38, 0xa6, 0xdf, 0xb2, 0x94, 0x86, 0xb1, 0xb9, 0x4b, 0x57, 0x70, - 0x36, 0xaa, 0xfe, 0xe9, 0x7d, 0x5b, 0x6f, 0x73, 0x02, 0xed, 0x31, 0xe1, 0x38, 0x4c, 0x53, 0xac, - 0x1f, 0x1d, 0x6f, 0x43, 0xfc, 0xe9, 0x6e, 0xfb, 0xf2, 0x55, 0xf2, 0xf5, 0xab, 0xe4, 0x9f, 0x2a, - 0x40, 0xe0, 0x8e, 0x09, 0x7f, 0x9b, 0xa6, 0x7a, 0x8d, 0xde, 0x80, 0x2b, 0x7e, 0x90, 0xb5, 0xc4, - 0xe6, 0xbf, 0x24, 0x5a, 0x82, 0xa0, 0x97, 0xc7, 0x1b, 0x3f, 0xef, 0x7a, 0xce, 0xaf, 0xbb, 0x9e, - 0xf3, 0xe7, 0xae, 0xe7, 0x7c, 0xd5, 0x0f, 0x63, 0xb4, 0x24, 0x88, 0xcf, 0xff, 0x06, 0x00, 0x00, - 0xff, 0xff, 0xb5, 0x1c, 0xa2, 0xbd, 0x45, 0x07, 0x00, 0x00, + // 731 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x55, 0xcd, 0x4f, 0xdb, 0x30, + 0x1c, 0x6d, 0x28, 0x1f, 0xc5, 0x1d, 0xa1, 0xb8, 0x05, 0x02, 0x87, 0x80, 0x2a, 0x21, 0xb1, 0x09, + 0xa5, 0x13, 0xd3, 0x34, 0x6d, 0x3b, 0xf1, 0x21, 0x34, 0x4d, 0x63, 0x12, 0xe9, 0xc4, 0xa4, 0x6d, + 0x92, 0x95, 0x34, 0x26, 
0x8d, 0x48, 0xe3, 0x12, 0x3b, 0x9a, 0xe0, 0xbe, 0xeb, 0xb4, 0xe3, 0xfe, + 0x24, 0x8e, 0x1c, 0x77, 0xda, 0x07, 0x68, 0xff, 0xc7, 0x14, 0x7f, 0x34, 0x4e, 0xb9, 0xec, 0xd0, + 0x0b, 0xb4, 0xef, 0xf7, 0xde, 0xf3, 0x7b, 0x76, 0x1a, 0x83, 0x35, 0xca, 0x48, 0x8a, 0x3b, 0xfd, + 0x28, 0x61, 0x74, 0xe8, 0x8b, 0xff, 0xce, 0x30, 0x25, 0x8c, 0xc0, 0x39, 0x09, 0xae, 0xb7, 0x42, + 0x12, 0x12, 0x8e, 0x75, 0xf2, 0x4f, 0x62, 0xbc, 0x6e, 0x87, 0x84, 0x84, 0x31, 0xee, 0xf0, 0x6f, + 0x7e, 0x76, 0xd6, 0x09, 0xb2, 0xd4, 0x63, 0x11, 0x49, 0xe4, 0x5c, 0x3a, 0xf3, 0xbf, 0x43, 0xbf, + 0xc3, 0x2e, 0x87, 0x58, 0x3a, 0xb7, 0xbf, 0x18, 0x00, 0x76, 0x71, 0x1a, 0x61, 0xea, 0xe2, 0x8b, + 0x0c, 0x53, 0xf6, 0x2a, 0x5f, 0x09, 0xee, 0x01, 0xd3, 0x8f, 0x49, 0xef, 0x1c, 0x0d, 0x3c, 0xd6, + 0xeb, 0xe3, 0x94, 0x5a, 0xc6, 0x66, 0x75, 0xbb, 0xbe, 0xdb, 0x72, 0x58, 0xdf, 0x4b, 0x08, 0x75, + 0xde, 0x78, 0x3e, 0x8e, 0x8f, 0xc5, 0x70, 0x7f, 0xfa, 0xfa, 0xe7, 0x46, 0xc5, 0x5d, 0xe0, 0x0a, + 0x89, 0x51, 0xb8, 0x03, 0x20, 0x4e, 0x3c, 0x3f, 0xc6, 0xe8, 0x22, 0xc3, 0xe9, 0x25, 0xa2, 0xcc, + 0x63, 0xd4, 0x9a, 0xda, 0x34, 0xb6, 0x6b, 0x6e, 0x43, 0x4c, 0x4e, 0xf2, 0x41, 0x37, 0xc7, 0xdb, + 0x5f, 0x0d, 0xd0, 0x54, 0x39, 0xe8, 0x90, 0x24, 0x14, 0x8b, 0x20, 0x2f, 0x81, 0x99, 0xcb, 0x23, + 0x1c, 0x20, 0x6e, 0xaf, 0x82, 0x98, 0x8e, 0xdc, 0x12, 0x67, 0x3f, 0x87, 0x55, 0x04, 0xc9, 0xe5, + 0x18, 0x85, 0x2f, 0x40, 0x7d, 0x7c, 0xed, 0xfa, 0x6e, 0x73, 0xa4, 0x2c, 0x96, 0xe7, 0x72, 0xc3, + 0x05, 0x17, 0x45, 0xa0, 0x55, 0x30, 0xc3, 0x5d, 0xa0, 0x09, 0xa6, 0xa2, 0xc0, 0x32, 0x36, 0x8d, + 0xed, 0x79, 0x77, 0x2a, 0x0a, 0xda, 0x1f, 0xc1, 0x0a, 0x2f, 0xff, 0xd6, 0x1b, 0x4c, 0x7c, 0xd3, + 0xda, 0xa7, 0x60, 0x55, 0x37, 0x9f, 0xd4, 0x4e, 0xb4, 0x3f, 0x49, 0xdf, 0x53, 0x2f, 0xce, 0x26, + 0x9f, 0xfa, 0x3d, 0xb0, 0x4a, 0xee, 0x13, 0x8b, 0xfd, 0xb7, 0x06, 0x40, 0x71, 0x4a, 0x70, 0x4b, + 0x46, 0xa5, 0x48, 0xd2, 0xf8, 0xb1, 0x54, 0x65, 0x1c, 0x7a, 0x22, 0x40, 0xe8, 0x80, 0xe6, 0x00, + 0xa7, 0x21, 0x0e, 0x10, 0xe5, 0x4f, 0x14, 0xea, 0x91, 0x2c, 0x61, 0xfc, 0xf8, 0xab, 0xee, 0x92, + 0x18, 0x89, 0x67, 0xed, 0x20, 0x1f, 0x68, 0xfc, 0x5e, 0x3f, 0x4b, 0xce, 0x15, 0xbf, 0xaa, 0xf3, + 0x0f, 0xf8, 0x44, 0xf0, 0x1f, 0x82, 0xc6, 0x90, 0x50, 0x16, 0x25, 0x21, 0x45, 0x8c, 0x64, 0xbd, + 0x3e, 0x0e, 0xac, 0x69, 0x4e, 0x5e, 0x54, 0xf8, 0x3b, 0x01, 0xc3, 0xe7, 0x60, 0x6d, 0x9c, 0x8a, + 0x68, 0x74, 0x85, 0x11, 0xcd, 0x06, 0xd6, 0x0c, 0xd7, 0xac, 0x8c, 0x69, 0xba, 0xd1, 0x15, 0xee, + 0x66, 0x03, 0xf8, 0x08, 0x2c, 0x69, 0x52, 0x74, 0x86, 0x59, 0xaf, 0x6f, 0xcd, 0x8e, 0x2f, 0x73, + 0x94, 0xc3, 0xa5, 0x44, 0x9c, 0x88, 0x03, 0x6b, 0xae, 0x4c, 0x3d, 0x12, 0x70, 0x29, 0x91, 0xa4, + 0x16, 0x89, 0x6a, 0xe5, 0x44, 0x52, 0xa3, 0x12, 0x3d, 0x06, 0xad, 0xb2, 0x54, 0x6e, 0xd4, 0x3c, + 0x57, 0xc1, 0x92, 0x4a, 0xec, 0xd4, 0x16, 0x30, 0xe5, 0x11, 0xa8, 0x7d, 0x02, 0xe2, 0xc0, 0x04, + 0xaa, 0x76, 0xe9, 0x29, 0x58, 0x2d, 0xd3, 0x8a, 0x44, 0x75, 0xce, 0x6f, 0x95, 0xf8, 0x2a, 0x4f, + 0xe1, 0xae, 0x3a, 0x3f, 0xd0, 0xdd, 0x55, 0xe3, 0xc2, 0xfd, 0x5e, 0xdf, 0x05, 0xdd, 0x7d, 0xac, + 0xed, 0x0e, 0x80, 0xba, 0x4c, 0x76, 0x35, 0xb9, 0xa2, 0xa1, 0x29, 0x46, 0x4d, 0xe5, 0xc3, 0xa3, + 0x9a, 0x2e, 0x8a, 0x2c, 0x02, 0xd5, 0x9a, 0x96, 0x69, 0x45, 0x96, 0x86, 0xc8, 0x52, 0xe2, 0x6b, + 0x4d, 0xa5, 0x4c, 0x35, 0x5d, 0xd2, 0xdd, 0xb5, 0xa6, 0x65, 0x5a, 0xe1, 0x0e, 0x75, 0xf7, 0xfb, + 0x4d, 0x75, 0x99, 0x6c, 0xda, 0x14, 0x4d, 0x35, 0x85, 0x68, 0xfa, 0x0c, 0x58, 0x81, 0xc7, 0x3c, + 0x14, 0x90, 0xcf, 0x49, 0x4c, 0xbc, 0x40, 0x5f, 0xa5, 0xc5, 0x35, 0xcb, 0xf9, 0xfc, 0x70, 0x34, + 0x56, 0xcb, 0x1c, 0x83, 0x46, 0x88, 0x19, 0xf2, 
0xe2, 0x18, 0xa9, 0xfb, 0xc9, 0x5a, 0xe6, 0xaf, + 0xe4, 0x35, 0x47, 0x5c, 0x60, 0x8e, 0xba, 0xc0, 0x9c, 0x43, 0x49, 0xd8, 0xaf, 0xe5, 0xaf, 0x85, + 0xef, 0xbf, 0x36, 0x0c, 0xd7, 0x0c, 0x31, 0xdb, 0x8b, 0x63, 0x35, 0x81, 0xaf, 0x81, 0xc9, 0x7f, + 0x9a, 0x85, 0xd9, 0xca, 0xff, 0x9b, 0x2d, 0x70, 0xe9, 0x68, 0xb0, 0x75, 0xfd, 0xc7, 0xae, 0x5c, + 0xdf, 0xda, 0xc6, 0xcd, 0xad, 0x6d, 0xfc, 0xbe, 0xb5, 0x8d, 0x6f, 0x77, 0x76, 0xe5, 0xe6, 0xce, + 0xae, 0xfc, 0xb8, 0xb3, 0x2b, 0x1f, 0xd4, 0xed, 0xeb, 0xcf, 0x72, 0xcb, 0x27, 0xff, 0x02, 0x00, + 0x00, 0xff, 0xff, 0x49, 0xb6, 0x83, 0x90, 0xaa, 0x07, 0x00, 0x00, } func (m *SeriesRequestHints) Marshal() (dAtA []byte, err error) { @@ -679,10 +446,6 @@ func (m *SeriesRequestHints) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if m.EnableQueryStats { i-- if m.EnableQueryStats { @@ -730,10 +493,6 @@ func (m *SeriesResponseHints) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if m.QueryStats != nil { { size, err := m.QueryStats.MarshalToSizedBuffer(dAtA[:i]) @@ -783,10 +542,6 @@ func (m *Block) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if len(m.Id) > 0 { i -= len(m.Id) copy(dAtA[i:], m.Id) @@ -817,10 +572,6 @@ func (m *LabelNamesRequestHints) MarshalToSizedBuffer(dAtA []byte) (int, error) _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if len(m.BlockMatchers) > 0 { for iNdEx := len(m.BlockMatchers) - 1; iNdEx >= 0; iNdEx-- { { @@ -858,10 +609,6 @@ func (m *LabelNamesResponseHints) MarshalToSizedBuffer(dAtA []byte) (int, error) _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if len(m.QueriedBlocks) > 0 { for iNdEx := len(m.QueriedBlocks) - 1; iNdEx >= 0; iNdEx-- { { @@ -899,10 +646,6 @@ func (m *LabelValuesRequestHints) MarshalToSizedBuffer(dAtA []byte) (int, error) _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if len(m.BlockMatchers) > 0 { for iNdEx := len(m.BlockMatchers) - 1; iNdEx >= 0; iNdEx-- { { @@ -940,10 +683,6 @@ func (m *LabelValuesResponseHints) MarshalToSizedBuffer(dAtA []byte) (int, error _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if len(m.QueriedBlocks) > 0 { for iNdEx := len(m.QueriedBlocks) - 1; iNdEx >= 0; iNdEx-- { { @@ -981,38 +720,26 @@ func (m *QueryStats) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } - if m.MergeDuration != nil { - { - size, err := m.MergeDuration.MarshalToSizedBuffer(dAtA[:i]) - if err != nil { - return 0, err - } - i -= size - i = encodeVarintHints(dAtA, i, uint64(size)) - } - i-- - dAtA[i] = 0x1 - i-- - dAtA[i] = 0xb2 - } - if m.GetAllDuration != nil { - { - size, err := m.GetAllDuration.MarshalToSizedBuffer(dAtA[:i]) - if err != nil { - return 0, err - } - i -= size - i = encodeVarintHints(dAtA, i, uint64(size)) - } - i-- - dAtA[i] = 0x1 - i-- - dAtA[i] = 0xaa - } + n2, err2 := 
github_com_gogo_protobuf_types.StdDurationMarshalTo(m.MergeDuration, dAtA[i-github_com_gogo_protobuf_types.SizeOfStdDuration(m.MergeDuration):]) + if err2 != nil { + return 0, err2 + } + i -= n2 + i = encodeVarintHints(dAtA, i, uint64(n2)) + i-- + dAtA[i] = 0x1 + i-- + dAtA[i] = 0xb2 + n3, err3 := github_com_gogo_protobuf_types.StdDurationMarshalTo(m.GetAllDuration, dAtA[i-github_com_gogo_protobuf_types.SizeOfStdDuration(m.GetAllDuration):]) + if err3 != nil { + return 0, err3 + } + i -= n3 + i = encodeVarintHints(dAtA, i, uint64(n3)) + i-- + dAtA[i] = 0x1 + i-- + dAtA[i] = 0xaa if m.DataDownloadedSizeSum != 0 { i = encodeVarintHints(dAtA, i, uint64(m.DataDownloadedSizeSum)) i-- @@ -1152,9 +879,6 @@ func (m *SeriesRequestHints) Size() (n int) { if m.EnableQueryStats { n += 2 } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -1174,9 +898,6 @@ func (m *SeriesResponseHints) Size() (n int) { l = m.QueryStats.Size() n += 1 + l + sovHints(uint64(l)) } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -1190,9 +911,6 @@ func (m *Block) Size() (n int) { if l > 0 { n += 1 + l + sovHints(uint64(l)) } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -1208,9 +926,6 @@ func (m *LabelNamesRequestHints) Size() (n int) { n += 1 + l + sovHints(uint64(l)) } } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -1226,9 +941,6 @@ func (m *LabelNamesResponseHints) Size() (n int) { n += 1 + l + sovHints(uint64(l)) } } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -1244,9 +956,6 @@ func (m *LabelValuesRequestHints) Size() (n int) { n += 1 + l + sovHints(uint64(l)) } } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -1262,9 +971,6 @@ func (m *LabelValuesResponseHints) Size() (n int) { n += 1 + l + sovHints(uint64(l)) } } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -1334,17 +1040,10 @@ func (m *QueryStats) Size() (n int) { if m.DataDownloadedSizeSum != 0 { n += 2 + sovHints(uint64(m.DataDownloadedSizeSum)) } - if m.GetAllDuration != nil { - l = m.GetAllDuration.Size() - n += 2 + l + sovHints(uint64(l)) - } - if m.MergeDuration != nil { - l = m.MergeDuration.Size() - n += 2 + l + sovHints(uint64(l)) - } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } + l = github_com_gogo_protobuf_types.SizeOfStdDuration(m.GetAllDuration) + n += 2 + l + sovHints(uint64(l)) + l = github_com_gogo_protobuf_types.SizeOfStdDuration(m.MergeDuration) + n += 2 + l + sovHints(uint64(l)) return n } @@ -1412,7 +1111,7 @@ func (m *SeriesRequestHints) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.BlockMatchers = append(m.BlockMatchers, &storepb.LabelMatcher{}) + m.BlockMatchers = append(m.BlockMatchers, storepb.LabelMatcher{}) if err := m.BlockMatchers[len(m.BlockMatchers)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -1449,7 +1148,6 @@ func (m *SeriesRequestHints) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) 
iNdEx += skippy } } @@ -1517,7 +1215,7 @@ func (m *SeriesResponseHints) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.QueriedBlocks = append(m.QueriedBlocks, &Block{}) + m.QueriedBlocks = append(m.QueriedBlocks, Block{}) if err := m.QueriedBlocks[len(m.QueriedBlocks)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -1570,7 +1268,6 @@ func (m *SeriesResponseHints) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } @@ -1653,7 +1350,6 @@ func (m *Block) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } @@ -1721,7 +1417,7 @@ func (m *LabelNamesRequestHints) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.BlockMatchers = append(m.BlockMatchers, &storepb.LabelMatcher{}) + m.BlockMatchers = append(m.BlockMatchers, storepb.LabelMatcher{}) if err := m.BlockMatchers[len(m.BlockMatchers)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -1738,7 +1434,6 @@ func (m *LabelNamesRequestHints) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } @@ -1806,7 +1501,7 @@ func (m *LabelNamesResponseHints) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.QueriedBlocks = append(m.QueriedBlocks, &Block{}) + m.QueriedBlocks = append(m.QueriedBlocks, Block{}) if err := m.QueriedBlocks[len(m.QueriedBlocks)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -1823,7 +1518,6 @@ func (m *LabelNamesResponseHints) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } @@ -1891,7 +1585,7 @@ func (m *LabelValuesRequestHints) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.BlockMatchers = append(m.BlockMatchers, &storepb.LabelMatcher{}) + m.BlockMatchers = append(m.BlockMatchers, storepb.LabelMatcher{}) if err := m.BlockMatchers[len(m.BlockMatchers)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -1908,7 +1602,6 @@ func (m *LabelValuesRequestHints) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } @@ -1976,7 +1669,7 @@ func (m *LabelValuesResponseHints) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.QueriedBlocks = append(m.QueriedBlocks, &Block{}) + m.QueriedBlocks = append(m.QueriedBlocks, Block{}) if err := m.QueriedBlocks[len(m.QueriedBlocks)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -1993,7 +1686,6 @@ func (m *LabelValuesResponseHints) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) 
iNdEx += skippy } } @@ -2441,10 +2133,7 @@ func (m *QueryStats) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - if m.GetAllDuration == nil { - m.GetAllDuration = &protobuf.Duration{} - } - if err := m.GetAllDuration.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + if err := github_com_gogo_protobuf_types.StdDurationUnmarshal(&m.GetAllDuration, dAtA[iNdEx:postIndex]); err != nil { return err } iNdEx = postIndex @@ -2477,10 +2166,7 @@ func (m *QueryStats) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - if m.MergeDuration == nil { - m.MergeDuration = &protobuf.Duration{} - } - if err := m.MergeDuration.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + if err := github_com_gogo_protobuf_types.StdDurationUnmarshal(&m.MergeDuration, dAtA[iNdEx:postIndex]); err != nil { return err } iNdEx = postIndex @@ -2496,7 +2182,6 @@ func (m *QueryStats) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } diff --git a/pkg/store/hintspb/hints.proto b/pkg/store/hintspb/hints.proto index 65fafc3515..69c60d2a7d 100644 --- a/pkg/store/hintspb/hints.proto +++ b/pkg/store/hintspb/hints.proto @@ -4,25 +4,36 @@ syntax = "proto3"; package hintspb; +import "gogoproto/gogo.proto"; import "google/protobuf/duration.proto"; import "store/storepb/types.proto"; option go_package = "hintspb"; +option (gogoproto.sizer_all) = true; +option (gogoproto.marshaler_all) = true; +option (gogoproto.unmarshaler_all) = true; +option (gogoproto.goproto_getters_all) = false; + +// Do not generate XXX fields to reduce memory. +option (gogoproto.goproto_unkeyed_all) = false; +option (gogoproto.goproto_unrecognized_all) = false; +option (gogoproto.goproto_sizecache_all) = false; + message SeriesRequestHints { /// block_matchers is a list of label matchers that are evaluated against each single block's /// labels to filter which blocks get queried. If the list is empty, no per-block filtering /// is applied. - repeated thanos.LabelMatcher block_matchers = 1; + repeated thanos.LabelMatcher block_matchers = 1 [(gogoproto.nullable) = false]; bool enable_query_stats = 2; } message SeriesResponseHints { /// queried_blocks is the list of blocks that have been queried. - repeated Block queried_blocks = 1; + repeated Block queried_blocks = 1 [(gogoproto.nullable) = false]; /// query_stats contains statistics of querying store gateway. - QueryStats query_stats = 2; + QueryStats query_stats = 2 [(gogoproto.nullable) = true]; } message Block { @@ -34,24 +45,24 @@ message LabelNamesRequestHints { /// block_matchers is a list of label matchers that are evaluated against each single block's /// labels to filter which blocks get queried. If the list is empty, no per-block filtering /// is applied. - repeated thanos.LabelMatcher block_matchers = 1; + repeated thanos.LabelMatcher block_matchers = 1 [(gogoproto.nullable) = false]; } message LabelNamesResponseHints { /// queried_blocks is the list of blocks that have been queried. - repeated Block queried_blocks = 1; + repeated Block queried_blocks = 1 [(gogoproto.nullable) = false]; } message LabelValuesRequestHints { /// block_matchers is a list of label matchers that are evaluated against each single block's /// labels to filter which blocks get queried. If the list is empty, no per-block filtering /// is applied. 
- repeated thanos.LabelMatcher block_matchers = 1; + repeated thanos.LabelMatcher block_matchers = 1 [(gogoproto.nullable) = false]; } message LabelValuesResponseHints { /// queried_blocks is the list of blocks that have been queried. - repeated Block queried_blocks = 1; + repeated Block queried_blocks = 1 [(gogoproto.nullable) = false]; } /// QueryStats fields are unstable and might change in the future. @@ -80,6 +91,6 @@ message QueryStats { int64 chunks_fetch_count = 19; int64 data_downloaded_size_sum = 20; - google.protobuf.Duration get_all_duration = 21; - google.protobuf.Duration merge_duration = 22; + google.protobuf.Duration get_all_duration = 21 [(gogoproto.stdduration) = true, (gogoproto.nullable) = false]; + google.protobuf.Duration merge_duration = 22 [(gogoproto.stdduration) = true, (gogoproto.nullable) = false]; } \ No newline at end of file diff --git a/pkg/store/labelpb/label.go b/pkg/store/labelpb/label.go index 35e2f5a61a..14e8d2fbe3 100644 --- a/pkg/store/labelpb/label.go +++ b/pkg/store/labelpb/label.go @@ -48,7 +48,7 @@ func PromLabelSetsToString(lsets []labels.Labels) string { // ValidateLabels validates label names and values (checks for empty // names and values, out of order labels and duplicate label names) // Returns appropriate error if validation fails on a label. -func ValidateLabels(lbls []*Label) error { +func ValidateLabels(lbls []Label) error { if len(lbls) == 0 { return ErrEmptyLabels } @@ -79,23 +79,19 @@ func ValidateLabels(lbls []*Label) error { return nil } -func PromLabelsToLabelpbLabels(lbls labels.Labels) []*Label { +func PromLabelsToLabelpbLabels(lbls labels.Labels) []Label { if lbls.Len() == 0 { - return []*Label(nil) + return []Label(nil) } - lset := make([]*Label, 0, lbls.Len()) + lset := make([]Label, 0, lbls.Len()) lbls.Range(func(l labels.Label) { - lset = append(lset, &Label{Name: l.Name, Value: l.Value}) - }) - - sort.Slice(lset, func(i, j int) bool { - return lset[i].Compare(lset[j]) < 0 + lset = append(lset, Label{Name: l.Name, Value: l.Value}) }) return lset } -func LabelpbLabelsToPromLabels(lbls []*Label) labels.Labels { +func LabelpbLabelsToPromLabels(lbls []Label) labels.Labels { lblSlice := make([]string, 0, len(lbls)*2) for _, l := range lbls { lblSlice = append(lblSlice, l.Name, l.Value) @@ -107,7 +103,7 @@ func (l *Label) Equal(other Label) bool { return l.Name == other.Name && l.Value == other.Value } -func (m *Label) Compare(other *Label) int { +func (m *Label) Compare(other Label) int { if c := strings.Compare(m.Name, other.Name); c != 0 { return c } @@ -115,13 +111,10 @@ func (m *Label) Compare(other *Label) int { } func (m *LabelSet) PromLabels() labels.Labels { - if m == nil { - return labels.EmptyLabels() - } return LabelpbLabelsToPromLabels(m.Labels) } -func LabelpbLabelSetsToPromLabels(lss ...*LabelSet) []labels.Labels { +func LabelpbLabelSetsToPromLabels(lss ...LabelSet) []labels.Labels { res := make([]labels.Labels, 0, len(lss)) for _, ls := range lss { res = append(res, ls.PromLabels()) @@ -130,7 +123,7 @@ func LabelpbLabelSetsToPromLabels(lss ...*LabelSet) []labels.Labels { } // HashWithPrefix returns a hash for the given prefix and labels. -func HashWithPrefix(prefix string, lbls []*Label) uint64 { +func HashWithPrefix(prefix string, lbls []Label) uint64 { // Use xxhash.Sum64(b) for fast path as it's faster. b := make([]byte, 0, 1024) b = append(b, prefix...) 
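
The labelpb hunks here and below mirror the proto options above: PromLabelsToLabelpbLabels, LabelpbLabelsToPromLabels, ValidateLabels, and HashWithPrefix all trade []*Label for []Label. Below is a small sketch of the round trip, not part of this patch; it relies only on the signatures shown in the diff, and the "tenant-a" prefix is just an example value.

package main

import (
	"fmt"

	"github.com/prometheus/prometheus/model/labels"

	"github.com/thanos-io/thanos/pkg/store/labelpb"
)

func main() {
	promLbls := labels.FromStrings("cluster", "a", "region", "r1")

	// PromLabelsToLabelpbLabels now returns []labelpb.Label directly; the
	// explicit sort was dropped since labels.Labels is already kept sorted.
	pbLbls := labelpb.PromLabelsToLabelpbLabels(promLbls)

	if err := labelpb.ValidateLabels(pbLbls); err != nil {
		panic(err)
	}

	// Round-trip back to Prometheus labels and hash with a prefix.
	back := labelpb.LabelpbLabelsToPromLabels(pbLbls)
	fmt.Println(back.String(), labelpb.HashWithPrefix("tenant-a", pbLbls))
}

The same nullable=false treatment, combined with (gogoproto.stdduration) on the QueryStats duration fields, is what lets QueryStats.Merge above add GetAllDuration and MergeDuration as plain time.Duration values instead of summing Seconds and Nanos on *types.Duration.
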
@@ -157,7 +150,7 @@ func HashWithPrefix(prefix string, lbls []*Label) uint64 { return xxhash.Sum64(b) } -type LabelSets []*LabelSet +type LabelSets []LabelSet func (z LabelSets) Len() int { return len(z) } @@ -199,8 +192,5 @@ func (m *LabelSet) UnmarshalJSON(entry []byte) error { } func (m *LabelSet) MarshalJSON() ([]byte, error) { - if m == nil || len(m.Labels) == 0 { - return []byte("{}"), nil - } return m.PromLabels().MarshalJSON() } diff --git a/pkg/store/labelpb/types.pb.go b/pkg/store/labelpb/types.pb.go index fd20605198..8173df058c 100644 --- a/pkg/store/labelpb/types.pb.go +++ b/pkg/store/labelpb/types.pb.go @@ -9,6 +9,7 @@ import ( math "math" math_bits "math/bits" + _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" ) @@ -24,11 +25,8 @@ var _ = math.Inf const _ = proto.GoGoProtoPackageIsVersion3 // please upgrade the proto package type Label struct { - Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` - Value string `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + Value string `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` } func (m *Label) Reset() { *m = Label{} } @@ -64,25 +62,8 @@ func (m *Label) XXX_DiscardUnknown() { var xxx_messageInfo_Label proto.InternalMessageInfo -func (m *Label) GetName() string { - if m != nil { - return m.Name - } - return "" -} - -func (m *Label) GetValue() string { - if m != nil { - return m.Value - } - return "" -} - type LabelSet struct { - Labels []*Label `protobuf:"bytes,1,rep,name=labels,proto3" json:"labels,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Labels []Label `protobuf:"bytes,1,rep,name=labels,proto3" json:"labels"` } func (m *LabelSet) Reset() { *m = LabelSet{} } @@ -118,13 +99,6 @@ func (m *LabelSet) XXX_DiscardUnknown() { var xxx_messageInfo_LabelSet proto.InternalMessageInfo -func (m *LabelSet) GetLabels() []*Label { - if m != nil { - return m.Labels - } - return nil -} - func init() { proto.RegisterType((*Label)(nil), "thanos.Label") proto.RegisterType((*LabelSet)(nil), "thanos.LabelSet") @@ -133,17 +107,19 @@ func init() { func init() { proto.RegisterFile("store/labelpb/types.proto", fileDescriptor_cdcc9e7dae4870e8) } var fileDescriptor_cdcc9e7dae4870e8 = []byte{ - // 155 bytes of a gzipped FileDescriptorProto + // 191 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x2c, 0x2e, 0xc9, 0x2f, 0x4a, 0xd5, 0xcf, 0x49, 0x4c, 0x4a, 0xcd, 0x29, 0x48, 0xd2, 0x2f, 0xa9, 0x2c, 0x48, 0x2d, 0xd6, - 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0x62, 0x2b, 0xc9, 0x48, 0xcc, 0xcb, 0x2f, 0x56, 0x32, 0xe4, - 0x62, 0xf5, 0x01, 0x49, 0x0b, 0x09, 0x71, 0xb1, 0xe4, 0x25, 0xe6, 0xa6, 0x4a, 0x30, 0x2a, 0x30, - 0x6a, 0x70, 0x06, 0x81, 0xd9, 0x42, 0x22, 0x5c, 0xac, 0x65, 0x89, 0x39, 0xa5, 0xa9, 0x12, 0x4c, - 0x60, 0x41, 0x08, 0x47, 0xc9, 0x90, 0x8b, 0x03, 0xac, 0x25, 0x38, 0xb5, 0x44, 0x48, 0x95, 0x8b, - 0x0d, 0x6c, 0x7a, 0xb1, 0x04, 0xa3, 0x02, 0xb3, 0x06, 0xb7, 0x11, 0xaf, 0x1e, 0xc4, 0x5c, 0x3d, - 0xb0, 0x8a, 0x20, 0xa8, 0xa4, 0x93, 0xe8, 0x89, 0x47, 0x72, 0x8c, 0x17, 0x1e, 0xc9, 0x31, 0x3e, - 0x78, 0x24, 0xc7, 0x18, 0xc5, 0x0e, 0x75, 0x50, 0x12, 0x1b, 0xd8, 0x2d, 0xc6, 0x80, 0x00, 0x00, - 0x00, 0xff, 0xff, 0xc9, 0xef, 0x5a, 0xce, 0xa8, 0x00, 
0x00, 0x00, + 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0x62, 0x2b, 0xc9, 0x48, 0xcc, 0xcb, 0x2f, 0x96, 0x12, 0x49, + 0xcf, 0x4f, 0xcf, 0x07, 0x0b, 0xe9, 0x83, 0x58, 0x10, 0x59, 0x25, 0x43, 0x2e, 0x56, 0x1f, 0x90, + 0x26, 0x21, 0x21, 0x2e, 0x96, 0xbc, 0xc4, 0xdc, 0x54, 0x09, 0x46, 0x05, 0x46, 0x0d, 0xce, 0x20, + 0x30, 0x5b, 0x48, 0x84, 0x8b, 0xb5, 0x2c, 0x31, 0xa7, 0x34, 0x55, 0x82, 0x09, 0x2c, 0x08, 0xe1, + 0x28, 0x99, 0x73, 0x71, 0x80, 0xb5, 0x04, 0xa7, 0x96, 0x08, 0x69, 0x73, 0xb1, 0x81, 0xed, 0x2c, + 0x96, 0x60, 0x54, 0x60, 0xd6, 0xe0, 0x36, 0xe2, 0xd5, 0x83, 0xd8, 0xa6, 0x07, 0x56, 0xe1, 0xc4, + 0x72, 0xe2, 0x9e, 0x3c, 0x43, 0x10, 0x54, 0x89, 0x93, 0xea, 0x89, 0x87, 0x72, 0x0c, 0x27, 0x1e, + 0xc9, 0x31, 0x5e, 0x78, 0x24, 0xc7, 0xf8, 0xe0, 0x91, 0x1c, 0xe3, 0x84, 0xc7, 0x72, 0x0c, 0x17, + 0x1e, 0xcb, 0x31, 0xdc, 0x78, 0x2c, 0xc7, 0x10, 0xc5, 0x0e, 0x75, 0x7c, 0x12, 0x1b, 0xd8, 0x65, + 0xc6, 0x80, 0x00, 0x00, 0x00, 0xff, 0xff, 0x44, 0x87, 0xd3, 0x2f, 0xd4, 0x00, 0x00, 0x00, } func (m *Label) Marshal() (dAtA []byte, err error) { @@ -166,10 +142,6 @@ func (m *Label) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if len(m.Value) > 0 { i -= len(m.Value) copy(dAtA[i:], m.Value) @@ -207,10 +179,6 @@ func (m *LabelSet) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if len(m.Labels) > 0 { for iNdEx := len(m.Labels) - 1; iNdEx >= 0; iNdEx-- { { @@ -253,9 +221,6 @@ func (m *Label) Size() (n int) { if l > 0 { n += 1 + l + sovTypes(uint64(l)) } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -271,9 +236,6 @@ func (m *LabelSet) Size() (n int) { n += 1 + l + sovTypes(uint64(l)) } } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -388,7 +350,6 @@ func (m *Label) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } @@ -456,7 +417,7 @@ func (m *LabelSet) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Labels = append(m.Labels, &Label{}) + m.Labels = append(m.Labels, Label{}) if err := m.Labels[len(m.Labels)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -473,7 +434,6 @@ func (m *LabelSet) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } diff --git a/pkg/store/labelpb/types.proto b/pkg/store/labelpb/types.proto index 2db41b5c95..fc732dd6df 100644 --- a/pkg/store/labelpb/types.proto +++ b/pkg/store/labelpb/types.proto @@ -6,11 +6,24 @@ package thanos; option go_package = "labelpb"; +import "gogoproto/gogo.proto"; + +option (gogoproto.sizer_all) = true; +option (gogoproto.marshaler_all) = true; +option (gogoproto.unmarshaler_all) = true; +option (gogoproto.goproto_getters_all) = false; + +// Do not generate XXX fields to reduce memory footprint and opening a door +// for zero-copy casts to/from prometheus data types. 
+option (gogoproto.goproto_unkeyed_all) = false; +option (gogoproto.goproto_unrecognized_all) = false; +option (gogoproto.goproto_sizecache_all) = false; + message Label { string name = 1; string value = 2; } message LabelSet { - repeated Label labels = 1; + repeated Label labels = 1 [(gogoproto.nullable) = false]; } \ No newline at end of file diff --git a/pkg/store/local.go b/pkg/store/local.go index fd33370ab9..30a39c21a1 100644 --- a/pkg/store/local.go +++ b/pkg/store/local.go @@ -163,7 +163,7 @@ func (s *LocalStore) Series(r *storepb.SeriesRequest, srv storepb.Store_SeriesSe resp := &storepb.Series{ // Copy labels as in-process clients like proxy tend to work on same memory for labels. Labels: series.Labels, - Chunks: make([]*storepb.AggrChunk, 0, len(s.sortedChunks[si])), + Chunks: make([]storepb.AggrChunk, 0, len(s.sortedChunks[si])), } for _, ci := range s.sortedChunks[si] { diff --git a/pkg/store/prometheus.go b/pkg/store/prometheus.go index 3bac17e12d..3148738b12 100644 --- a/pkg/store/prometheus.go +++ b/pkg/store/prometheus.go @@ -323,11 +323,11 @@ func (p *PrometheusStore) handleStreamedPrometheusResponse( } seriesStats.CountSeries(series.Labels) - thanosChks := make([]*storepb.AggrChunk, len(series.Chunks)) + thanosChks := make([]storepb.AggrChunk, len(series.Chunks)) for i, chk := range series.Chunks { chkHash := hashChunk(hasher, chk.Data, calculateChecksums) - thanosChks[i] = &storepb.AggrChunk{ + thanosChks[i] = storepb.AggrChunk{ MaxTime: chk.MaxTimeMs, MinTime: chk.MinTimeMs, Raw: &storepb.Chunk{ @@ -418,7 +418,7 @@ func (p *PrometheusStore) fetchSampledResponse(ctx context.Context, resp *http.R return &data, nil } -func (p *PrometheusStore) chunkSamples(series *prompb.TimeSeries, maxSamplesPerChunk int, calculateChecksums bool) (chks []*storepb.AggrChunk, err error) { +func (p *PrometheusStore) chunkSamples(series *prompb.TimeSeries, maxSamplesPerChunk int, calculateChecksums bool) (chks []storepb.AggrChunk, err error) { samples := series.Samples hasher := hashPool.Get().(hash.Hash64) defer hashPool.Put(hasher) @@ -435,7 +435,7 @@ func (p *PrometheusStore) chunkSamples(series *prompb.TimeSeries, maxSamplesPerC } chkHash := hashChunk(hasher, cb, calculateChecksums) - chks = append(chks, &storepb.AggrChunk{ + chks = append(chks, storepb.AggrChunk{ MinTime: samples[0].Timestamp, MaxTime: samples[chunkSize-1].Timestamp, Raw: &storepb.Chunk{Type: enc, Data: cb, Hash: chkHash}, @@ -487,7 +487,7 @@ func (p *PrometheusStore) startPromRemoteRead(ctx context.Context, q *prompb.Que // matchesExternalLabels returns false if given matchers are not matching external labels. // If true, matchesExternalLabels also returns Prometheus matchers without those matching external labels. -func matchesExternalLabels(ms []*storepb.LabelMatcher, externalLabels labels.Labels) (bool, []*labels.Matcher, error) { +func matchesExternalLabels(ms []storepb.LabelMatcher, externalLabels labels.Labels) (bool, []*labels.Matcher, error) { tms, err := storepb.MatchersToPromMatchers(ms...) if err != nil { return false, nil, err @@ -519,7 +519,7 @@ func matchesExternalLabels(ms []*storepb.LabelMatcher, externalLabels labels.Lab // encodeChunk translates the sample pairs into a chunk. // TODO(kakkoyun): Linter - result 0 (github.com/thanos-io/thanos/pkg/store/storepb.Chunk_Encoding) is always 0. 
-func (p *PrometheusStore) encodeChunk(ss []*prompb.Sample) (storepb.Chunk_Encoding, []byte, error) { //nolint:unparam +func (p *PrometheusStore) encodeChunk(ss []prompb.Sample) (storepb.Chunk_Encoding, []byte, error) { //nolint:unparam c := chunkenc.NewXORChunk() a, err := c.Appender() @@ -654,12 +654,12 @@ func (p *PrometheusStore) LabelValues(ctx context.Context, r *storepb.LabelValue return &storepb.LabelValuesResponse{Values: vals}, nil } -func (p *PrometheusStore) LabelSet() []*labelpb.LabelSet { +func (p *PrometheusStore) LabelSet() []labelpb.LabelSet { labels := labelpb.PromLabelsToLabelpbLabels(p.externalLabelsFn()) - labelset := []*labelpb.LabelSet{} + labelset := []labelpb.LabelSet{} if len(labels) > 0 { - labelset = append(labelset, &labelpb.LabelSet{ + labelset = append(labelset, labelpb.LabelSet{ Labels: labels, }) } @@ -667,16 +667,16 @@ func (p *PrometheusStore) LabelSet() []*labelpb.LabelSet { return labelset } -func (p *PrometheusStore) TSDBInfos() []*infopb.TSDBInfo { +func (p *PrometheusStore) TSDBInfos() []infopb.TSDBInfo { labels := p.LabelSet() if len(labels) == 0 { - return []*infopb.TSDBInfo{} + return []infopb.TSDBInfo{} } mint, maxt := p.Timestamps() - return []*infopb.TSDBInfo{ + return []infopb.TSDBInfo{ { - Labels: &labelpb.LabelSet{ + Labels: labelpb.LabelSet{ Labels: labels[0].Labels, }, MinTime: mint, diff --git a/pkg/store/prometheus_test.go b/pkg/store/prometheus_test.go index b9a70ac8f3..5a31ccbf17 100644 --- a/pkg/store/prometheus_test.go +++ b/pkg/store/prometheus_test.go @@ -82,14 +82,14 @@ func testPrometheusStoreSeriesE2e(t *testing.T, prefix string) { testutil.Ok(t, proxy.Series(&storepb.SeriesRequest{ MinTime: baseT + 101, MaxTime: baseT + 300, - Matchers: []*storepb.LabelMatcher{ + Matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "a", Value: "b"}, }, }, srv)) testutil.Equals(t, 1, len(srv.SeriesSet)) - testutil.Equals(t, []*labelpb.Label{ + testutil.Equals(t, []labelpb.Label{ {Name: "a", Value: "b"}, {Name: "region", Value: "eu-west"}, }, srv.SeriesSet[0].Labels) @@ -113,7 +113,7 @@ func testPrometheusStoreSeriesE2e(t *testing.T, prefix string) { testutil.Ok(t, proxy.Series(&storepb.SeriesRequest{ MinTime: 0, MaxTime: baseT + 300, - Matchers: []*storepb.LabelMatcher{ + Matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "a", Value: "b"}, }, }, srv)) @@ -122,7 +122,7 @@ func testPrometheusStoreSeriesE2e(t *testing.T, prefix string) { testutil.Equals(t, 1, len(srv.SeriesSet)) - testutil.Equals(t, []*labelpb.Label{ + testutil.Equals(t, []labelpb.Label{ {Name: "a", Value: "b"}, {Name: "region", Value: "eu-west"}, }, srv.SeriesSet[0].Labels) @@ -146,7 +146,7 @@ func testPrometheusStoreSeriesE2e(t *testing.T, prefix string) { err = proxy.Series(&storepb.SeriesRequest{ MinTime: baseT + 101, MaxTime: baseT + 300, - Matchers: []*storepb.LabelMatcher{ + Matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "region", Value: "eu-west"}, }, }, srv) @@ -212,7 +212,7 @@ func TestPrometheusStore_SeriesLabels_e2e(t *testing.T) { { req: &storepb.SeriesRequest{ SkipChunks: true, - Matchers: []*storepb.LabelMatcher{}, + Matchers: []storepb.LabelMatcher{}, MinTime: baseT - 10000000000, MaxTime: baseT + 10000000000, }, @@ -221,7 +221,7 @@ func TestPrometheusStore_SeriesLabels_e2e(t *testing.T) { { req: &storepb.SeriesRequest{ SkipChunks: true, - Matchers: []*storepb.LabelMatcher{ + Matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "non_existing", Value: "something"}, }, MinTime: baseT - 
10000000000, @@ -231,7 +231,7 @@ func TestPrometheusStore_SeriesLabels_e2e(t *testing.T) { { req: &storepb.SeriesRequest{ SkipChunks: true, - Matchers: []*storepb.LabelMatcher{ + Matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "a", Value: "b"}, }, MinTime: baseT, @@ -239,14 +239,14 @@ func TestPrometheusStore_SeriesLabels_e2e(t *testing.T) { }, expected: []storepb.Series{ { - Labels: []*labelpb.Label{{Name: "a", Value: "b"}, {Name: "b", Value: "d"}, {Name: "region", Value: "eu-west"}}, + Labels: []labelpb.Label{{Name: "a", Value: "b"}, {Name: "b", Value: "d"}, {Name: "region", Value: "eu-west"}}, }, }, }, { req: &storepb.SeriesRequest{ SkipChunks: true, - Matchers: []*storepb.LabelMatcher{ + Matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "job", Value: "foo"}, }, MinTime: baseT, @@ -256,7 +256,7 @@ func TestPrometheusStore_SeriesLabels_e2e(t *testing.T) { { req: &storepb.SeriesRequest{ SkipChunks: true, - Matchers: []*storepb.LabelMatcher{ + Matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_NEQ, Name: "a", Value: "b"}, {Type: storepb.LabelMatcher_EQ, Name: "job", Value: "test"}, }, @@ -265,17 +265,17 @@ func TestPrometheusStore_SeriesLabels_e2e(t *testing.T) { }, expected: []storepb.Series{ { - Labels: []*labelpb.Label{{Name: "a", Value: "c"}, {Name: "b", Value: "d"}, {Name: "job", Value: "test"}, {Name: "region", Value: "eu-west"}}, + Labels: []labelpb.Label{{Name: "a", Value: "c"}, {Name: "b", Value: "d"}, {Name: "job", Value: "test"}, {Name: "region", Value: "eu-west"}}, }, { - Labels: []*labelpb.Label{{Name: "a", Value: "d"}, {Name: "b", Value: "d"}, {Name: "job", Value: "test"}, {Name: "region", Value: "eu-west"}}, + Labels: []labelpb.Label{{Name: "a", Value: "d"}, {Name: "b", Value: "d"}, {Name: "job", Value: "test"}, {Name: "region", Value: "eu-west"}}, }, }, }, { req: &storepb.SeriesRequest{ SkipChunks: true, - Matchers: []*storepb.LabelMatcher{ + Matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "job", Value: "test"}, }, MinTime: baseT, @@ -283,17 +283,17 @@ func TestPrometheusStore_SeriesLabels_e2e(t *testing.T) { }, expected: []storepb.Series{ { - Labels: []*labelpb.Label{{Name: "a", Value: "c"}, {Name: "b", Value: "d"}, {Name: "job", Value: "test"}, {Name: "region", Value: "eu-west"}}, + Labels: []labelpb.Label{{Name: "a", Value: "c"}, {Name: "b", Value: "d"}, {Name: "job", Value: "test"}, {Name: "region", Value: "eu-west"}}, }, { - Labels: []*labelpb.Label{{Name: "a", Value: "d"}, {Name: "b", Value: "d"}, {Name: "job", Value: "test"}, {Name: "region", Value: "eu-west"}}, + Labels: []labelpb.Label{{Name: "a", Value: "d"}, {Name: "b", Value: "d"}, {Name: "job", Value: "test"}, {Name: "region", Value: "eu-west"}}, }, }, }, { req: &storepb.SeriesRequest{ SkipChunks: true, - Matchers: []*storepb.LabelMatcher{ + Matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "job", Value: "test"}, }, MinTime: baseT + 400, @@ -301,14 +301,14 @@ func TestPrometheusStore_SeriesLabels_e2e(t *testing.T) { }, expected: []storepb.Series{ { - Labels: []*labelpb.Label{{Name: "b", Value: "d"}, {Name: "job", Value: "test"}, {Name: "region", Value: "eu-west"}}, + Labels: []labelpb.Label{{Name: "b", Value: "d"}, {Name: "job", Value: "test"}, {Name: "region", Value: "eu-west"}}, }, }, }, { req: &storepb.SeriesRequest{ SkipChunks: true, - Matchers: []*storepb.LabelMatcher{ + Matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "job", Value: "test"}, }, MinTime: func() int64 { minTime, _ := 
promStore.timestamps(); return minTime }(), @@ -316,13 +316,13 @@ func TestPrometheusStore_SeriesLabels_e2e(t *testing.T) { }, expected: []storepb.Series{ { - Labels: []*labelpb.Label{{Name: "a", Value: "c"}, {Name: "b", Value: "d"}, {Name: "job", Value: "test"}, {Name: "region", Value: "eu-west"}}, + Labels: []labelpb.Label{{Name: "a", Value: "c"}, {Name: "b", Value: "d"}, {Name: "job", Value: "test"}, {Name: "region", Value: "eu-west"}}, }, { - Labels: []*labelpb.Label{{Name: "a", Value: "d"}, {Name: "b", Value: "d"}, {Name: "job", Value: "test"}, {Name: "region", Value: "eu-west"}}, + Labels: []labelpb.Label{{Name: "a", Value: "d"}, {Name: "b", Value: "d"}, {Name: "job", Value: "test"}, {Name: "region", Value: "eu-west"}}, }, { - Labels: []*labelpb.Label{{Name: "b", Value: "d"}, {Name: "job", Value: "test"}, {Name: "region", Value: "eu-west"}}, + Labels: []labelpb.Label{{Name: "b", Value: "d"}, {Name: "job", Value: "test"}, {Name: "region", Value: "eu-west"}}, }, }, }, @@ -378,14 +378,14 @@ func TestPrometheusStore_Series_MatchExternalLabel(t *testing.T) { testutil.Ok(t, proxy.Series(&storepb.SeriesRequest{ MinTime: baseT + 101, MaxTime: baseT + 300, - Matchers: []*storepb.LabelMatcher{ + Matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "a", Value: "b"}, {Type: storepb.LabelMatcher_EQ, Name: "region", Value: "eu-west"}, }, }, srv)) testutil.Equals(t, 1, len(srv.SeriesSet)) - testutil.Equals(t, []*labelpb.Label{ + testutil.Equals(t, []labelpb.Label{ {Name: "a", Value: "b"}, {Name: "region", Value: "eu-west"}, }, srv.SeriesSet[0].Labels) @@ -395,7 +395,7 @@ func TestPrometheusStore_Series_MatchExternalLabel(t *testing.T) { testutil.Ok(t, proxy.Series(&storepb.SeriesRequest{ MinTime: baseT + 101, MaxTime: baseT + 300, - Matchers: []*storepb.LabelMatcher{ + Matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "a", Value: "b"}, {Type: storepb.LabelMatcher_EQ, Name: "region", Value: "eu-west2"}, // Non existing label value. }, @@ -441,7 +441,7 @@ func TestPrometheusStore_Series_ChunkHashCalculation_Integration(t *testing.T) { testutil.Ok(t, proxy.Series(&storepb.SeriesRequest{ MinTime: baseT + 101, MaxTime: baseT + 300, - Matchers: []*storepb.LabelMatcher{ + Matchers: []storepb.LabelMatcher{ {Name: "a", Value: "b"}, {Type: storepb.LabelMatcher_EQ, Name: "region", Value: "eu-west"}, }, diff --git a/pkg/store/proxy.go b/pkg/store/proxy.go index 240cfbd2eb..e3a22183bc 100644 --- a/pkg/store/proxy.go +++ b/pkg/store/proxy.go @@ -56,7 +56,7 @@ type Client interface { TimeRange() (mint int64, maxt int64) // TSDBInfos returns metadata about each TSDB backed by the client. - TSDBInfos() []*infopb.TSDBInfo + TSDBInfos() []infopb.TSDBInfo // SupportsSharding returns true if sharding is supported by the underlying store. 
SupportsSharding() bool @@ -165,10 +165,10 @@ func NewProxyStore( return s } -func (s *ProxyStore) LabelSet() []*labelpb.LabelSet { +func (s *ProxyStore) LabelSet() []labelpb.LabelSet { stores := s.stores() if len(stores) == 0 { - return []*labelpb.LabelSet{} + return []labelpb.LabelSet{} } mergedLabelSets := make(map[uint64]labelpb.LabelSet, len(stores)) @@ -179,11 +179,9 @@ func (s *ProxyStore) LabelSet() []*labelpb.LabelSet { } } - labelSets := make([]*labelpb.LabelSet, 0, len(mergedLabelSets)) + labelSets := make([]labelpb.LabelSet, 0, len(mergedLabelSets)) for _, v := range mergedLabelSets { - v := v - - labelSets = append(labelSets, &v) + labelSets = append(labelSets, v) } // We always want to enforce announcing the subset of data that @@ -192,7 +190,7 @@ func (s *ProxyStore) LabelSet() []*labelpb.LabelSet { // announcing this subset by announcing the selector as the label-set. selectorLabels := labelpb.PromLabelsToLabelpbLabels(s.selectorLabels) if len(labelSets) == 0 && len(selectorLabels) > 0 { - labelSets = append(labelSets, &labelpb.LabelSet{Labels: selectorLabels}) + labelSets = append(labelSets, labelpb.LabelSet{Labels: selectorLabels}) } return labelSets @@ -218,8 +216,8 @@ func (s *ProxyStore) TimeRange() (int64, int64) { return minTime, maxTime } -func (s *ProxyStore) TSDBInfos() []*infopb.TSDBInfo { - infos := make([]*infopb.TSDBInfo, 0) +func (s *ProxyStore) TSDBInfos() []infopb.TSDBInfo { + infos := make([]infopb.TSDBInfo, 0) for _, st := range s.stores() { matches, _ := s.tsdbSelector.MatchLabelSets(st.LabelSets()...) if !matches { diff --git a/pkg/store/proxy_merge.go b/pkg/store/proxy_merge.go index bf940a28d1..aaf94efbb3 100644 --- a/pkg/store/proxy_merge.go +++ b/pkg/store/proxy_merge.go @@ -37,7 +37,7 @@ type responseDeduplicator struct { prev *storepb.SeriesResponse ok bool - chunkDedupMap map[uint64]*storepb.AggrChunk + chunkDedupMap map[uint64]storepb.AggrChunk } // NewResponseDeduplicator returns a wrapper around a loser tree that merges duplicated series messages into one. 
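Note: a minimal sketch (not part of the patch) of how chunk handling reads once AggrChunk travels by value, mirroring the chunkDedupMap change above. The example package, the dedupChunksByHash helper, and hashing Raw.Data with xxhash are assumptions for illustration only; the patch itself does not add this function.

package example

import (
	"github.com/cespare/xxhash/v2"

	"github.com/thanos-io/thanos/pkg/store/storepb"
)

// dedupChunksByHash keeps one AggrChunk per raw-data hash. With value
// elements the map stores struct copies directly, so no per-chunk pointer
// escapes to the heap.
func dedupChunksByHash(chks []storepb.AggrChunk) []storepb.AggrChunk {
	seen := make(map[uint64]storepb.AggrChunk, len(chks))
	for _, c := range chks {
		if c.Raw == nil { // Raw stays a *Chunk in this patch; guard against nil.
			continue
		}
		h := xxhash.Sum64(c.Raw.Data)
		if _, ok := seen[h]; !ok {
			seen[h] = c // struct copy; previously this would have been &c.
		}
	}
	// Map iteration order is unspecified; sort afterwards if ordering matters.
	out := make([]storepb.AggrChunk, 0, len(seen))
	for _, c := range seen {
		out = append(out, c)
	}
	return out
}

Comparisons then also work on values, e.g. out[i].Compare(out[j]), which matches the Compare call-site change in the next hunk.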
@@ -52,7 +52,7 @@ func NewResponseDeduplicator(h *losertree.Tree[*storepb.SeriesResponse, respSet] h: h, ok: ok, prev: prev, - chunkDedupMap: make(map[uint64]*storepb.AggrChunk), + chunkDedupMap: make(map[uint64]storepb.AggrChunk), } } @@ -154,7 +154,7 @@ func (d *responseDeduplicator) chainSeriesAndRemIdenticalChunks(series []*storep } sort.Slice(finalChunks, func(i, j int) bool { - return finalChunks[i].Compare(*finalChunks[j]) > 0 + return finalChunks[i].Compare(finalChunks[j]) > 0 }) return storepb.NewSeriesResponse(&storepb.Series{ diff --git a/pkg/store/proxy_test.go b/pkg/store/proxy_test.go index f57d830c3f..aaf1869f4d 100644 --- a/pkg/store/proxy_test.go +++ b/pkg/store/proxy_test.go @@ -63,13 +63,13 @@ func TestProxyStore_TSDBInfos(t *testing.T) { StoreTSDBInfos: nil, }, &storetestutil.TestClient{ - StoreTSDBInfos: []*infopb.TSDBInfo{ - infopb.NewTSDBInfo(0, 10, []*labelpb.Label{{Name: "lbl", Value: "val1"}}), + StoreTSDBInfos: []infopb.TSDBInfo{ + infopb.NewTSDBInfo(0, 10, []labelpb.Label{{Name: "lbl", Value: "val1"}}), }, }, &storetestutil.TestClient{ - StoreTSDBInfos: []*infopb.TSDBInfo{ - infopb.NewTSDBInfo(0, 20, []*labelpb.Label{{Name: "lbl", Value: "val2"}}), + StoreTSDBInfos: []infopb.TSDBInfo{ + infopb.NewTSDBInfo(0, 20, []labelpb.Label{{Name: "lbl", Value: "val2"}}), }, }, } @@ -78,9 +78,9 @@ func TestProxyStore_TSDBInfos(t *testing.T) { component.Query, labels.EmptyLabels(), 0*time.Second, EagerRetrieval, ) - expected := []*infopb.TSDBInfo{ - infopb.NewTSDBInfo(0, 10, []*labelpb.Label{{Name: "lbl", Value: "val1"}}), - infopb.NewTSDBInfo(0, 20, []*labelpb.Label{{Name: "lbl", Value: "val2"}}), + expected := []infopb.TSDBInfo{ + infopb.NewTSDBInfo(0, 10, []labelpb.Label{{Name: "lbl", Value: "val1"}}), + infopb.NewTSDBInfo(0, 20, []labelpb.Label{{Name: "lbl", Value: "val2"}}), } testutil.Equals(t, expected, q.TSDBInfos()) } @@ -106,7 +106,7 @@ func TestProxyStore_Series(t *testing.T) { req: &storepb.SeriesRequest{ MinTime: 1, MaxTime: 300, - Matchers: []*storepb.LabelMatcher{{Name: "a", Value: "a", Type: storepb.LabelMatcher_EQ}}, + Matchers: []storepb.LabelMatcher{{Name: "a", Value: "a", Type: storepb.LabelMatcher_EQ}}, }, expectedWarningsLen: 0, // No store matched for this query. }, @@ -126,7 +126,7 @@ func TestProxyStore_Series(t *testing.T) { req: &storepb.SeriesRequest{ MinTime: 301, MaxTime: 400, - Matchers: []*storepb.LabelMatcher{{Name: "a", Value: "a", Type: storepb.LabelMatcher_EQ}}, + Matchers: []storepb.LabelMatcher{{Name: "a", Value: "a", Type: storepb.LabelMatcher_EQ}}, }, expectedWarningsLen: 0, // No store matched for this query. }, @@ -147,7 +147,7 @@ func TestProxyStore_Series(t *testing.T) { req: &storepb.SeriesRequest{ MinTime: 1, MaxTime: 300, - Matchers: []*storepb.LabelMatcher{{Name: "ext", Value: "2", Type: storepb.LabelMatcher_EQ}}, + Matchers: []storepb.LabelMatcher{{Name: "ext", Value: "2", Type: storepb.LabelMatcher_EQ}}, }, expectedWarningsLen: 0, // No store matched for this query. 
}, @@ -168,7 +168,7 @@ func TestProxyStore_Series(t *testing.T) { req: &storepb.SeriesRequest{ MinTime: 1, MaxTime: 300, - Matchers: []*storepb.LabelMatcher{{Name: "ext", Value: "1", Type: storepb.LabelMatcher_EQ}}, + Matchers: []storepb.LabelMatcher{{Name: "ext", Value: "1", Type: storepb.LabelMatcher_EQ}}, }, expectedSeries: []rawSeries{ { @@ -193,7 +193,7 @@ func TestProxyStore_Series(t *testing.T) { req: &storepb.SeriesRequest{ MinTime: 1, MaxTime: 300, - Matchers: []*storepb.LabelMatcher{{Name: "ext", Value: "1", Type: storepb.LabelMatcher_EQ}}, + Matchers: []storepb.LabelMatcher{{Name: "ext", Value: "1", Type: storepb.LabelMatcher_EQ}}, }, expectedSeries: []rawSeries{ { @@ -219,7 +219,7 @@ func TestProxyStore_Series(t *testing.T) { req: &storepb.SeriesRequest{ MinTime: 1, MaxTime: 300, - Matchers: []*storepb.LabelMatcher{{Name: "ext", Value: "1", Type: storepb.LabelMatcher_EQ}}, + Matchers: []storepb.LabelMatcher{{Name: "ext", Value: "1", Type: storepb.LabelMatcher_EQ}}, }, }, { @@ -238,7 +238,7 @@ func TestProxyStore_Series(t *testing.T) { req: &storepb.SeriesRequest{ MinTime: 1, MaxTime: 300, - Matchers: []*storepb.LabelMatcher{{Name: "a", Value: "b", Type: storepb.LabelMatcher_EQ}}, + Matchers: []storepb.LabelMatcher{{Name: "a", Value: "b", Type: storepb.LabelMatcher_EQ}}, }, expectedSeries: []rawSeries{ { @@ -304,7 +304,7 @@ func TestProxyStore_Series(t *testing.T) { req: &storepb.SeriesRequest{ MinTime: 1, MaxTime: 300, - Matchers: []*storepb.LabelMatcher{{Name: "ext", Value: "1", Type: storepb.LabelMatcher_EQ}}, + Matchers: []storepb.LabelMatcher{{Name: "ext", Value: "1", Type: storepb.LabelMatcher_EQ}}, }, expectedSeries: []rawSeries{ { @@ -349,7 +349,7 @@ func TestProxyStore_Series(t *testing.T) { req: &storepb.SeriesRequest{ MinTime: 1, MaxTime: 300, - Matchers: []*storepb.LabelMatcher{{Name: "ext", Value: "1", Type: storepb.LabelMatcher_EQ}}, + Matchers: []storepb.LabelMatcher{{Name: "ext", Value: "1", Type: storepb.LabelMatcher_EQ}}, }, expectedSeries: []rawSeries{ { @@ -391,7 +391,7 @@ func TestProxyStore_Series(t *testing.T) { req: &storepb.SeriesRequest{ MinTime: 1, MaxTime: 300, - Matchers: []*storepb.LabelMatcher{{Name: "ext", Value: "1", Type: storepb.LabelMatcher_EQ}}, + Matchers: []storepb.LabelMatcher{{Name: "ext", Value: "1", Type: storepb.LabelMatcher_EQ}}, }, expectedSeries: []rawSeries{ { @@ -435,7 +435,7 @@ func TestProxyStore_Series(t *testing.T) { req: &storepb.SeriesRequest{ MinTime: 1, MaxTime: 300, - Matchers: []*storepb.LabelMatcher{{Name: "ext", Value: "1", Type: storepb.LabelMatcher_EQ}}, + Matchers: []storepb.LabelMatcher{{Name: "ext", Value: "1", Type: storepb.LabelMatcher_EQ}}, }, expectedSeries: []rawSeries{ { @@ -470,7 +470,7 @@ func TestProxyStore_Series(t *testing.T) { req: &storepb.SeriesRequest{ MinTime: 1, MaxTime: 300, - Matchers: []*storepb.LabelMatcher{{Name: "ext", Value: "1", Type: storepb.LabelMatcher_EQ}}, + Matchers: []storepb.LabelMatcher{{Name: "ext", Value: "1", Type: storepb.LabelMatcher_EQ}}, }, expectedSeries: []rawSeries{ { @@ -506,7 +506,7 @@ func TestProxyStore_Series(t *testing.T) { req: &storepb.SeriesRequest{ MinTime: 1, MaxTime: 300, - Matchers: []*storepb.LabelMatcher{{Name: "ext", Value: "1", Type: storepb.LabelMatcher_EQ}}, + Matchers: []storepb.LabelMatcher{{Name: "ext", Value: "1", Type: storepb.LabelMatcher_EQ}}, PartialResponseDisabled: true, PartialResponseStrategy: storepb.PartialResponseStrategy_ABORT, }, @@ -530,7 +530,7 @@ func TestProxyStore_Series(t *testing.T) { req: &storepb.SeriesRequest{ MinTime: 1, 
MaxTime: 300, - Matchers: []*storepb.LabelMatcher{{Name: "ext", Value: "1", Type: storepb.LabelMatcher_EQ}}, + Matchers: []storepb.LabelMatcher{{Name: "ext", Value: "1", Type: storepb.LabelMatcher_EQ}}, }, storeDebugMatchers: [][]*labels.Matcher{{labels.MustNewMatcher(labels.MatchEqual, "__address__", "testaddr")}}, @@ -559,7 +559,7 @@ func TestProxyStore_Series(t *testing.T) { req: &storepb.SeriesRequest{ MinTime: 1, MaxTime: 300, - Matchers: []*storepb.LabelMatcher{{Name: "ext", Value: "1", Type: storepb.LabelMatcher_EQ}}, + Matchers: []storepb.LabelMatcher{{Name: "ext", Value: "1", Type: storepb.LabelMatcher_EQ}}, }, storeDebugMatchers: [][]*labels.Matcher{{labels.MustNewMatcher(labels.MatchEqual, "__address__", "foo")}}, expectedWarningsLen: 0, // No stores match. @@ -583,7 +583,7 @@ func TestProxyStore_Series(t *testing.T) { req: &storepb.SeriesRequest{ MinTime: 1, MaxTime: 300, - Matchers: []*storepb.LabelMatcher{{Name: "ext", Value: "1", Type: storepb.LabelMatcher_EQ}}, + Matchers: []storepb.LabelMatcher{{Name: "ext", Value: "1", Type: storepb.LabelMatcher_EQ}}, ShardInfo: &storepb.ShardInfo{ ShardIndex: 0, TotalShards: 2, @@ -629,7 +629,7 @@ func TestProxyStore_Series(t *testing.T) { req: &storepb.SeriesRequest{ MinTime: 1, MaxTime: 300, - Matchers: []*storepb.LabelMatcher{ + Matchers: []storepb.LabelMatcher{ {Name: "zone", Value: ".+", Type: storepb.LabelMatcher_RE}, }, }, @@ -695,7 +695,7 @@ func TestProxyStore_Series(t *testing.T) { req: &storepb.SeriesRequest{ MinTime: 1, MaxTime: 300, - Matchers: []*storepb.LabelMatcher{ + Matchers: []storepb.LabelMatcher{ {Name: "zone", Value: ".+", Type: storepb.LabelMatcher_RE}, }, }, @@ -746,7 +746,7 @@ func TestProxyStore_Series(t *testing.T) { req: &storepb.SeriesRequest{ MinTime: 1, MaxTime: 300, - Matchers: []*storepb.LabelMatcher{{Name: "a", Value: "[1-4]", Type: storepb.LabelMatcher_RE}}, + Matchers: []storepb.LabelMatcher{{Name: "a", Value: "[1-4]", Type: storepb.LabelMatcher_RE}}, SkipChunks: true, Limit: 2, }, @@ -872,7 +872,7 @@ func TestProxyStore_SeriesSlowStores(t *testing.T) { req: &storepb.SeriesRequest{ MinTime: 1, MaxTime: 300, - Matchers: []*storepb.LabelMatcher{{Name: "ext", Value: "1", Type: storepb.LabelMatcher_EQ}}, + Matchers: []storepb.LabelMatcher{{Name: "ext", Value: "1", Type: storepb.LabelMatcher_EQ}}, PartialResponseDisabled: true, PartialResponseStrategy: storepb.PartialResponseStrategy_ABORT, }, @@ -908,7 +908,7 @@ func TestProxyStore_SeriesSlowStores(t *testing.T) { req: &storepb.SeriesRequest{ MinTime: 1, MaxTime: 300, - Matchers: []*storepb.LabelMatcher{{Name: "ext", Value: "1", Type: storepb.LabelMatcher_EQ}}, + Matchers: []storepb.LabelMatcher{{Name: "ext", Value: "1", Type: storepb.LabelMatcher_EQ}}, PartialResponseDisabled: true, PartialResponseStrategy: storepb.PartialResponseStrategy_ABORT, }, @@ -944,7 +944,7 @@ func TestProxyStore_SeriesSlowStores(t *testing.T) { req: &storepb.SeriesRequest{ MinTime: 1, MaxTime: 300, - Matchers: []*storepb.LabelMatcher{{Name: "ext", Value: "1", Type: storepb.LabelMatcher_EQ}}, + Matchers: []storepb.LabelMatcher{{Name: "ext", Value: "1", Type: storepb.LabelMatcher_EQ}}, PartialResponseDisabled: true, PartialResponseStrategy: storepb.PartialResponseStrategy_ABORT, }, @@ -983,7 +983,7 @@ func TestProxyStore_SeriesSlowStores(t *testing.T) { req: &storepb.SeriesRequest{ MinTime: 1, MaxTime: 300, - Matchers: []*storepb.LabelMatcher{{Name: "ext", Value: "1", Type: storepb.LabelMatcher_EQ}}, + Matchers: []storepb.LabelMatcher{{Name: "ext", Value: "1", Type: 
storepb.LabelMatcher_EQ}}, PartialResponseDisabled: true, PartialResponseStrategy: storepb.PartialResponseStrategy_ABORT, }, @@ -1022,7 +1022,7 @@ func TestProxyStore_SeriesSlowStores(t *testing.T) { req: &storepb.SeriesRequest{ MinTime: 1, MaxTime: 300, - Matchers: []*storepb.LabelMatcher{{Name: "ext", Value: "1", Type: storepb.LabelMatcher_EQ}}, + Matchers: []storepb.LabelMatcher{{Name: "ext", Value: "1", Type: storepb.LabelMatcher_EQ}}, PartialResponseDisabled: true, PartialResponseStrategy: storepb.PartialResponseStrategy_ABORT, }, @@ -1058,7 +1058,7 @@ func TestProxyStore_SeriesSlowStores(t *testing.T) { req: &storepb.SeriesRequest{ MinTime: 1, MaxTime: 300, - Matchers: []*storepb.LabelMatcher{{Name: "ext", Value: "1", Type: storepb.LabelMatcher_EQ}}, + Matchers: []storepb.LabelMatcher{{Name: "ext", Value: "1", Type: storepb.LabelMatcher_EQ}}, }, expectedSeries: []rawSeries{ { @@ -1098,7 +1098,7 @@ func TestProxyStore_SeriesSlowStores(t *testing.T) { req: &storepb.SeriesRequest{ MinTime: 1, MaxTime: 300, - Matchers: []*storepb.LabelMatcher{{Name: "ext", Value: "1", Type: storepb.LabelMatcher_EQ}}, + Matchers: []storepb.LabelMatcher{{Name: "ext", Value: "1", Type: storepb.LabelMatcher_EQ}}, }, expectedSeries: []rawSeries{ { @@ -1161,7 +1161,7 @@ func TestProxyStore_SeriesSlowStores(t *testing.T) { req: &storepb.SeriesRequest{ MinTime: 1, MaxTime: 300, - Matchers: []*storepb.LabelMatcher{{Name: "ext", Value: "1", Type: storepb.LabelMatcher_EQ}}, + Matchers: []storepb.LabelMatcher{{Name: "ext", Value: "1", Type: storepb.LabelMatcher_EQ}}, }, expectedSeries: []rawSeries{ { @@ -1208,7 +1208,7 @@ func TestProxyStore_SeriesSlowStores(t *testing.T) { req: &storepb.SeriesRequest{ MinTime: 1, MaxTime: 300, - Matchers: []*storepb.LabelMatcher{{Name: "ext", Value: "1", Type: storepb.LabelMatcher_EQ}}, + Matchers: []storepb.LabelMatcher{{Name: "ext", Value: "1", Type: storepb.LabelMatcher_EQ}}, }, expectedSeries: []rawSeries{ { @@ -1242,7 +1242,7 @@ func TestProxyStore_SeriesSlowStores(t *testing.T) { req: &storepb.SeriesRequest{ MinTime: 1, MaxTime: 300, - Matchers: []*storepb.LabelMatcher{{Name: "ext", Value: "1", Type: storepb.LabelMatcher_EQ}}, + Matchers: []storepb.LabelMatcher{{Name: "ext", Value: "1", Type: storepb.LabelMatcher_EQ}}, PartialResponseDisabled: true, PartialResponseStrategy: storepb.PartialResponseStrategy_ABORT, }, @@ -1287,7 +1287,7 @@ func TestProxyStore_SeriesSlowStores(t *testing.T) { req: &storepb.SeriesRequest{ MinTime: 1, MaxTime: 300, - Matchers: []*storepb.LabelMatcher{{Name: "ext", Value: "1", Type: storepb.LabelMatcher_EQ}}, + Matchers: []storepb.LabelMatcher{{Name: "ext", Value: "1", Type: storepb.LabelMatcher_EQ}}, }, expectedSeries: []rawSeries{ { @@ -1377,7 +1377,7 @@ func TestProxyStore_Series_RequestParamsProxied(t *testing.T) { req := &storepb.SeriesRequest{ MinTime: 1, MaxTime: 300, - Matchers: []*storepb.LabelMatcher{{Name: "ext", Value: "1", Type: storepb.LabelMatcher_EQ}}, + Matchers: []storepb.LabelMatcher{{Name: "ext", Value: "1", Type: storepb.LabelMatcher_EQ}}, PartialResponseDisabled: false, Aggregates: []storepb.Aggr{ storepb.Aggr_COUNTER, @@ -1439,7 +1439,7 @@ func TestProxyStore_Series_RegressionFillResponseChannel(t *testing.T) { &storepb.SeriesRequest{ MinTime: 1, MaxTime: 300, - Matchers: []*storepb.LabelMatcher{{Name: "any", Value: ".*", Type: storepb.LabelMatcher_RE}}, + Matchers: []storepb.LabelMatcher{{Name: "any", Value: ".*", Type: storepb.LabelMatcher_RE}}, PartialResponseStrategy: storepb.PartialResponseStrategy_WARN, }, s, )) @@ 
-2036,7 +2036,7 @@ func storeSeriesResponse(t testing.TB, lset labels.Labels, smplChunks ...[]sampl Raw: &storepb.Chunk{Type: storepb.Chunk_XOR, Data: c.Bytes()}, } - s.Chunks = append(s.Chunks, &ch) + s.Chunks = append(s.Chunks, ch) } return storepb.NewSeriesResponse(&s) } @@ -2144,7 +2144,7 @@ func benchProxySeries(t testutil.TB, totalSamples, totalSeries int) { Req: &storepb.SeriesRequest{ MinTime: 0, MaxTime: maxTime, - Matchers: []*storepb.LabelMatcher{ + Matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "foo", Value: "bar"}, }, }, @@ -2177,7 +2177,7 @@ func benchProxySeries(t testutil.TB, totalSamples, totalSeries int) { Req: &storepb.SeriesRequest{ MinTime: 0, MaxTime: maxTime, - Matchers: []*storepb.LabelMatcher{ + Matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "foo", Value: "bar"}, }, }, @@ -2242,7 +2242,7 @@ func TestProxyStore_NotLeakingOnPrematureFinish(t *testing.T) { t.Run("failing send", func(t *testing.T) { ctx, cancel := context.WithCancel(context.Background()) // We mimic failing series server, but practically context cancel will do the same. - testutil.NotOk(t, p.Series(&storepb.SeriesRequest{Matchers: []*storepb.LabelMatcher{{}}, PartialResponseStrategy: storepb.PartialResponseStrategy_ABORT}, &mockedSeriesServer{ + testutil.NotOk(t, p.Series(&storepb.SeriesRequest{Matchers: []storepb.LabelMatcher{{}}, PartialResponseStrategy: storepb.PartialResponseStrategy_ABORT}, &mockedSeriesServer{ ctx: ctx, send: func(*storepb.SeriesResponse) error { cancel() @@ -2304,7 +2304,7 @@ func TestDedupRespHeap_Deduplication(t *testing.T) { Result: &storepb.SeriesResponse_Series{ Series: &storepb.Series{ Labels: labelpb.PromLabelsToLabelpbLabels(labels.FromStrings("foo", "bar")), - Chunks: []*storepb.AggrChunk{ + Chunks: []storepb.AggrChunk{ { Raw: &storepb.Chunk{ Type: storepb.Chunk_XOR, @@ -2330,7 +2330,7 @@ func TestDedupRespHeap_Deduplication(t *testing.T) { Result: &storepb.SeriesResponse_Series{ Series: &storepb.Series{ Labels: labelpb.PromLabelsToLabelpbLabels(labels.FromStrings("foo", "bar")), - Chunks: []*storepb.AggrChunk{ + Chunks: []storepb.AggrChunk{ { Raw: &storepb.Chunk{ Type: storepb.Chunk_XOR, @@ -2345,7 +2345,7 @@ func TestDedupRespHeap_Deduplication(t *testing.T) { Result: &storepb.SeriesResponse_Series{ Series: &storepb.Series{ Labels: labelpb.PromLabelsToLabelpbLabels(labels.FromStrings("foo", "bar")), - Chunks: []*storepb.AggrChunk{ + Chunks: []storepb.AggrChunk{ { Raw: &storepb.Chunk{ Type: storepb.Chunk_XOR, diff --git a/pkg/store/storepb/custom.go b/pkg/store/storepb/custom.go index 9d02f392c4..499538271f 100644 --- a/pkg/store/storepb/custom.go +++ b/pkg/store/storepb/custom.go @@ -64,9 +64,9 @@ func GRPCCodeFromWarn(warn string) codes.Code { type emptySeriesSet struct{} -func (emptySeriesSet) Next() bool { return false } -func (emptySeriesSet) At() (labels.Labels, []*AggrChunk) { return labels.EmptyLabels(), nil } -func (emptySeriesSet) Err() error { return nil } +func (emptySeriesSet) Next() bool { return false } +func (emptySeriesSet) At() (labels.Labels, []AggrChunk) { return labels.EmptyLabels(), nil } +func (emptySeriesSet) Err() error { return nil } // EmptySeriesSet returns a new series set that contains no series. func EmptySeriesSet() SeriesSet { @@ -105,7 +105,7 @@ func MergeSeriesSets(all ...SeriesSet) SeriesSet { // The set is sorted by the label sets. Chunks may be overlapping or expected of order. 
type SeriesSet interface { Next() bool - At() (labels.Labels, []*AggrChunk) + At() (labels.Labels, []AggrChunk) Err() error } @@ -114,7 +114,7 @@ type mergedSeriesSet struct { a, b SeriesSet lset labels.Labels - chunks []*AggrChunk + chunks []AggrChunk adone, bdone bool } @@ -128,7 +128,7 @@ func newMergedSeriesSet(a, b SeriesSet) *mergedSeriesSet { return s } -func (s *mergedSeriesSet) At() (labels.Labels, []*AggrChunk) { +func (s *mergedSeriesSet) At() (labels.Labels, []AggrChunk) { return s.lset, s.chunks } @@ -177,7 +177,7 @@ func (s *mergedSeriesSet) Next() bool { // Slice reuse is not generally safe with nested merge iterators. // We err on the safe side an create a new slice. - s.chunks = make([]*AggrChunk, 0, len(chksA)+len(chksB)) + s.chunks = make([]AggrChunk, 0, len(chksA)+len(chksB)) b := 0 Outer: @@ -189,7 +189,7 @@ Outer: break Outer } - cmp := chksA[a].Compare(*chksB[b]) + cmp := chksA[a].Compare(chksB[b]) if cmp > 0 { s.chunks = append(s.chunks, chksA[a]) break @@ -222,14 +222,14 @@ type uniqueSeriesSet struct { peek *Series lset labels.Labels - chunks []*AggrChunk + chunks []AggrChunk } func newUniqueSeriesSet(wrapped SeriesSet) *uniqueSeriesSet { return &uniqueSeriesSet{SeriesSet: wrapped} } -func (s *uniqueSeriesSet) At() (labels.Labels, []*AggrChunk) { +func (s *uniqueSeriesSet) At() (labels.Labels, []AggrChunk) { return s.lset, s.chunks } @@ -352,8 +352,8 @@ func (x *PartialResponseStrategy) MarshalJSON() ([]byte, error) { // PromMatchersToMatchers returns proto matchers from Prometheus matchers. // NOTE: It allocates memory. -func PromMatchersToMatchers(ms ...*labels.Matcher) ([]*LabelMatcher, error) { - res := make([]*LabelMatcher, 0, len(ms)) +func PromMatchersToMatchers(ms ...*labels.Matcher) ([]LabelMatcher, error) { + res := make([]LabelMatcher, 0, len(ms)) for _, m := range ms { var t LabelMatcher_Type @@ -369,14 +369,14 @@ func PromMatchersToMatchers(ms ...*labels.Matcher) ([]*LabelMatcher, error) { default: return nil, errors.Errorf("unrecognized matcher type %d", m.Type) } - res = append(res, &LabelMatcher{Type: t, Name: m.Name, Value: m.Value}) + res = append(res, LabelMatcher{Type: t, Name: m.Name, Value: m.Value}) } return res, nil } // MatchersToPromMatchers returns Prometheus matchers from proto matchers. // NOTE: It allocates memory. -func MatchersToPromMatchers(ms ...*LabelMatcher) ([]*labels.Matcher, error) { +func MatchersToPromMatchers(ms ...LabelMatcher) ([]*labels.Matcher, error) { res := make([]*labels.Matcher, 0, len(ms)) for _, m := range ms { var t labels.MatchType @@ -404,7 +404,7 @@ func MatchersToPromMatchers(ms ...*LabelMatcher) ([]*labels.Matcher, error) { // MatchersToString converts label matchers to string format. // String should be parsable as a valid PromQL query metric selector. -func MatchersToString(ms ...*LabelMatcher) string { +func MatchersToString(ms ...LabelMatcher) string { var res string for i, m := range ms { res += m.PromString() @@ -460,13 +460,13 @@ type LabelSet = labelpb.LabelSet // Deprecated. // TODO(bwplotka): Remove this once Cortex dep will stop using it. -func CompareLabels(a, b []*Label) int { +func CompareLabels(a, b []Label) int { return labels.Compare(labelpb.LabelpbLabelsToPromLabels(a), labelpb.LabelpbLabelsToPromLabels(b)) } // Deprecated. // TODO(bwplotka): Remove this once Cortex dep will stop using it. 
-func LabelsToPromLabelsUnsafe(lset []*Label) labels.Labels { +func LabelsToPromLabelsUnsafe(lset []Label) labels.Labels { return labelpb.LabelpbLabelsToPromLabels(lset) } @@ -486,7 +486,7 @@ type SeriesStatsCounter struct { Samples int } -func (c *SeriesStatsCounter) CountSeries(seriesLabels []*labelpb.Label) { +func (c *SeriesStatsCounter) CountSeries(seriesLabels []labelpb.Label) { seriesHash := labelpb.HashWithPrefix("", seriesLabels) if c.lastSeriesHash != 0 || seriesHash != c.lastSeriesHash { c.lastSeriesHash = seriesHash diff --git a/pkg/store/storepb/custom_test.go b/pkg/store/storepb/custom_test.go index 885bfed631..f31c8d6670 100644 --- a/pkg/store/storepb/custom_test.go +++ b/pkg/store/storepb/custom_test.go @@ -50,7 +50,7 @@ func newSeries(tb testing.TB, lset labels.Labels, smplChunks [][]sample) Series Raw: &Chunk{Type: Chunk_XOR, Data: c.Bytes()}, } - s.Chunks = append(s.Chunks, &ch) + s.Chunks = append(s.Chunks, ch) } return s } @@ -71,7 +71,7 @@ func (s *listSeriesSet) Next() bool { return s.idx < len(s.series) } -func (s *listSeriesSet) At() (labels.Labels, []*AggrChunk) { +func (s *listSeriesSet) At() (labels.Labels, []AggrChunk) { if s.idx < 0 || s.idx >= len(s.series) { return labels.EmptyLabels(), nil } @@ -85,7 +85,7 @@ type errSeriesSet struct{ err error } func (errSeriesSet) Next() bool { return false } -func (errSeriesSet) At() (labels.Labels, []*AggrChunk) { return labels.EmptyLabels(), nil } +func (errSeriesSet) At() (labels.Labels, []AggrChunk) { return labels.EmptyLabels(), nil } func (e errSeriesSet) Err() error { return e.err } @@ -468,37 +468,37 @@ func benchmarkMergedSeriesSet(b testutil.TB, overlappingChunks bool) { func TestMatchersToString_Translate(t *testing.T) { for _, c := range []struct { - ms []*LabelMatcher + ms []LabelMatcher expected string }{ { - ms: []*LabelMatcher{ + ms: []LabelMatcher{ {Name: "__name__", Type: LabelMatcher_EQ, Value: "up"}, }, expected: `{__name__="up"}`, }, { - ms: []*LabelMatcher{ + ms: []LabelMatcher{ {Name: "__name__", Type: LabelMatcher_NEQ, Value: "up"}, {Name: "job", Type: LabelMatcher_EQ, Value: "test"}, }, expected: `{__name__!="up", job="test"}`, }, { - ms: []*LabelMatcher{ + ms: []LabelMatcher{ {Name: "__name__", Type: LabelMatcher_EQ, Value: "up"}, {Name: "job", Type: LabelMatcher_RE, Value: "test"}, }, expected: `{__name__="up", job=~"test"}`, }, { - ms: []*LabelMatcher{ + ms: []LabelMatcher{ {Name: "job", Type: LabelMatcher_NRE, Value: "test"}, }, expected: `{job!~"test"}`, }, { - ms: []*LabelMatcher{ + ms: []LabelMatcher{ {Name: "__name__", Type: LabelMatcher_EQ, Value: "up"}, {Name: "__name__", Type: LabelMatcher_NEQ, Value: "up"}, }, @@ -541,7 +541,7 @@ func TestSeriesRequestToPromQL(t *testing.T) { { name: "Single matcher regular expression", r: &SeriesRequest{ - Matchers: []*LabelMatcher{ + Matchers: []LabelMatcher{ { Type: LabelMatcher_RE, Name: "namespace", @@ -559,7 +559,7 @@ func TestSeriesRequestToPromQL(t *testing.T) { { name: "Single matcher regular expression with grouping", r: &SeriesRequest{ - Matchers: []*LabelMatcher{ + Matchers: []LabelMatcher{ { Type: LabelMatcher_RE, Name: "namespace", @@ -581,7 +581,7 @@ func TestSeriesRequestToPromQL(t *testing.T) { { name: "Multiple matchers with grouping", r: &SeriesRequest{ - Matchers: []*LabelMatcher{ + Matchers: []LabelMatcher{ { Type: LabelMatcher_EQ, Name: "__name__", @@ -608,7 +608,7 @@ func TestSeriesRequestToPromQL(t *testing.T) { { name: "Query with vector range selector", r: &SeriesRequest{ - Matchers: []*LabelMatcher{ + Matchers: 
[]LabelMatcher{ { Type: LabelMatcher_EQ, Name: "__name__", @@ -634,7 +634,7 @@ func TestSeriesRequestToPromQL(t *testing.T) { { name: "Query with grouping and vector range selector", r: &SeriesRequest{ - Matchers: []*LabelMatcher{ + Matchers: []LabelMatcher{ { Type: LabelMatcher_EQ, Name: "__name__", diff --git a/pkg/store/storepb/inprocess_test.go b/pkg/store/storepb/inprocess_test.go index 801038f426..dab3f5548c 100644 --- a/pkg/store/storepb/inprocess_test.go +++ b/pkg/store/storepb/inprocess_test.go @@ -58,17 +58,17 @@ func TestServerAsClient(t *testing.T) { s := &testStoreServer{ series: []*SeriesResponse{ NewSeriesResponse(&Series{ - Labels: []*labelpb.Label{{Name: "a", Value: "b"}}, - Chunks: []*AggrChunk{{MinTime: 123, MaxTime: 124}, {MinTime: 12455, MaxTime: 14124}}, + Labels: []labelpb.Label{{Name: "a", Value: "b"}}, + Chunks: []AggrChunk{{MinTime: 123, MaxTime: 124}, {MinTime: 12455, MaxTime: 14124}}, }), NewSeriesResponse(&Series{ - Labels: []*labelpb.Label{{Name: "a", Value: "b1"}}, - Chunks: []*AggrChunk{{MinTime: 1231, MaxTime: 124}, {MinTime: 12455, MaxTime: 14124}}, + Labels: []labelpb.Label{{Name: "a", Value: "b1"}}, + Chunks: []AggrChunk{{MinTime: 1231, MaxTime: 124}, {MinTime: 12455, MaxTime: 14124}}, }), NewWarnSeriesResponse(errors.New("yolo")), NewSeriesResponse(&Series{ - Labels: []*labelpb.Label{{Name: "a", Value: "b3"}}, - Chunks: []*AggrChunk{{MinTime: 123, MaxTime: 124}, {MinTime: 124554, MaxTime: 14124}}, + Labels: []labelpb.Label{{Name: "a", Value: "b3"}}, + Chunks: []AggrChunk{{MinTime: 123, MaxTime: 124}, {MinTime: 124554, MaxTime: 14124}}, }), }} t.Run("ok", func(t *testing.T) { @@ -76,7 +76,7 @@ func TestServerAsClient(t *testing.T) { r := &SeriesRequest{ MinTime: -214, MaxTime: 213, - Matchers: []*LabelMatcher{{Value: "wfsdfs", Name: "__name__", Type: LabelMatcher_EQ}}, + Matchers: []LabelMatcher{{Value: "wfsdfs", Name: "__name__", Type: LabelMatcher_EQ}}, PartialResponseStrategy: PartialResponseStrategy_ABORT, } client, err := ServerAsClient(s).Series(ctx, r) @@ -101,7 +101,7 @@ func TestServerAsClient(t *testing.T) { r := &SeriesRequest{ MinTime: -214, MaxTime: 213, - Matchers: []*LabelMatcher{{Value: "wfsdfs", Name: "__name__", Type: LabelMatcher_EQ}}, + Matchers: []LabelMatcher{{Value: "wfsdfs", Name: "__name__", Type: LabelMatcher_EQ}}, PartialResponseStrategy: PartialResponseStrategy_ABORT, } client, err := ServerAsClient(s).Series(ctx, r) @@ -129,7 +129,7 @@ func TestServerAsClient(t *testing.T) { r := &SeriesRequest{ MinTime: -214, MaxTime: 213, - Matchers: []*LabelMatcher{{Value: "wfsdfs", Name: "__name__", Type: LabelMatcher_EQ}}, + Matchers: []LabelMatcher{{Value: "wfsdfs", Name: "__name__", Type: LabelMatcher_EQ}}, PartialResponseStrategy: PartialResponseStrategy_ABORT, } client, err := ServerAsClient(s).Series(ctx, r) diff --git a/pkg/store/storepb/prompb/remote.pb.go b/pkg/store/storepb/prompb/remote.pb.go index 9fb894e090..6d7538d1bc 100644 --- a/pkg/store/storepb/prompb/remote.pb.go +++ b/pkg/store/storepb/prompb/remote.pb.go @@ -9,6 +9,7 @@ import ( math "math" math_bits "math/bits" + _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" ) @@ -61,11 +62,8 @@ func (ReadRequest_ResponseType) EnumDescriptor() ([]byte, []int) { } type WriteRequest struct { - Timeseries []*TimeSeries `protobuf:"bytes,1,rep,name=timeseries,proto3" json:"timeseries,omitempty"` - Metadata []*MetricMetadata `protobuf:"bytes,3,rep,name=metadata,proto3" json:"metadata,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized 
[]byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Timeseries []TimeSeries `protobuf:"bytes,1,rep,name=timeseries,proto3" json:"timeseries"` + Metadata []MetricMetadata `protobuf:"bytes,3,rep,name=metadata,proto3" json:"metadata"` } func (m *WriteRequest) Reset() { *m = WriteRequest{} } @@ -101,14 +99,14 @@ func (m *WriteRequest) XXX_DiscardUnknown() { var xxx_messageInfo_WriteRequest proto.InternalMessageInfo -func (m *WriteRequest) GetTimeseries() []*TimeSeries { +func (m *WriteRequest) GetTimeseries() []TimeSeries { if m != nil { return m.Timeseries } return nil } -func (m *WriteRequest) GetMetadata() []*MetricMetadata { +func (m *WriteRequest) GetMetadata() []MetricMetadata { if m != nil { return m.Metadata } @@ -124,9 +122,6 @@ type ReadRequest struct { // implemented by server, error is returned. // For request that do not contain `accepted_response_types` field the SAMPLES response type will be used. AcceptedResponseTypes []ReadRequest_ResponseType `protobuf:"varint,2,rep,packed,name=accepted_response_types,json=acceptedResponseTypes,proto3,enum=prometheus_copy.ReadRequest_ResponseType" json:"accepted_response_types,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` } func (m *ReadRequest) Reset() { *m = ReadRequest{} } @@ -179,10 +174,7 @@ func (m *ReadRequest) GetAcceptedResponseTypes() []ReadRequest_ResponseType { // ReadResponse is a response when response_type equals SAMPLES. type ReadResponse struct { // In same order as the request's queries. - Results []*QueryResult `protobuf:"bytes,1,rep,name=results,proto3" json:"results,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Results []*QueryResult `protobuf:"bytes,1,rep,name=results,proto3" json:"results,omitempty"` } func (m *ReadResponse) Reset() { *m = ReadResponse{} } @@ -226,13 +218,10 @@ func (m *ReadResponse) GetResults() []*QueryResult { } type Query struct { - StartTimestampMs int64 `protobuf:"varint,1,opt,name=start_timestamp_ms,json=startTimestampMs,proto3" json:"start_timestamp_ms,omitempty"` - EndTimestampMs int64 `protobuf:"varint,2,opt,name=end_timestamp_ms,json=endTimestampMs,proto3" json:"end_timestamp_ms,omitempty"` - Matchers []*LabelMatcher `protobuf:"bytes,3,rep,name=matchers,proto3" json:"matchers,omitempty"` - Hints *ReadHints `protobuf:"bytes,4,opt,name=hints,proto3" json:"hints,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + StartTimestampMs int64 `protobuf:"varint,1,opt,name=start_timestamp_ms,json=startTimestampMs,proto3" json:"start_timestamp_ms,omitempty"` + EndTimestampMs int64 `protobuf:"varint,2,opt,name=end_timestamp_ms,json=endTimestampMs,proto3" json:"end_timestamp_ms,omitempty"` + Matchers []*LabelMatcher `protobuf:"bytes,3,rep,name=matchers,proto3" json:"matchers,omitempty"` + Hints *ReadHints `protobuf:"bytes,4,opt,name=hints,proto3" json:"hints,omitempty"` } func (m *Query) Reset() { *m = Query{} } @@ -298,10 +287,7 @@ func (m *Query) GetHints() *ReadHints { type QueryResult struct { // Samples within a time series must be ordered by time. 
- Timeseries []*TimeSeries `protobuf:"bytes,1,rep,name=timeseries,proto3" json:"timeseries,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Timeseries []*TimeSeries `protobuf:"bytes,1,rep,name=timeseries,proto3" json:"timeseries,omitempty"` } func (m *QueryResult) Reset() { *m = QueryResult{} } @@ -351,10 +337,7 @@ func (m *QueryResult) GetTimeseries() []*TimeSeries { type ChunkedReadResponse struct { ChunkedSeries []*ChunkedSeries `protobuf:"bytes,1,rep,name=chunked_series,json=chunkedSeries,proto3" json:"chunked_series,omitempty"` // query_index represents an index of the query from ReadRequest.queries these chunks relates to. - QueryIndex int64 `protobuf:"varint,2,opt,name=query_index,json=queryIndex,proto3" json:"query_index,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + QueryIndex int64 `protobuf:"varint,2,opt,name=query_index,json=queryIndex,proto3" json:"query_index,omitempty"` } func (m *ChunkedReadResponse) Reset() { *m = ChunkedReadResponse{} } @@ -417,38 +400,41 @@ func init() { func init() { proto.RegisterFile("store/storepb/prompb/remote.proto", fileDescriptor_b3df75c58e6767bb) } var fileDescriptor_b3df75c58e6767bb = []byte{ - // 496 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x93, 0x4f, 0x6f, 0xd3, 0x30, - 0x18, 0xc6, 0x71, 0xbb, 0xad, 0xd5, 0x9b, 0x51, 0x2a, 0x0f, 0x58, 0x35, 0xa0, 0x2b, 0x39, 0x15, - 0x09, 0xb5, 0x53, 0x41, 0x48, 0x68, 0xa7, 0x31, 0x8a, 0xc6, 0x58, 0xf8, 0xe3, 0x16, 0x81, 0xb8, - 0x44, 0x6e, 0xf2, 0x4a, 0x8d, 0x58, 0x12, 0xcf, 0x76, 0x24, 0x72, 0xe0, 0x2b, 0xf0, 0xb9, 0x38, - 0x70, 0xe0, 0xc8, 0x11, 0xf5, 0x93, 0xa0, 0x38, 0x49, 0x95, 0x91, 0x71, 0xe2, 0x52, 0xa9, 0x8f, - 0x7f, 0xcf, 0x93, 0xf7, 0x8f, 0x0d, 0xf7, 0x95, 0x8e, 0x25, 0x8e, 0xcd, 0xaf, 0x58, 0x8c, 0x85, - 0x8c, 0x43, 0xb1, 0x18, 0x4b, 0x0c, 0x63, 0x8d, 0x23, 0x21, 0x63, 0x1d, 0xd3, 0x1b, 0x99, 0x88, - 0x7a, 0x89, 0x89, 0x72, 0xbd, 0x58, 0xa4, 0x7b, 0x83, 0x2b, 0x3d, 0x3a, 0x15, 0xa8, 0x72, 0x8b, - 0xfd, 0x8d, 0xc0, 0xf6, 0x07, 0x19, 0x68, 0x64, 0x78, 0x91, 0xa0, 0xd2, 0xf4, 0x10, 0x40, 0x07, - 0x21, 0x2a, 0x94, 0x01, 0xaa, 0x1e, 0x19, 0x34, 0x87, 0xd6, 0xe4, 0xce, 0xe8, 0xaf, 0xe0, 0xd1, - 0x3c, 0x08, 0x71, 0x66, 0x10, 0x56, 0xc1, 0xe9, 0x21, 0xb4, 0x43, 0xd4, 0xdc, 0xe7, 0x9a, 0xf7, - 0x9a, 0xc6, 0xba, 0x5f, 0xb3, 0x3a, 0xa8, 0x65, 0xe0, 0x39, 0x05, 0xc6, 0xd6, 0x86, 0xd3, 0x8d, - 0x76, 0xa3, 0xdb, 0xb4, 0x7f, 0x11, 0xb0, 0x18, 0x72, 0xbf, 0xac, 0xe7, 0x00, 0x5a, 0x17, 0x49, - 0xb5, 0x98, 0xdb, 0xb5, 0xc4, 0x77, 0x09, 0xca, 0x94, 0x95, 0x18, 0xe5, 0xb0, 0xcb, 0x3d, 0x0f, - 0x85, 0x46, 0xdf, 0x95, 0xa8, 0x44, 0x1c, 0x29, 0x74, 0x4d, 0xcf, 0xbd, 0xc6, 0xa0, 0x39, 0xec, - 0x4c, 0x1e, 0xd4, 0x12, 0x2a, 0x1f, 0x1c, 0xb1, 0xc2, 0x32, 0x4f, 0x05, 0xb2, 0x5b, 0x65, 0x52, - 0x55, 0x55, 0xf6, 0x63, 0xd8, 0xae, 0x0a, 0xd4, 0x82, 0xd6, 0xec, 0xc8, 0x79, 0x7b, 0x36, 0x9d, - 0x75, 0xaf, 0xd1, 0x5d, 0xd8, 0x99, 0xcd, 0xd9, 0xf4, 0xc8, 0x99, 0x3e, 0x77, 0x3f, 0xbe, 0x61, - 0xee, 0xf1, 0xc9, 0xfb, 0xd7, 0xaf, 0x66, 0x5d, 0x62, 0xbf, 0xc8, 0x5c, 0x7c, 0x1d, 0x45, 0x9f, - 0x40, 0x4b, 0xa2, 0x4a, 0xce, 0x75, 0xd9, 0xda, 0xdd, 0x7f, 0xb4, 0x66, 0x20, 0x56, 0xc2, 0xf6, - 0x0f, 0x02, 0x9b, 0xe6, 0x80, 0x3e, 0x04, 0xaa, 0x34, 0x97, 0xda, 0x35, 0x3b, 0xd0, 0x3c, 0x14, - 0x6e, 0x98, 0x85, 0x91, 0x61, 0x93, 0x75, 0xcd, 0xc9, 0xbc, 0x3c, 0x70, 0x14, 0x1d, 0x42, 0x17, - 0x23, 0xff, 0x32, 0xdb, 0x30, 
0x6c, 0x07, 0x23, 0xbf, 0x4a, 0x3e, 0x85, 0x76, 0xc8, 0xb5, 0xb7, - 0x44, 0xa9, 0x8a, 0x3d, 0xde, 0xab, 0x95, 0x76, 0xc6, 0x17, 0x78, 0xee, 0xe4, 0x14, 0x5b, 0xe3, - 0xf4, 0x00, 0x36, 0x97, 0x41, 0xa4, 0x55, 0x6f, 0x63, 0x40, 0x86, 0xd6, 0x64, 0xef, 0xca, 0x59, - 0x9f, 0x64, 0x04, 0xcb, 0x41, 0xfb, 0x14, 0xac, 0x4a, 0x9b, 0xff, 0x75, 0x01, 0xed, 0xaf, 0xb0, - 0x73, 0xbc, 0x4c, 0xa2, 0xcf, 0xd9, 0xc2, 0x2a, 0x93, 0x9e, 0x42, 0xc7, 0xcb, 0x65, 0xf7, 0x52, - 0x6e, 0xbf, 0x96, 0x5b, 0xb8, 0x8b, 0xe8, 0xeb, 0x5e, 0xf5, 0x2f, 0xdd, 0x07, 0x2b, 0xbb, 0x64, - 0xa9, 0x1b, 0x44, 0x3e, 0x7e, 0x29, 0x66, 0x07, 0x46, 0x7a, 0x99, 0x29, 0xcf, 0x6e, 0x7e, 0x5f, - 0xf5, 0xc9, 0xcf, 0x55, 0x9f, 0xfc, 0x5e, 0xf5, 0xc9, 0xa7, 0xad, 0xfc, 0xbd, 0x2d, 0xb6, 0xcc, - 0x53, 0x7b, 0xf4, 0x27, 0x00, 0x00, 0xff, 0xff, 0xac, 0xb1, 0xdb, 0x4d, 0xc2, 0x03, 0x00, 0x00, + // 535 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x93, 0x4f, 0x6f, 0xd3, 0x30, + 0x18, 0xc6, 0x9b, 0xb5, 0x5b, 0xab, 0x37, 0xa3, 0x54, 0x1e, 0xb0, 0xaa, 0x40, 0x5a, 0x72, 0x2a, + 0x12, 0x6a, 0xa7, 0x82, 0x90, 0x10, 0xa7, 0x6e, 0x14, 0x8d, 0xb1, 0xf0, 0xc7, 0x2d, 0x02, 0x71, + 0x89, 0xdc, 0xe4, 0xd5, 0x1a, 0xb1, 0xfc, 0x99, 0xed, 0x48, 0xf4, 0xc0, 0x77, 0xe0, 0xcc, 0x27, + 0xda, 0x81, 0xc3, 0x8e, 0x3b, 0x21, 0xd4, 0x7e, 0x11, 0x14, 0x27, 0xa9, 0x32, 0x3a, 0x0e, 0x5c, + 0x22, 0xe7, 0xf1, 0xef, 0x79, 0x6c, 0xbf, 0x7e, 0x0d, 0x0f, 0x84, 0x0c, 0x39, 0xf6, 0xd5, 0x37, + 0x9a, 0xf6, 0x23, 0x1e, 0xfa, 0xd1, 0xb4, 0xcf, 0xd1, 0x0f, 0x25, 0xf6, 0x22, 0x1e, 0xca, 0x90, + 0xdc, 0x4c, 0x44, 0x94, 0x33, 0x8c, 0x85, 0xed, 0x84, 0xd1, 0xbc, 0xd5, 0xb9, 0xd6, 0x23, 0xe7, + 0x11, 0x8a, 0xd4, 0xd2, 0xba, 0x75, 0x12, 0x9e, 0x84, 0x6a, 0xd8, 0x4f, 0x46, 0xa9, 0x6a, 0xfe, + 0xd0, 0x60, 0xfb, 0x23, 0xf7, 0x24, 0x52, 0x3c, 0x8b, 0x51, 0x48, 0x32, 0x04, 0x90, 0x9e, 0x8f, + 0x02, 0xb9, 0x87, 0xa2, 0xa9, 0x75, 0xca, 0x5d, 0x7d, 0x70, 0xb7, 0xf7, 0xd7, 0x72, 0xbd, 0x89, + 0xe7, 0xe3, 0x58, 0x21, 0xfb, 0x95, 0xf3, 0x5f, 0xed, 0x12, 0x2d, 0x98, 0xc8, 0x10, 0x6a, 0x3e, + 0x4a, 0xe6, 0x32, 0xc9, 0x9a, 0x65, 0x15, 0xd0, 0x5e, 0x0b, 0xb0, 0x50, 0x72, 0xcf, 0xb1, 0x32, + 0x2c, 0x0b, 0x59, 0xd9, 0x8e, 0x2a, 0xb5, 0x8d, 0x46, 0xd9, 0xbc, 0xd4, 0x40, 0xa7, 0xc8, 0xdc, + 0x7c, 0x6f, 0x7b, 0x50, 0x3d, 0x8b, 0x8b, 0x1b, 0xbb, 0xb3, 0x96, 0xfb, 0x3e, 0x46, 0x3e, 0xa7, + 0x39, 0x46, 0x18, 0xec, 0x32, 0xc7, 0xc1, 0x48, 0xa2, 0x6b, 0x73, 0x14, 0x51, 0x18, 0x08, 0xb4, + 0x55, 0x55, 0x9a, 0x1b, 0x9d, 0x72, 0xb7, 0x3e, 0x78, 0xb8, 0x96, 0x50, 0x58, 0xb0, 0x47, 0x33, + 0xcb, 0x64, 0x1e, 0x21, 0xbd, 0x9d, 0x27, 0x15, 0x55, 0x61, 0x3e, 0x81, 0xed, 0xa2, 0x40, 0x74, + 0xa8, 0x8e, 0x87, 0xd6, 0xbb, 0xe3, 0xd1, 0xb8, 0x51, 0x22, 0xbb, 0xb0, 0x33, 0x9e, 0xd0, 0xd1, + 0xd0, 0x1a, 0xbd, 0xb0, 0x3f, 0xbd, 0xa5, 0xf6, 0xc1, 0xe1, 0x87, 0x37, 0xaf, 0xc7, 0x0d, 0xcd, + 0x7c, 0x99, 0xb8, 0xd8, 0x2a, 0x8a, 0x3c, 0x85, 0x2a, 0x47, 0x11, 0x9f, 0xca, 0xfc, 0x68, 0xf7, + 0xfe, 0x71, 0x34, 0x05, 0xd1, 0x1c, 0x36, 0x7f, 0x6a, 0xb0, 0xa9, 0x26, 0xc8, 0x23, 0x20, 0x42, + 0x32, 0x2e, 0x6d, 0x75, 0x13, 0x92, 0xf9, 0x91, 0xed, 0x27, 0x61, 0x5a, 0xb7, 0x4c, 0x1b, 0x6a, + 0x66, 0x92, 0x4f, 0x58, 0x82, 0x74, 0xa1, 0x81, 0x81, 0x7b, 0x95, 0xdd, 0x50, 0x6c, 0x1d, 0x03, + 0xb7, 0x48, 0x3e, 0x83, 0x9a, 0xcf, 0xa4, 0x33, 0x43, 0x2e, 0xb2, 0xdb, 0xbc, 0xbf, 0xb6, 0xb5, + 0x63, 0x36, 0xc5, 0x53, 0x2b, 0xa5, 0xe8, 0x0a, 0x27, 0x7b, 0xb0, 0x39, 0xf3, 0x02, 0x29, 0x9a, + 0x95, 0x8e, 0xd6, 0xd5, 0x07, 0xad, 0x6b, 0x6b, 0x7d, 0x98, 0x10, 0x34, 0x05, 0xcd, 0x23, 0xd0, + 0x0b, 
0xc7, 0x24, 0xcf, 0xff, 0xb3, 0x19, 0x8b, 0x6d, 0x68, 0x7e, 0x83, 0x9d, 0x83, 0x59, 0x1c, + 0x7c, 0x49, 0x2e, 0xac, 0x50, 0xe9, 0x11, 0xd4, 0x9d, 0x54, 0xb6, 0xaf, 0xe4, 0x1a, 0x6b, 0xb9, + 0x99, 0x3b, 0x8b, 0xbe, 0xe1, 0x14, 0x7f, 0x49, 0x1b, 0xf4, 0xa4, 0xc9, 0xe6, 0xb6, 0x17, 0xb8, + 0xf8, 0x35, 0xab, 0x1d, 0x28, 0xe9, 0x55, 0xa2, 0xec, 0x77, 0xce, 0x17, 0x86, 0x76, 0xb1, 0x30, + 0xb4, 0xdf, 0x0b, 0x43, 0xfb, 0xbe, 0x34, 0x4a, 0x17, 0x4b, 0xa3, 0x74, 0xb9, 0x34, 0x4a, 0x9f, + 0xb7, 0xd2, 0xd7, 0x39, 0xdd, 0x52, 0x4f, 0xf0, 0xf1, 0x9f, 0x00, 0x00, 0x00, 0xff, 0xff, 0x32, + 0xa1, 0xf6, 0xc4, 0xf0, 0x03, 0x00, 0x00, } func (m *WriteRequest) Marshal() (dAtA []byte, err error) { @@ -471,10 +457,6 @@ func (m *WriteRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if len(m.Metadata) > 0 { for iNdEx := len(m.Metadata) - 1; iNdEx >= 0; iNdEx-- { { @@ -526,10 +508,6 @@ func (m *ReadRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if len(m.AcceptedResponseTypes) > 0 { dAtA2 := make([]byte, len(m.AcceptedResponseTypes)*10) var j1 int @@ -585,10 +563,6 @@ func (m *ReadResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if len(m.Results) > 0 { for iNdEx := len(m.Results) - 1; iNdEx >= 0; iNdEx-- { { @@ -626,10 +600,6 @@ func (m *Query) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if m.Hints != nil { { size, err := m.Hints.MarshalToSizedBuffer(dAtA[:i]) @@ -689,10 +659,6 @@ func (m *QueryResult) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if len(m.Timeseries) > 0 { for iNdEx := len(m.Timeseries) - 1; iNdEx >= 0; iNdEx-- { { @@ -730,10 +696,6 @@ func (m *ChunkedReadResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if m.QueryIndex != 0 { i = encodeVarintRemote(dAtA, i, uint64(m.QueryIndex)) i-- @@ -785,9 +747,6 @@ func (m *WriteRequest) Size() (n int) { n += 1 + l + sovRemote(uint64(l)) } } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -810,9 +769,6 @@ func (m *ReadRequest) Size() (n int) { } n += 1 + sovRemote(uint64(l)) + l } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -828,9 +784,6 @@ func (m *ReadResponse) Size() (n int) { n += 1 + l + sovRemote(uint64(l)) } } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -856,9 +809,6 @@ func (m *Query) Size() (n int) { l = m.Hints.Size() n += 1 + l + sovRemote(uint64(l)) } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -874,9 +824,6 @@ func (m *QueryResult) Size() (n int) { n += 1 + l + sovRemote(uint64(l)) } } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -895,9 +842,6 @@ func (m *ChunkedReadResponse) Size() (n int) { if m.QueryIndex != 0 { n += 1 + sovRemote(uint64(m.QueryIndex)) } - if 
m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -965,7 +909,7 @@ func (m *WriteRequest) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Timeseries = append(m.Timeseries, &TimeSeries{}) + m.Timeseries = append(m.Timeseries, TimeSeries{}) if err := m.Timeseries[len(m.Timeseries)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -999,7 +943,7 @@ func (m *WriteRequest) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Metadata = append(m.Metadata, &MetricMetadata{}) + m.Metadata = append(m.Metadata, MetricMetadata{}) if err := m.Metadata[len(m.Metadata)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -1016,7 +960,6 @@ func (m *WriteRequest) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } @@ -1170,7 +1113,6 @@ func (m *ReadRequest) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } @@ -1255,7 +1197,6 @@ func (m *ReadResponse) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } @@ -1414,7 +1355,6 @@ func (m *Query) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } @@ -1499,7 +1439,6 @@ func (m *QueryResult) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } @@ -1603,7 +1542,6 @@ func (m *ChunkedReadResponse) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } diff --git a/pkg/store/storepb/prompb/remote.proto b/pkg/store/storepb/prompb/remote.proto index 3f07eae972..b765bb7906 100644 --- a/pkg/store/storepb/prompb/remote.proto +++ b/pkg/store/storepb/prompb/remote.proto @@ -20,14 +20,20 @@ package prometheus_copy; option go_package = "prompb"; import "store/storepb/prompb/types.proto"; +import "gogoproto/gogo.proto"; +// Do not generate XXX fields to reduce memory footprint and opening a door +// for zero-copy casts to/from prometheus data types. +option (gogoproto.goproto_unkeyed_all) = false; +option (gogoproto.goproto_unrecognized_all) = false; +option (gogoproto.goproto_sizecache_all) = false; message WriteRequest { - repeated TimeSeries timeseries = 1; + repeated TimeSeries timeseries = 1 [(gogoproto.nullable) = false]; // Cortex uses this field to determine the source of the write request. // We reserve it to avoid any compatibility issues. reserved 2; - repeated MetricMetadata metadata = 3; + repeated MetricMetadata metadata = 3 [(gogoproto.nullable) = false]; } // ReadRequest represents a remote read request. 
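Note: a hedged sketch (not part of the patch) of the caller-side effect of [(gogoproto.nullable) = false] on WriteRequest. Only fields visible in this diff are used; buildWriteRequest and the example package are illustrative names.

package example

import "github.com/thanos-io/thanos/pkg/store/storepb/prompb"

// buildWriteRequest fills Timeseries as a value slice: one backing array
// instead of one heap allocation per series, and no nil elements to check.
func buildWriteRequest(n int) prompb.WriteRequest {
	req := prompb.WriteRequest{
		Timeseries: make([]prompb.TimeSeries, 0, n),
	}
	for i := 0; i < n; i++ {
		req.Timeseries = append(req.Timeseries, prompb.TimeSeries{}) // was &prompb.TimeSeries{}
	}
	return req
}

The Metadata field follows the same pattern as a []prompb.MetricMetadata value slice.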
diff --git a/pkg/store/storepb/prompb/samples.go b/pkg/store/storepb/prompb/samples.go index 6d00642080..0da00daf4d 100644 --- a/pkg/store/storepb/prompb/samples.go +++ b/pkg/store/storepb/prompb/samples.go @@ -25,12 +25,12 @@ func SamplesFromSamplePairs(samples []model.SamplePair) []Sample { // SamplesFromPromqlSamples converts a slice of promql.Sample // to a slice of Sample. -func SamplesFromPromqlSamples(samples ...promql.Sample) ([]*Sample, []*Histogram) { - floats := make([]*Sample, 0, len(samples)) - histograms := make([]*Histogram, 0, len(samples)) +func SamplesFromPromqlSamples(samples ...promql.Sample) ([]Sample, []Histogram) { + floats := make([]Sample, 0, len(samples)) + histograms := make([]Histogram, 0, len(samples)) for _, s := range samples { if s.H == nil { - floats = append(floats, &Sample{ + floats = append(floats, Sample{ Value: s.F, Timestamp: s.T, }) @@ -43,15 +43,15 @@ func SamplesFromPromqlSamples(samples ...promql.Sample) ([]*Sample, []*Histogram } // SamplesFromPromqlSeries converts promql.Series to a slice of Sample and a slice of Histogram. -func SamplesFromPromqlSeries(series promql.Series) ([]*Sample, []*Histogram) { - floats := make([]*Sample, 0, len(series.Floats)) +func SamplesFromPromqlSeries(series promql.Series) ([]Sample, []Histogram) { + floats := make([]Sample, 0, len(series.Floats)) for _, f := range series.Floats { - floats = append(floats, &Sample{ + floats = append(floats, Sample{ Value: f.F, Timestamp: f.T, }) } - histograms := make([]*Histogram, 0, len(series.Histograms)) + histograms := make([]Histogram, 0, len(series.Histograms)) for _, h := range series.Histograms { histograms = append(histograms, FloatHistogramToHistogramProto(h.T, h.H)) } @@ -123,7 +123,7 @@ func HistogramProtoToFloatHistogram(hp Histogram) *histogram.FloatHistogram { } } -func spansProtoToSpans(s []*BucketSpan) []histogram.Span { +func spansProtoToSpans(s []BucketSpan) []histogram.Span { spans := make([]histogram.Span, len(s)) for i := 0; i < len(s); i++ { spans[i] = histogram.Span{Offset: s[i].Offset, Length: s[i].Length} @@ -143,8 +143,8 @@ func deltasToCounts(deltas []int64) []float64 { } // Copied from https://github.com/prometheus/prometheus/blob/0ab95536115adfe50af249d36d73674be694ca3f/storage/remote/codec.go#L709-L723 -func HistogramToHistogramProto(timestamp int64, h *histogram.Histogram) *Histogram { - return &Histogram{ +func HistogramToHistogramProto(timestamp int64, h *histogram.Histogram) Histogram { + return Histogram{ Count: &Histogram_CountInt{CountInt: h.Count}, Sum: h.Sum, Schema: h.Schema, @@ -160,8 +160,8 @@ func HistogramToHistogramProto(timestamp int64, h *histogram.Histogram) *Histogr } // Copied from https://github.com/prometheus/prometheus/blob/0ab95536115adfe50af249d36d73674be694ca3f/storage/remote/codec.go#L725-L739 -func FloatHistogramToHistogramProto(timestamp int64, fh *histogram.FloatHistogram) *Histogram { - return &Histogram{ +func FloatHistogramToHistogramProto(timestamp int64, fh *histogram.FloatHistogram) Histogram { + return Histogram{ Count: &Histogram_CountFloat{CountFloat: fh.Count}, Sum: fh.Sum, Schema: fh.Schema, @@ -176,10 +176,10 @@ func FloatHistogramToHistogramProto(timestamp int64, fh *histogram.FloatHistogra } } -func spansToSpansProto(s []histogram.Span) []*BucketSpan { - spans := make([]*BucketSpan, len(s)) +func spansToSpansProto(s []histogram.Span) []BucketSpan { + spans := make([]BucketSpan, len(s)) for i := 0; i < len(s); i++ { - spans[i] = &BucketSpan{Offset: s[i].Offset, Length: s[i].Length} + spans[i] = 
BucketSpan{Offset: s[i].Offset, Length: s[i].Length} } return spans diff --git a/pkg/store/storepb/prompb/types.pb.go b/pkg/store/storepb/prompb/types.pb.go index a1effd2abd..00e8b97e21 100644 --- a/pkg/store/storepb/prompb/types.pb.go +++ b/pkg/store/storepb/prompb/types.pb.go @@ -10,6 +10,7 @@ import ( math "math" math_bits "math/bits" + _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" labelpb "github.com/thanos-io/thanos/pkg/store/labelpb" ) @@ -165,13 +166,10 @@ func (Chunk_Encoding) EnumDescriptor() ([]byte, []int) { type MetricMetadata struct { // Represents the metric type, these match the set from Prometheus. // Refer to pkg/textparse/interface.go for details. - Type MetricMetadata_MetricType `protobuf:"varint,1,opt,name=type,proto3,enum=prometheus_copy.MetricMetadata_MetricType" json:"type,omitempty"` - MetricFamilyName string `protobuf:"bytes,2,opt,name=metric_family_name,json=metricFamilyName,proto3" json:"metric_family_name,omitempty"` - Help string `protobuf:"bytes,4,opt,name=help,proto3" json:"help,omitempty"` - Unit string `protobuf:"bytes,5,opt,name=unit,proto3" json:"unit,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Type MetricMetadata_MetricType `protobuf:"varint,1,opt,name=type,proto3,enum=prometheus_copy.MetricMetadata_MetricType" json:"type,omitempty"` + MetricFamilyName string `protobuf:"bytes,2,opt,name=metric_family_name,json=metricFamilyName,proto3" json:"metric_family_name,omitempty"` + Help string `protobuf:"bytes,4,opt,name=help,proto3" json:"help,omitempty"` + Unit string `protobuf:"bytes,5,opt,name=unit,proto3" json:"unit,omitempty"` } func (m *MetricMetadata) Reset() { *m = MetricMetadata{} } @@ -236,11 +234,8 @@ func (m *MetricMetadata) GetUnit() string { } type Sample struct { - Value float64 `protobuf:"fixed64,1,opt,name=value,proto3" json:"value,omitempty"` - Timestamp int64 `protobuf:"varint,2,opt,name=timestamp,proto3" json:"timestamp,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Value float64 `protobuf:"fixed64,1,opt,name=value,proto3" json:"value,omitempty"` + Timestamp int64 `protobuf:"varint,2,opt,name=timestamp,proto3" json:"timestamp,omitempty"` } func (m *Sample) Reset() { *m = Sample{} } @@ -292,14 +287,11 @@ func (m *Sample) GetTimestamp() int64 { type Exemplar struct { // Optional, can be empty. - Labels []*labelpb.Label `protobuf:"bytes,1,rep,name=labels,proto3" json:"labels,omitempty"` - Value float64 `protobuf:"fixed64,2,opt,name=value,proto3" json:"value,omitempty"` + Labels []labelpb.Label `protobuf:"bytes,1,rep,name=labels,proto3" json:"labels"` + Value float64 `protobuf:"fixed64,2,opt,name=value,proto3" json:"value,omitempty"` // timestamp is in ms format, see pkg/timestamp/timestamp.go for // conversion from time.Time to Prometheus timestamp. 
- Timestamp int64 `protobuf:"varint,3,opt,name=timestamp,proto3" json:"timestamp,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Timestamp int64 `protobuf:"varint,3,opt,name=timestamp,proto3" json:"timestamp,omitempty"` } func (m *Exemplar) Reset() { *m = Exemplar{} } @@ -335,7 +327,7 @@ func (m *Exemplar) XXX_DiscardUnknown() { var xxx_messageInfo_Exemplar proto.InternalMessageInfo -func (m *Exemplar) GetLabels() []*labelpb.Label { +func (m *Exemplar) GetLabels() []labelpb.Label { if m != nil { return m.Labels } @@ -384,14 +376,14 @@ type Histogram struct { // *Histogram_ZeroCountFloat ZeroCount isHistogram_ZeroCount `protobuf_oneof:"zero_count"` // Negative Buckets. - NegativeSpans []*BucketSpan `protobuf:"bytes,8,rep,name=negative_spans,json=negativeSpans,proto3" json:"negative_spans,omitempty"` + NegativeSpans []BucketSpan `protobuf:"bytes,8,rep,name=negative_spans,json=negativeSpans,proto3" json:"negative_spans"` // Use either "negative_deltas" or "negative_counts", the former for // regular histograms with integer counts, the latter for float // histograms. NegativeDeltas []int64 `protobuf:"zigzag64,9,rep,packed,name=negative_deltas,json=negativeDeltas,proto3" json:"negative_deltas,omitempty"` NegativeCounts []float64 `protobuf:"fixed64,10,rep,packed,name=negative_counts,json=negativeCounts,proto3" json:"negative_counts,omitempty"` // Positive Buckets. - PositiveSpans []*BucketSpan `protobuf:"bytes,11,rep,name=positive_spans,json=positiveSpans,proto3" json:"positive_spans,omitempty"` + PositiveSpans []BucketSpan `protobuf:"bytes,11,rep,name=positive_spans,json=positiveSpans,proto3" json:"positive_spans"` // Use either "positive_deltas" or "positive_counts", the former for // regular histograms with integer counts, the latter for float // histograms. @@ -400,10 +392,7 @@ type Histogram struct { ResetHint Histogram_ResetHint `protobuf:"varint,14,opt,name=reset_hint,json=resetHint,proto3,enum=prometheus_copy.Histogram_ResetHint" json:"reset_hint,omitempty"` // timestamp is in ms format, see model/timestamp/timestamp.go for // conversion from time.Time to Prometheus timestamp. - Timestamp int64 `protobuf:"varint,15,opt,name=timestamp,proto3" json:"timestamp,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Timestamp int64 `protobuf:"varint,15,opt,name=timestamp,proto3" json:"timestamp,omitempty"` } func (m *Histogram) Reset() { *m = Histogram{} } @@ -530,7 +519,7 @@ func (m *Histogram) GetZeroCountFloat() float64 { return 0 } -func (m *Histogram) GetNegativeSpans() []*BucketSpan { +func (m *Histogram) GetNegativeSpans() []BucketSpan { if m != nil { return m.NegativeSpans } @@ -551,7 +540,7 @@ func (m *Histogram) GetNegativeCounts() []float64 { return nil } -func (m *Histogram) GetPositiveSpans() []*BucketSpan { +func (m *Histogram) GetPositiveSpans() []BucketSpan { if m != nil { return m.PositiveSpans } @@ -602,11 +591,8 @@ func (*Histogram) XXX_OneofWrappers() []interface{} { // more compact in the way the data is structured here (with all the // buckets in a single array separate from the Spans). 
type BucketSpan struct { - Offset int32 `protobuf:"zigzag32,1,opt,name=offset,proto3" json:"offset,omitempty"` - Length uint32 `protobuf:"varint,2,opt,name=length,proto3" json:"length,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Offset int32 `protobuf:"zigzag32,1,opt,name=offset,proto3" json:"offset,omitempty"` + Length uint32 `protobuf:"varint,2,opt,name=length,proto3" json:"length,omitempty"` } func (m *BucketSpan) Reset() { *m = BucketSpan{} } @@ -659,13 +645,10 @@ func (m *BucketSpan) GetLength() uint32 { // TimeSeries represents samples and labels for a single time series. type TimeSeries struct { // Labels have to be sorted by label names and without duplicated label names. - Labels []*labelpb.Label `protobuf:"bytes,1,rep,name=labels,proto3" json:"labels,omitempty"` - Samples []*Sample `protobuf:"bytes,2,rep,name=samples,proto3" json:"samples,omitempty"` - Exemplars []*Exemplar `protobuf:"bytes,3,rep,name=exemplars,proto3" json:"exemplars,omitempty"` - Histograms []*Histogram `protobuf:"bytes,4,rep,name=histograms,proto3" json:"histograms,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Labels []labelpb.Label `protobuf:"bytes,1,rep,name=labels,proto3" json:"labels"` + Samples []Sample `protobuf:"bytes,2,rep,name=samples,proto3" json:"samples"` + Exemplars []Exemplar `protobuf:"bytes,3,rep,name=exemplars,proto3" json:"exemplars"` + Histograms []Histogram `protobuf:"bytes,4,rep,name=histograms,proto3" json:"histograms"` } func (m *TimeSeries) Reset() { *m = TimeSeries{} } @@ -701,28 +684,28 @@ func (m *TimeSeries) XXX_DiscardUnknown() { var xxx_messageInfo_TimeSeries proto.InternalMessageInfo -func (m *TimeSeries) GetLabels() []*labelpb.Label { +func (m *TimeSeries) GetLabels() []labelpb.Label { if m != nil { return m.Labels } return nil } -func (m *TimeSeries) GetSamples() []*Sample { +func (m *TimeSeries) GetSamples() []Sample { if m != nil { return m.Samples } return nil } -func (m *TimeSeries) GetExemplars() []*Exemplar { +func (m *TimeSeries) GetExemplars() []Exemplar { if m != nil { return m.Exemplars } return nil } -func (m *TimeSeries) GetHistograms() []*Histogram { +func (m *TimeSeries) GetHistograms() []Histogram { if m != nil { return m.Histograms } @@ -731,12 +714,9 @@ func (m *TimeSeries) GetHistograms() []*Histogram { // Matcher specifies a rule, which can match or set of labels or not. 
type LabelMatcher struct { - Type LabelMatcher_Type `protobuf:"varint,1,opt,name=type,proto3,enum=prometheus_copy.LabelMatcher_Type" json:"type,omitempty"` - Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` - Value string `protobuf:"bytes,3,opt,name=value,proto3" json:"value,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Type LabelMatcher_Type `protobuf:"varint,1,opt,name=type,proto3,enum=prometheus_copy.LabelMatcher_Type" json:"type,omitempty"` + Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` + Value string `protobuf:"bytes,3,opt,name=value,proto3" json:"value,omitempty"` } func (m *LabelMatcher) Reset() { *m = LabelMatcher{} } @@ -794,16 +774,13 @@ func (m *LabelMatcher) GetValue() string { } type ReadHints struct { - StepMs int64 `protobuf:"varint,1,opt,name=step_ms,json=stepMs,proto3" json:"step_ms,omitempty"` - Func string `protobuf:"bytes,2,opt,name=func,proto3" json:"func,omitempty"` - StartMs int64 `protobuf:"varint,3,opt,name=start_ms,json=startMs,proto3" json:"start_ms,omitempty"` - EndMs int64 `protobuf:"varint,4,opt,name=end_ms,json=endMs,proto3" json:"end_ms,omitempty"` - Grouping []string `protobuf:"bytes,5,rep,name=grouping,proto3" json:"grouping,omitempty"` - By bool `protobuf:"varint,6,opt,name=by,proto3" json:"by,omitempty"` - RangeMs int64 `protobuf:"varint,7,opt,name=range_ms,json=rangeMs,proto3" json:"range_ms,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + StepMs int64 `protobuf:"varint,1,opt,name=step_ms,json=stepMs,proto3" json:"step_ms,omitempty"` + Func string `protobuf:"bytes,2,opt,name=func,proto3" json:"func,omitempty"` + StartMs int64 `protobuf:"varint,3,opt,name=start_ms,json=startMs,proto3" json:"start_ms,omitempty"` + EndMs int64 `protobuf:"varint,4,opt,name=end_ms,json=endMs,proto3" json:"end_ms,omitempty"` + Grouping []string `protobuf:"bytes,5,rep,name=grouping,proto3" json:"grouping,omitempty"` + By bool `protobuf:"varint,6,opt,name=by,proto3" json:"by,omitempty"` + RangeMs int64 `protobuf:"varint,7,opt,name=range_ms,json=rangeMs,proto3" json:"range_ms,omitempty"` } func (m *ReadHints) Reset() { *m = ReadHints{} } @@ -891,13 +868,10 @@ func (m *ReadHints) GetRangeMs() int64 { // Chunk represents a TSDB chunk. // Time range [min, max] is inclusive. 
type Chunk struct { - MinTimeMs int64 `protobuf:"varint,1,opt,name=min_time_ms,json=minTimeMs,proto3" json:"min_time_ms,omitempty"` - MaxTimeMs int64 `protobuf:"varint,2,opt,name=max_time_ms,json=maxTimeMs,proto3" json:"max_time_ms,omitempty"` - Type Chunk_Encoding `protobuf:"varint,3,opt,name=type,proto3,enum=prometheus_copy.Chunk_Encoding" json:"type,omitempty"` - Data []byte `protobuf:"bytes,4,opt,name=data,proto3" json:"data,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + MinTimeMs int64 `protobuf:"varint,1,opt,name=min_time_ms,json=minTimeMs,proto3" json:"min_time_ms,omitempty"` + MaxTimeMs int64 `protobuf:"varint,2,opt,name=max_time_ms,json=maxTimeMs,proto3" json:"max_time_ms,omitempty"` + Type Chunk_Encoding `protobuf:"varint,3,opt,name=type,proto3,enum=prometheus_copy.Chunk_Encoding" json:"type,omitempty"` + Data []byte `protobuf:"bytes,4,opt,name=data,proto3" json:"data,omitempty"` } func (m *Chunk) Reset() { *m = Chunk{} } @@ -964,12 +938,9 @@ func (m *Chunk) GetData() []byte { // ChunkedSeries represents single, encoded time series. type ChunkedSeries struct { // Labels should be sorted. - Labels []*labelpb.Label `protobuf:"bytes,1,rep,name=labels,proto3" json:"labels,omitempty"` + Labels []labelpb.Label `protobuf:"bytes,1,rep,name=labels,proto3" json:"labels"` // Chunks will be in start time order and may overlap. - Chunks []*Chunk `protobuf:"bytes,2,rep,name=chunks,proto3" json:"chunks,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Chunks []Chunk `protobuf:"bytes,2,rep,name=chunks,proto3" json:"chunks"` } func (m *ChunkedSeries) Reset() { *m = ChunkedSeries{} } @@ -1005,14 +976,14 @@ func (m *ChunkedSeries) XXX_DiscardUnknown() { var xxx_messageInfo_ChunkedSeries proto.InternalMessageInfo -func (m *ChunkedSeries) GetLabels() []*labelpb.Label { +func (m *ChunkedSeries) GetLabels() []labelpb.Label { if m != nil { return m.Labels } return nil } -func (m *ChunkedSeries) GetChunks() []*Chunk { +func (m *ChunkedSeries) GetChunks() []Chunk { if m != nil { return m.Chunks } @@ -1039,74 +1010,77 @@ func init() { func init() { proto.RegisterFile("store/storepb/prompb/types.proto", fileDescriptor_166e07899dab7c14) } var fileDescriptor_166e07899dab7c14 = []byte{ - // 1071 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x56, 0x5f, 0x8f, 0xdb, 0x44, - 0x10, 0xaf, 0xe3, 0xc4, 0x89, 0xe7, 0x92, 0x9c, 0xbb, 0x94, 0xd6, 0x3d, 0xe0, 0x08, 0x16, 0x85, - 0xa8, 0x42, 0x39, 0xd1, 0x22, 0x90, 0x50, 0x85, 0x74, 0x77, 0xcd, 0xfd, 0x11, 0x4d, 0xa2, 0x6e, - 0x72, 0x82, 0xf2, 0x12, 0x6d, 0x92, 0x4d, 0x6c, 0xd5, 0xff, 0xe4, 0xdd, 0x54, 0x17, 0x3e, 0x0a, - 0x6f, 0x7c, 0x04, 0xbe, 0x05, 0x4f, 0x88, 0x27, 0x5e, 0x78, 0x41, 0xf7, 0x49, 0xd0, 0x8e, 0xed, - 0x38, 0xb9, 0x50, 0xa9, 0xbc, 0x44, 0x3b, 0xbf, 0xf9, 0xcd, 0xce, 0xcf, 0x3b, 0xb3, 0xb3, 0x81, - 0x96, 0x90, 0x51, 0xc2, 0x8f, 0xf0, 0x37, 0x9e, 0x1c, 0xc5, 0x49, 0x14, 0xc4, 0x93, 0x23, 0xb9, - 0x8a, 0xb9, 0xe8, 0xc4, 0x49, 0x24, 0x23, 0xb2, 0xaf, 0x30, 0x2e, 0x5d, 0xbe, 0x14, 0xe3, 0x69, - 0x14, 0xaf, 0x0e, 0x1e, 0xa6, 0x21, 0x3e, 0x9b, 0x70, 0x7f, 0x9b, 0xeb, 0xfc, 0x5a, 0x82, 0x66, - 0x8f, 0xcb, 0xc4, 0x9b, 0xf6, 0xb8, 0x64, 0x33, 0x26, 0x19, 0xf9, 0x0e, 0xca, 0x8a, 0x61, 0x6b, - 0x2d, 0xad, 0xdd, 0x7c, 0xf2, 0xb8, 0x73, 0x6b, 0xb7, 0xce, 0x36, 0x3d, 0x33, 0x47, 0xab, 0x98, - 0x53, 0x8c, 0x23, 0x5f, 0x00, 0x09, 0x10, 0x1b, 0xcf, 0x59, 
0xe0, 0xf9, 0xab, 0x71, 0xc8, 0x02, - 0x6e, 0x97, 0x5a, 0x5a, 0xdb, 0xa4, 0x56, 0xea, 0x39, 0x43, 0x47, 0x9f, 0x05, 0x9c, 0x10, 0x28, - 0xbb, 0xdc, 0x8f, 0xed, 0x32, 0xfa, 0x71, 0xad, 0xb0, 0x65, 0xe8, 0x49, 0xbb, 0x92, 0x62, 0x6a, - 0xed, 0xac, 0x00, 0x8a, 0x4c, 0x64, 0x0f, 0xaa, 0x57, 0xfd, 0xef, 0xfb, 0x83, 0x1f, 0xfa, 0xd6, - 0x1d, 0x65, 0x9c, 0x0e, 0xae, 0xfa, 0xa3, 0x2e, 0xb5, 0x34, 0x62, 0x42, 0xe5, 0xfc, 0xf8, 0xea, - 0xbc, 0x6b, 0x95, 0x48, 0x03, 0xcc, 0x8b, 0xcb, 0xe1, 0x68, 0x70, 0x4e, 0x8f, 0x7b, 0x96, 0x4e, - 0x08, 0x34, 0xd1, 0x53, 0x60, 0x65, 0x15, 0x3a, 0xbc, 0xea, 0xf5, 0x8e, 0xe9, 0x2b, 0xab, 0x42, - 0x6a, 0x50, 0xbe, 0xec, 0x9f, 0x0d, 0x2c, 0x83, 0xd4, 0xa1, 0x36, 0x1c, 0x1d, 0x8f, 0xba, 0xc3, - 0xee, 0xc8, 0xaa, 0x3a, 0xcf, 0xc0, 0x18, 0xb2, 0x20, 0xf6, 0x39, 0xb9, 0x07, 0x95, 0x37, 0xcc, - 0x5f, 0xa6, 0x67, 0xa3, 0xd1, 0xd4, 0x20, 0x1f, 0x82, 0x29, 0xbd, 0x80, 0x0b, 0xc9, 0x82, 0x18, - 0xbf, 0x53, 0xa7, 0x05, 0xe0, 0x70, 0xa8, 0x75, 0xaf, 0x79, 0x10, 0xfb, 0x2c, 0x21, 0x8f, 0xc0, - 0xc0, 0x22, 0x08, 0x5b, 0x6b, 0xe9, 0xed, 0xbd, 0x27, 0x8d, 0x8e, 0x74, 0x59, 0x18, 0x89, 0xce, - 0x0b, 0x85, 0xd2, 0xcc, 0x59, 0xa4, 0x29, 0xbd, 0x35, 0x8d, 0x7e, 0x3b, 0xcd, 0x1f, 0x15, 0x30, - 0x2f, 0x3c, 0x21, 0xa3, 0x45, 0xc2, 0x02, 0xf2, 0x11, 0x98, 0xd3, 0x68, 0x19, 0xca, 0xb1, 0x17, - 0x4a, 0x14, 0x5b, 0xbe, 0xb8, 0x43, 0x6b, 0x08, 0x5d, 0x86, 0x92, 0x7c, 0x02, 0x7b, 0xa9, 0x7b, - 0xee, 0x47, 0x4c, 0xa6, 0x69, 0x2e, 0xee, 0x50, 0x40, 0xf0, 0x4c, 0x61, 0xc4, 0x02, 0x5d, 0x2c, - 0x03, 0xcc, 0xa3, 0x51, 0xb5, 0x24, 0xf7, 0xc1, 0x10, 0x53, 0x97, 0x07, 0x0c, 0x6b, 0x75, 0x97, - 0x66, 0x16, 0x79, 0x04, 0xcd, 0x9f, 0x79, 0x12, 0x8d, 0xa5, 0x9b, 0x70, 0xe1, 0x46, 0xfe, 0x0c, - 0xeb, 0xa6, 0xd1, 0x86, 0x42, 0x47, 0x39, 0x48, 0x3e, 0xcb, 0x68, 0x85, 0x2e, 0x03, 0x75, 0x69, - 0xb4, 0xae, 0xf0, 0xd3, 0x5c, 0xdb, 0x63, 0xb0, 0x36, 0x78, 0xa9, 0xc0, 0x2a, 0x0a, 0xd4, 0x68, - 0x73, 0xcd, 0x4c, 0x45, 0x9e, 0x40, 0x33, 0xe4, 0x0b, 0x26, 0xbd, 0x37, 0x7c, 0x2c, 0x62, 0x16, - 0x0a, 0xbb, 0x86, 0xe7, 0xfa, 0xc1, 0x4e, 0xd3, 0x9e, 0x2c, 0xa7, 0xaf, 0xb9, 0x1c, 0xc6, 0x2c, - 0xa4, 0x8d, 0x3c, 0x44, 0x59, 0x82, 0x7c, 0x0e, 0xfb, 0xeb, 0x3d, 0x66, 0xdc, 0x97, 0x4c, 0xd8, - 0x66, 0x4b, 0x6f, 0x13, 0xba, 0xde, 0xfa, 0x39, 0xa2, 0x5b, 0x44, 0x14, 0x27, 0x6c, 0x68, 0xe9, - 0x6d, 0xad, 0x20, 0xa2, 0x32, 0xa1, 0x54, 0xc5, 0x91, 0xf0, 0x36, 0x54, 0xed, 0xbd, 0x83, 0xaa, - 0x3c, 0x64, 0xad, 0x6a, 0xbd, 0x47, 0xa6, 0xaa, 0x9e, 0xaa, 0xca, 0xe1, 0x42, 0xd5, 0x9a, 0x98, - 0xa9, 0x6a, 0xa4, 0xaa, 0x72, 0x38, 0x53, 0x75, 0x0a, 0x90, 0x70, 0xc1, 0xe5, 0xd8, 0x55, 0x67, - 0xdf, 0xc4, 0xcb, 0xfd, 0xe9, 0x8e, 0xa2, 0x75, 0x0b, 0x75, 0xa8, 0x22, 0x5f, 0x78, 0xa1, 0xa4, - 0x66, 0x92, 0x2f, 0xb7, 0x7b, 0x70, 0xff, 0x76, 0x0f, 0x7e, 0x05, 0xe6, 0x3a, 0x6a, 0xfb, 0x8a, - 0x56, 0x41, 0x7f, 0xd5, 0x1d, 0x5a, 0x1a, 0x31, 0xa0, 0xd4, 0x1f, 0x58, 0xa5, 0xe2, 0x9a, 0xea, - 0x27, 0x55, 0xa8, 0xa0, 0xf0, 0x93, 0x3a, 0x40, 0x51, 0x79, 0xe7, 0x19, 0x40, 0x71, 0x3c, 0xaa, - 0xf9, 0xa2, 0xf9, 0x5c, 0xf0, 0xb4, 0x9b, 0xef, 0xd2, 0xcc, 0x52, 0xb8, 0xcf, 0xc3, 0x85, 0x74, - 0xb1, 0x89, 0x1b, 0x34, 0xb3, 0x9c, 0xbf, 0x35, 0x80, 0x91, 0x17, 0xf0, 0x21, 0x4f, 0x3c, 0x2e, - 0xde, 0xf5, 0xe2, 0x7d, 0x09, 0x55, 0x81, 0x37, 0x5d, 0xd8, 0x25, 0xe4, 0x3d, 0xd8, 0x39, 0xa0, - 0x74, 0x12, 0xd0, 0x9c, 0x47, 0xbe, 0x01, 0x93, 0x67, 0xd7, 0x5b, 0xd8, 0x3a, 0x06, 0x3d, 0xdc, - 0x09, 0xca, 0x07, 0x00, 0x2d, 0xb8, 0xe4, 0x5b, 0x00, 0x37, 0x3f, 0x6c, 0x61, 0x97, 0x31, 0xf2, - 0xe0, 0xed, 0xf5, 0xa0, 0x1b, 0x6c, 0xe7, 0x17, 0x0d, 0xea, 0xa8, 0xbc, 0xc7, 0xe4, 
0xd4, 0xe5, - 0x09, 0xf9, 0x7a, 0x6b, 0x66, 0x3b, 0x3b, 0xdb, 0x6c, 0x92, 0x3b, 0x1b, 0xb3, 0x9a, 0x40, 0x79, - 0x63, 0x3a, 0xe3, 0xba, 0x98, 0x3e, 0x3a, 0x82, 0xa9, 0xe1, 0xb4, 0xa1, 0x8c, 0x93, 0xd7, 0x80, - 0x52, 0xf7, 0x65, 0x5a, 0xd1, 0x7e, 0xf7, 0x65, 0x5a, 0x51, 0xaa, 0xa6, 0xad, 0x02, 0x68, 0xd7, - 0xd2, 0x9d, 0xdf, 0x34, 0xd5, 0x06, 0x6c, 0xa6, 0xba, 0x40, 0x90, 0x07, 0x50, 0x15, 0x92, 0xc7, - 0xe3, 0x40, 0xa0, 0x38, 0x9d, 0x1a, 0xca, 0xec, 0x09, 0x95, 0x7a, 0xbe, 0x0c, 0xa7, 0x79, 0x6a, - 0xb5, 0x26, 0x0f, 0xa1, 0x26, 0x24, 0x4b, 0xa4, 0x62, 0xa7, 0x13, 0xae, 0x8a, 0x76, 0x4f, 0x90, - 0xf7, 0xc1, 0xe0, 0xe1, 0x6c, 0x8c, 0x47, 0xa5, 0x1c, 0x15, 0x1e, 0xce, 0x7a, 0x82, 0x1c, 0x40, - 0x6d, 0x91, 0x44, 0xcb, 0xd8, 0x0b, 0x17, 0x76, 0xa5, 0xa5, 0xb7, 0x4d, 0xba, 0xb6, 0x49, 0x13, - 0x4a, 0x93, 0x15, 0x4e, 0x99, 0x1a, 0x2d, 0x4d, 0x56, 0x6a, 0xf7, 0x84, 0x85, 0x0b, 0xae, 0x36, - 0xa9, 0xa6, 0xbb, 0xa3, 0xdd, 0x13, 0xce, 0x5f, 0x1a, 0x54, 0x4e, 0xdd, 0x65, 0xf8, 0x9a, 0x1c, - 0xc2, 0x5e, 0xe0, 0x85, 0x63, 0xd5, 0xd4, 0x85, 0x66, 0x33, 0xf0, 0x42, 0xd5, 0x4d, 0x3d, 0x81, - 0x7e, 0x76, 0xbd, 0xf6, 0x67, 0xe3, 0x3e, 0x60, 0xd7, 0x99, 0xff, 0x69, 0x56, 0x09, 0x1d, 0x2b, - 0xf1, 0xf1, 0x4e, 0x25, 0x30, 0x4b, 0xa7, 0x1b, 0x4e, 0xa3, 0x99, 0x17, 0x2e, 0x8a, 0x32, 0xa8, - 0xb7, 0x14, 0x3f, 0xad, 0x4e, 0x71, 0xed, 0x3c, 0x87, 0x5a, 0xce, 0xda, 0xb9, 0x4b, 0x3f, 0x0e, - 0xd4, 0x53, 0xb7, 0xf5, 0xbe, 0x95, 0xc8, 0x7b, 0xb0, 0x7f, 0xf6, 0x62, 0x70, 0x3c, 0x1a, 0x6f, - 0x3c, 0x7a, 0xce, 0x1c, 0x1a, 0x98, 0x91, 0xcf, 0xfe, 0xdf, 0x4d, 0xe8, 0x80, 0x31, 0x55, 0x71, - 0xf9, 0x45, 0xb8, 0xff, 0xdf, 0x1f, 0x42, 0x33, 0xd6, 0xc9, 0xbd, 0xdf, 0x6f, 0x0e, 0xb5, 0x3f, - 0x6f, 0x0e, 0xb5, 0x7f, 0x6e, 0x0e, 0xb5, 0x9f, 0x8c, 0xf4, 0x5f, 0xc9, 0xc4, 0xc0, 0x3f, 0x19, - 0x4f, 0xff, 0x0d, 0x00, 0x00, 0xff, 0xff, 0x6b, 0xfa, 0x90, 0x8a, 0xb4, 0x08, 0x00, 0x00, + // 1115 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x56, 0xcf, 0x8e, 0x1a, 0xc7, + 0x13, 0x66, 0x18, 0x18, 0x98, 0x5a, 0x60, 0xc7, 0xfd, 0xf3, 0xcf, 0x1e, 0x6f, 0x12, 0x4c, 0x46, + 0xf9, 0x83, 0x9c, 0x88, 0x95, 0x6c, 0x2b, 0xb9, 0x38, 0x51, 0x76, 0xd7, 0xac, 0xb1, 0x62, 0x40, + 0x6e, 0x58, 0x25, 0xce, 0x05, 0x35, 0xd0, 0x0b, 0x23, 0x33, 0x7f, 0x34, 0xdd, 0x58, 0x4b, 0x9e, + 0x22, 0xe7, 0xdc, 0xf2, 0x08, 0x79, 0x0b, 0x1f, 0x7d, 0x8a, 0xa2, 0x1c, 0xa2, 0x68, 0xf7, 0x11, + 0xf2, 0x02, 0x51, 0xd7, 0xcc, 0x30, 0xb0, 0xc4, 0x52, 0x92, 0x0b, 0xea, 0xfa, 0xea, 0xab, 0xae, + 0x6f, 0xba, 0xaa, 0xab, 0x81, 0x86, 0x90, 0x41, 0xc4, 0x0f, 0xf1, 0x37, 0x1c, 0x1f, 0x86, 0x51, + 0xe0, 0x85, 0xe3, 0x43, 0xb9, 0x0a, 0xb9, 0x68, 0x85, 0x51, 0x20, 0x03, 0xb2, 0xaf, 0x30, 0x2e, + 0xe7, 0x7c, 0x29, 0x46, 0x93, 0x20, 0x5c, 0x1d, 0xdc, 0x9c, 0x05, 0xb3, 0x00, 0x7d, 0x87, 0x6a, + 0x15, 0xd3, 0x0e, 0xee, 0xc4, 0x1b, 0x2d, 0xd8, 0x98, 0x2f, 0xb6, 0x77, 0x70, 0x7e, 0xca, 0x43, + 0xad, 0xcb, 0x65, 0xe4, 0x4e, 0xba, 0x5c, 0xb2, 0x29, 0x93, 0x8c, 0x7c, 0x09, 0x05, 0xc5, 0xb0, + 0xb5, 0x86, 0xd6, 0xac, 0xdd, 0xbf, 0xd7, 0xba, 0x96, 0xa3, 0xb5, 0x4d, 0x4f, 0xcc, 0xe1, 0x2a, + 0xe4, 0x14, 0xe3, 0xc8, 0xa7, 0x40, 0x3c, 0xc4, 0x46, 0xe7, 0xcc, 0x73, 0x17, 0xab, 0x91, 0xcf, + 0x3c, 0x6e, 0xe7, 0x1b, 0x5a, 0xd3, 0xa4, 0x56, 0xec, 0x39, 0x45, 0x47, 0x8f, 0x79, 0x9c, 0x10, + 0x28, 0xcc, 0xf9, 0x22, 0xb4, 0x0b, 0xe8, 0xc7, 0xb5, 0xc2, 0x96, 0xbe, 0x2b, 0xed, 0x62, 0x8c, + 0xa9, 0xb5, 0xb3, 0x02, 0xc8, 0x32, 0x91, 0x3d, 0x28, 0x9d, 0xf5, 0xbe, 0xee, 0xf5, 0xbf, 0xe9, + 0x59, 0x39, 0x65, 0x9c, 0xf4, 0xcf, 0x7a, 0xc3, 0x36, 0xb5, 0x34, 
0x62, 0x42, 0xf1, 0xc9, 0xd1, + 0xd9, 0x93, 0xb6, 0x95, 0x27, 0x55, 0x30, 0x3b, 0x4f, 0x07, 0xc3, 0xfe, 0x13, 0x7a, 0xd4, 0xb5, + 0x74, 0x42, 0xa0, 0x86, 0x9e, 0x0c, 0x2b, 0xa8, 0xd0, 0xc1, 0x59, 0xb7, 0x7b, 0x44, 0x5f, 0x58, + 0x45, 0x52, 0x86, 0xc2, 0xd3, 0xde, 0x69, 0xdf, 0x32, 0x48, 0x05, 0xca, 0x83, 0xe1, 0xd1, 0xb0, + 0x3d, 0x68, 0x0f, 0xad, 0x92, 0xf3, 0x08, 0x8c, 0x01, 0xf3, 0xc2, 0x05, 0x27, 0x37, 0xa1, 0xf8, + 0x8a, 0x2d, 0x96, 0xf1, 0xd9, 0x68, 0x34, 0x36, 0xc8, 0xbb, 0x60, 0x4a, 0xd7, 0xe3, 0x42, 0x32, + 0x2f, 0xc4, 0xef, 0xd4, 0x69, 0x06, 0x38, 0x2f, 0xa1, 0xdc, 0xbe, 0xe0, 0x5e, 0xb8, 0x60, 0x11, + 0xf9, 0x04, 0x0c, 0x2c, 0x82, 0xb0, 0xb5, 0x86, 0xde, 0xdc, 0xbb, 0x5f, 0x6d, 0xc9, 0x39, 0xf3, + 0x03, 0xd1, 0x7a, 0xa6, 0xd0, 0xe3, 0xc2, 0xeb, 0xdf, 0xef, 0xe6, 0x68, 0x42, 0xc9, 0x92, 0xe5, + 0xdf, 0x9a, 0x4c, 0xbf, 0x9e, 0xec, 0xb7, 0x22, 0x98, 0x1d, 0x57, 0xc8, 0x60, 0x16, 0x31, 0x8f, + 0xbc, 0x07, 0xe6, 0x24, 0x58, 0xfa, 0x72, 0xe4, 0xfa, 0x12, 0x25, 0x17, 0x3a, 0x39, 0x5a, 0x46, + 0xe8, 0xa9, 0x2f, 0xc9, 0xfb, 0xb0, 0x17, 0xbb, 0xcf, 0x17, 0x01, 0x93, 0x71, 0x9a, 0x4e, 0x8e, + 0x02, 0x82, 0xa7, 0x0a, 0x23, 0x16, 0xe8, 0x62, 0xe9, 0x61, 0x1e, 0x8d, 0xaa, 0x25, 0xb9, 0x05, + 0x86, 0x98, 0xcc, 0xb9, 0xc7, 0xb0, 0x62, 0x37, 0x68, 0x62, 0x91, 0x0f, 0xa1, 0xf6, 0x3d, 0x8f, + 0x82, 0x91, 0x9c, 0x47, 0x5c, 0xcc, 0x83, 0xc5, 0x14, 0xab, 0xa7, 0xd1, 0xaa, 0x42, 0x87, 0x29, + 0x48, 0x3e, 0x4a, 0x68, 0x99, 0x2e, 0x03, 0x75, 0x69, 0xb4, 0xa2, 0xf0, 0x93, 0x54, 0xdb, 0x3d, + 0xb0, 0x36, 0x78, 0xb1, 0xc0, 0x12, 0x0a, 0xd4, 0x68, 0x6d, 0xcd, 0x8c, 0x45, 0x76, 0xa0, 0xe6, + 0xf3, 0x19, 0x93, 0xee, 0x2b, 0x3e, 0x12, 0x21, 0xf3, 0x85, 0x5d, 0xc6, 0xd3, 0x7d, 0x67, 0xa7, + 0x75, 0x8f, 0x97, 0x93, 0x97, 0x5c, 0x0e, 0x42, 0xe6, 0x27, 0x67, 0x5d, 0x4d, 0x03, 0x15, 0x26, + 0xc8, 0xc7, 0xb0, 0xbf, 0xde, 0x69, 0xca, 0x17, 0x92, 0x09, 0xdb, 0x6c, 0xe8, 0x4d, 0x42, 0xd7, + 0x09, 0x1e, 0x23, 0xba, 0x45, 0x44, 0x89, 0xc2, 0x86, 0x86, 0xde, 0xd4, 0x32, 0x22, 0xea, 0x13, + 0x4a, 0x5b, 0x18, 0x08, 0x77, 0x43, 0xdb, 0xde, 0x3f, 0xd6, 0x96, 0x06, 0xae, 0xb5, 0xad, 0x77, + 0x4a, 0xb4, 0x55, 0x62, 0x6d, 0x29, 0x9c, 0x69, 0x5b, 0x13, 0x13, 0x6d, 0xd5, 0x58, 0x5b, 0x0a, + 0x27, 0xda, 0x4e, 0x00, 0x22, 0x2e, 0xb8, 0x1c, 0xcd, 0x55, 0x1d, 0x6a, 0x78, 0xdd, 0x3f, 0xd8, + 0xd1, 0xb5, 0x6e, 0xa7, 0x16, 0x55, 0xe4, 0x8e, 0xeb, 0x4b, 0x6a, 0x46, 0xe9, 0x72, 0xbb, 0x1f, + 0xf7, 0xaf, 0xf7, 0xe3, 0x43, 0x30, 0xd7, 0x51, 0xdb, 0x97, 0xb6, 0x04, 0xfa, 0x8b, 0xf6, 0xc0, + 0xd2, 0x88, 0x01, 0xf9, 0x5e, 0xdf, 0xca, 0x67, 0x17, 0x57, 0x3f, 0x2e, 0x41, 0x11, 0x85, 0x1f, + 0x57, 0x00, 0xb2, 0x2e, 0x70, 0x1e, 0x01, 0x64, 0x87, 0xa4, 0x1a, 0x31, 0x38, 0x3f, 0x17, 0x3c, + 0xee, 0xec, 0x1b, 0x34, 0xb1, 0x14, 0xbe, 0xe0, 0xfe, 0x4c, 0xce, 0xb1, 0xa1, 0xab, 0x34, 0xb1, + 0x9c, 0x3f, 0x35, 0x80, 0xa1, 0xeb, 0xf1, 0x01, 0x8f, 0x5c, 0x2e, 0xfe, 0xdd, 0x55, 0xfc, 0x1c, + 0x4a, 0x02, 0x27, 0x80, 0xb0, 0xf3, 0xc8, 0xbe, 0xbd, 0x73, 0x4c, 0xf1, 0x84, 0x48, 0xe2, 0x52, + 0x36, 0xf9, 0x02, 0x4c, 0x9e, 0x5c, 0x7e, 0x61, 0xeb, 0x18, 0x7a, 0x67, 0x27, 0x34, 0x1d, 0x0f, + 0x49, 0x70, 0x16, 0x41, 0xbe, 0x02, 0x98, 0xa7, 0xc7, 0x2f, 0xec, 0x02, 0xc6, 0x1f, 0xbc, 0xbd, + 0x42, 0xc9, 0x06, 0x1b, 0x31, 0xce, 0x8f, 0x1a, 0x54, 0xf0, 0x8b, 0xba, 0x4c, 0x4e, 0xe6, 0x3c, + 0x22, 0x9f, 0x6d, 0x4d, 0x77, 0x67, 0x67, 0xb3, 0x4d, 0x72, 0x6b, 0x63, 0xaa, 0x13, 0x28, 0x6c, + 0xcc, 0x71, 0x5c, 0x67, 0x13, 0x4a, 0x47, 0x30, 0x36, 0x9c, 0x26, 0x14, 0x70, 0x46, 0x1b, 0x90, + 0x6f, 0x3f, 0x8f, 0x2b, 0xdd, 0x6b, 0x3f, 0x8f, 0x2b, 0x4d, 0xd5, 0x5c, 0x56, 0x00, 0x6d, 
0x5b, + 0xba, 0xf3, 0xb3, 0xa6, 0xda, 0x83, 0x4d, 0x55, 0x77, 0x08, 0x72, 0x1b, 0x4a, 0x42, 0xf2, 0x70, + 0xe4, 0x09, 0x14, 0xa7, 0x53, 0x43, 0x99, 0x5d, 0xa1, 0x52, 0x9f, 0x2f, 0xfd, 0x49, 0x9a, 0x5a, + 0xad, 0xc9, 0x1d, 0x28, 0x0b, 0xc9, 0x22, 0xa9, 0xd8, 0xf1, 0x14, 0x2c, 0xa1, 0xdd, 0x15, 0xe4, + 0xff, 0x60, 0x70, 0x7f, 0x3a, 0xc2, 0x03, 0x53, 0x8e, 0x22, 0xf7, 0xa7, 0x5d, 0x41, 0x0e, 0xa0, + 0x3c, 0x8b, 0x82, 0x65, 0xe8, 0xfa, 0x33, 0xbb, 0xd8, 0xd0, 0x9b, 0x26, 0x5d, 0xdb, 0xa4, 0x06, + 0xf9, 0xf1, 0x0a, 0x27, 0x51, 0x99, 0xe6, 0xc7, 0x2b, 0xb5, 0x7b, 0xc4, 0xfc, 0x19, 0x57, 0x9b, + 0x94, 0xe2, 0xdd, 0xd1, 0xee, 0x0a, 0xe7, 0x17, 0x0d, 0x8a, 0x27, 0xf3, 0xa5, 0xff, 0x92, 0xd4, + 0x61, 0xcf, 0x73, 0xfd, 0x91, 0x6a, 0xf6, 0x4c, 0xb3, 0xe9, 0xb9, 0xbe, 0xea, 0xb2, 0xae, 0x40, + 0x3f, 0xbb, 0x58, 0xfb, 0x93, 0x87, 0xc1, 0x63, 0x17, 0x89, 0xff, 0x41, 0x52, 0x09, 0x1d, 0x2b, + 0x71, 0x77, 0xa7, 0x12, 0x98, 0xa5, 0xd5, 0xf6, 0x27, 0xc1, 0xd4, 0xf5, 0x67, 0x59, 0x19, 0xd4, + 0xab, 0x8b, 0x9f, 0x56, 0xa1, 0xb8, 0x76, 0x1e, 0x43, 0x39, 0x65, 0xed, 0xdc, 0xb1, 0x6f, 0xfb, + 0xea, 0x51, 0xdc, 0x7a, 0x09, 0xf3, 0xe4, 0x7f, 0xb0, 0x7f, 0xfa, 0xac, 0x7f, 0x34, 0x1c, 0x6d, + 0x3c, 0x8f, 0x4e, 0x04, 0x55, 0xcc, 0xc8, 0xa7, 0xff, 0xe5, 0x86, 0x3c, 0x04, 0x63, 0xa2, 0xa2, + 0xd3, 0x0b, 0x72, 0xeb, 0xef, 0x3f, 0x27, 0x8d, 0x8a, 0xb9, 0xc7, 0x8d, 0xd7, 0x97, 0x75, 0xed, + 0xcd, 0x65, 0x5d, 0xfb, 0xe3, 0xb2, 0xae, 0xfd, 0x70, 0x55, 0xcf, 0xbd, 0xb9, 0xaa, 0xe7, 0x7e, + 0xbd, 0xaa, 0xe7, 0xbe, 0x33, 0xe2, 0x7f, 0x3b, 0x63, 0x03, 0xff, 0xa6, 0x3c, 0xf8, 0x2b, 0x00, + 0x00, 0xff, 0xff, 0x73, 0xfe, 0x08, 0x7f, 0x0c, 0x09, 0x00, 0x00, } func (m *MetricMetadata) Marshal() (dAtA []byte, err error) { @@ -1129,10 +1103,6 @@ func (m *MetricMetadata) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if len(m.Unit) > 0 { i -= len(m.Unit) copy(dAtA[i:], m.Unit) @@ -1182,10 +1152,6 @@ func (m *Sample) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if m.Timestamp != 0 { i = encodeVarintTypes(dAtA, i, uint64(m.Timestamp)) i-- @@ -1220,10 +1186,6 @@ func (m *Exemplar) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if m.Timestamp != 0 { i = encodeVarintTypes(dAtA, i, uint64(m.Timestamp)) i-- @@ -1272,10 +1234,6 @@ func (m *Histogram) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if m.Timestamp != 0 { i = encodeVarintTypes(dAtA, i, uint64(m.Timestamp)) i-- @@ -1480,10 +1438,6 @@ func (m *BucketSpan) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if m.Length != 0 { i = encodeVarintTypes(dAtA, i, uint64(m.Length)) i-- @@ -1517,10 +1471,6 @@ func (m *TimeSeries) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if len(m.Histograms) > 0 { for iNdEx := len(m.Histograms) - 1; iNdEx >= 0; iNdEx-- { { @@ -1600,10 +1550,6 @@ func (m *LabelMatcher) MarshalToSizedBuffer(dAtA 
[]byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if len(m.Value) > 0 { i -= len(m.Value) copy(dAtA[i:], m.Value) @@ -1646,10 +1592,6 @@ func (m *ReadHints) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if m.RangeMs != 0 { i = encodeVarintTypes(dAtA, i, uint64(m.RangeMs)) i-- @@ -1719,10 +1661,6 @@ func (m *Chunk) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if len(m.Data) > 0 { i -= len(m.Data) copy(dAtA[i:], m.Data) @@ -1768,10 +1706,6 @@ func (m *ChunkedSeries) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if len(m.Chunks) > 0 { for iNdEx := len(m.Chunks) - 1; iNdEx >= 0; iNdEx-- { { @@ -1835,9 +1769,6 @@ func (m *MetricMetadata) Size() (n int) { if l > 0 { n += 1 + l + sovTypes(uint64(l)) } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -1853,9 +1784,6 @@ func (m *Sample) Size() (n int) { if m.Timestamp != 0 { n += 1 + sovTypes(uint64(m.Timestamp)) } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -1877,9 +1805,6 @@ func (m *Exemplar) Size() (n int) { if m.Timestamp != 0 { n += 1 + sovTypes(uint64(m.Timestamp)) } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -1942,9 +1867,6 @@ func (m *Histogram) Size() (n int) { if m.Timestamp != 0 { n += 1 + sovTypes(uint64(m.Timestamp)) } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -1996,9 +1918,6 @@ func (m *BucketSpan) Size() (n int) { if m.Length != 0 { n += 1 + sovTypes(uint64(m.Length)) } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -2032,9 +1951,6 @@ func (m *TimeSeries) Size() (n int) { n += 1 + l + sovTypes(uint64(l)) } } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -2055,9 +1971,6 @@ func (m *LabelMatcher) Size() (n int) { if l > 0 { n += 1 + l + sovTypes(uint64(l)) } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -2092,9 +2005,6 @@ func (m *ReadHints) Size() (n int) { if m.RangeMs != 0 { n += 1 + sovTypes(uint64(m.RangeMs)) } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -2117,9 +2027,6 @@ func (m *Chunk) Size() (n int) { if l > 0 { n += 1 + l + sovTypes(uint64(l)) } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -2141,9 +2048,6 @@ func (m *ChunkedSeries) Size() (n int) { n += 1 + l + sovTypes(uint64(l)) } } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -2309,7 +2213,6 @@ func (m *MetricMetadata) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } @@ -2390,7 +2293,6 @@ func (m *Sample) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) 
iNdEx += skippy } } @@ -2458,7 +2360,7 @@ func (m *Exemplar) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Labels = append(m.Labels, &labelpb.Label{}) + m.Labels = append(m.Labels, labelpb.Label{}) if err := m.Labels[len(m.Labels)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -2505,7 +2407,6 @@ func (m *Exemplar) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } @@ -2678,7 +2579,7 @@ func (m *Histogram) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.NegativeSpans = append(m.NegativeSpans, &BucketSpan{}) + m.NegativeSpans = append(m.NegativeSpans, BucketSpan{}) if err := m.NegativeSpans[len(m.NegativeSpans)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -2844,7 +2745,7 @@ func (m *Histogram) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.PositiveSpans = append(m.PositiveSpans, &BucketSpan{}) + m.PositiveSpans = append(m.PositiveSpans, BucketSpan{}) if err := m.PositiveSpans[len(m.PositiveSpans)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -3031,7 +2932,6 @@ func (m *Histogram) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } @@ -3122,7 +3022,6 @@ func (m *BucketSpan) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } @@ -3190,7 +3089,7 @@ func (m *TimeSeries) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Labels = append(m.Labels, &labelpb.Label{}) + m.Labels = append(m.Labels, labelpb.Label{}) if err := m.Labels[len(m.Labels)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -3224,7 +3123,7 @@ func (m *TimeSeries) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Samples = append(m.Samples, &Sample{}) + m.Samples = append(m.Samples, Sample{}) if err := m.Samples[len(m.Samples)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -3258,7 +3157,7 @@ func (m *TimeSeries) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Exemplars = append(m.Exemplars, &Exemplar{}) + m.Exemplars = append(m.Exemplars, Exemplar{}) if err := m.Exemplars[len(m.Exemplars)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -3292,7 +3191,7 @@ func (m *TimeSeries) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Histograms = append(m.Histograms, &Histogram{}) + m.Histograms = append(m.Histograms, Histogram{}) if err := m.Histograms[len(m.Histograms)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -3309,7 +3208,6 @@ func (m *TimeSeries) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } @@ -3443,7 +3341,6 @@ func (m *LabelMatcher) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) 
iNdEx += skippy } } @@ -3654,7 +3551,6 @@ func (m *ReadHints) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } @@ -3796,7 +3692,6 @@ func (m *Chunk) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } @@ -3864,7 +3759,7 @@ func (m *ChunkedSeries) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Labels = append(m.Labels, &labelpb.Label{}) + m.Labels = append(m.Labels, labelpb.Label{}) if err := m.Labels[len(m.Labels)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -3898,7 +3793,7 @@ func (m *ChunkedSeries) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Chunks = append(m.Chunks, &Chunk{}) + m.Chunks = append(m.Chunks, Chunk{}) if err := m.Chunks[len(m.Chunks)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -3915,7 +3810,6 @@ func (m *ChunkedSeries) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } diff --git a/pkg/store/storepb/prompb/types.proto b/pkg/store/storepb/prompb/types.proto index db6829f401..e508acfd09 100644 --- a/pkg/store/storepb/prompb/types.proto +++ b/pkg/store/storepb/prompb/types.proto @@ -19,8 +19,14 @@ package prometheus_copy; option go_package = "prompb"; +import "gogoproto/gogo.proto"; import "store/labelpb/types.proto"; +// Do not generate XXX fields to reduce memory footprint and opening a door +// for zero-copy casts to/from prometheus data types. +option (gogoproto.goproto_unkeyed_all) = false; +option (gogoproto.goproto_unrecognized_all) = false; +option (gogoproto.goproto_sizecache_all) = false; message MetricMetadata { enum MetricType { @@ -49,7 +55,7 @@ message Sample { message Exemplar { // Optional, can be empty. - repeated thanos.Label labels = 1; + repeated thanos.Label labels = 1 [(gogoproto.nullable) = false]; double value = 2; // timestamp is in ms format, see pkg/timestamp/timestamp.go for // conversion from time.Time to Prometheus timestamp. @@ -90,7 +96,7 @@ message Histogram { } // Negative Buckets. - repeated BucketSpan negative_spans = 8; + repeated BucketSpan negative_spans = 8 [(gogoproto.nullable) = false]; // Use either "negative_deltas" or "negative_counts", the former for // regular histograms with integer counts, the latter for float // histograms. @@ -98,7 +104,7 @@ message Histogram { repeated double negative_counts = 10; // Absolute count of each bucket. // Positive Buckets. - repeated BucketSpan positive_spans = 11; + repeated BucketSpan positive_spans = 11 [(gogoproto.nullable) = false]; // Use either "positive_deltas" or "positive_counts", the former for // regular histograms with integer counts, the latter for float // histograms. @@ -125,10 +131,10 @@ message BucketSpan { // TimeSeries represents samples and labels for a single time series. message TimeSeries { // Labels have to be sorted by label names and without duplicated label names. 
- repeated thanos.Label labels = 1; - repeated Sample samples = 2; - repeated Exemplar exemplars = 3; - repeated Histogram histograms = 4; + repeated thanos.Label labels = 1 [(gogoproto.nullable) = false]; + repeated Sample samples = 2 [(gogoproto.nullable) = false]; + repeated Exemplar exemplars = 3 [(gogoproto.nullable) = false]; + repeated Histogram histograms = 4 [(gogoproto.nullable) = false]; } // Matcher specifies a rule, which can match or set of labels or not. @@ -174,7 +180,7 @@ message Chunk { // ChunkedSeries represents single, encoded time series. message ChunkedSeries { // Labels should be sorted. - repeated thanos.Label labels = 1; + repeated thanos.Label labels = 1 [(gogoproto.nullable) = false]; // Chunks will be in start time order and may overlap. - repeated Chunk chunks = 2; + repeated Chunk chunks = 2 [(gogoproto.nullable) = false]; } diff --git a/pkg/store/storepb/query_hints.go b/pkg/store/storepb/query_hints.go index 479441d75b..956481a584 100644 --- a/pkg/store/storepb/query_hints.go +++ b/pkg/store/storepb/query_hints.go @@ -8,7 +8,7 @@ import ( "strings" ) -func (m *QueryHints) toPromQL(labelMatchers []*LabelMatcher) string { +func (m *QueryHints) toPromQL(labelMatchers []LabelMatcher) string { grouping := m.Grouping.toPromQL() matchers := MatchersToString(labelMatchers...) queryRange := m.Range.toPromQL() diff --git a/pkg/store/storepb/rpc.pb.go b/pkg/store/storepb/rpc.pb.go index e4be49a546..0c2d67dfb1 100644 --- a/pkg/store/storepb/rpc.pb.go +++ b/pkg/store/storepb/rpc.pb.go @@ -10,6 +10,7 @@ import ( math "math" math_bits "math/bits" + _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" types "github.com/gogo/protobuf/types" prompb "github.com/thanos-io/thanos/pkg/store/storepb/prompb" @@ -67,9 +68,6 @@ func (Aggr) EnumDescriptor() ([]byte, []int) { } type WriteResponse struct { - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` } func (m *WriteResponse) Reset() { *m = WriteResponse{} } @@ -106,12 +104,9 @@ func (m *WriteResponse) XXX_DiscardUnknown() { var xxx_messageInfo_WriteResponse proto.InternalMessageInfo type WriteRequest struct { - Timeseries []*prompb.TimeSeries `protobuf:"bytes,1,rep,name=timeseries,proto3" json:"timeseries,omitempty"` - Tenant string `protobuf:"bytes,2,opt,name=tenant,proto3" json:"tenant,omitempty"` - Replica int64 `protobuf:"varint,3,opt,name=replica,proto3" json:"replica,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Timeseries []prompb.TimeSeries `protobuf:"bytes,1,rep,name=timeseries,proto3" json:"timeseries"` + Tenant string `protobuf:"bytes,2,opt,name=tenant,proto3" json:"tenant,omitempty"` + Replica int64 `protobuf:"varint,3,opt,name=replica,proto3" json:"replica,omitempty"` } func (m *WriteRequest) Reset() { *m = WriteRequest{} } @@ -147,33 +142,12 @@ func (m *WriteRequest) XXX_DiscardUnknown() { var xxx_messageInfo_WriteRequest proto.InternalMessageInfo -func (m *WriteRequest) GetTimeseries() []*prompb.TimeSeries { - if m != nil { - return m.Timeseries - } - return nil -} - -func (m *WriteRequest) GetTenant() string { - if m != nil { - return m.Tenant - } - return "" -} - -func (m *WriteRequest) GetReplica() int64 { - if m != nil { - return m.Replica - } - return 0 -} - type SeriesRequest struct { - MinTime int64 `protobuf:"varint,1,opt,name=min_time,json=minTime,proto3" json:"min_time,omitempty"` - MaxTime int64 
`protobuf:"varint,2,opt,name=max_time,json=maxTime,proto3" json:"max_time,omitempty"` - Matchers []*LabelMatcher `protobuf:"bytes,3,rep,name=matchers,proto3" json:"matchers,omitempty"` - MaxResolutionWindow int64 `protobuf:"varint,4,opt,name=max_resolution_window,json=maxResolutionWindow,proto3" json:"max_resolution_window,omitempty"` - Aggregates []Aggr `protobuf:"varint,5,rep,packed,name=aggregates,proto3,enum=thanos.Aggr" json:"aggregates,omitempty"` + MinTime int64 `protobuf:"varint,1,opt,name=min_time,json=minTime,proto3" json:"min_time,omitempty"` + MaxTime int64 `protobuf:"varint,2,opt,name=max_time,json=maxTime,proto3" json:"max_time,omitempty"` + Matchers []LabelMatcher `protobuf:"bytes,3,rep,name=matchers,proto3" json:"matchers"` + MaxResolutionWindow int64 `protobuf:"varint,4,opt,name=max_resolution_window,json=maxResolutionWindow,proto3" json:"max_resolution_window,omitempty"` + Aggregates []Aggr `protobuf:"varint,5,rep,packed,name=aggregates,proto3,enum=thanos.Aggr" json:"aggregates,omitempty"` // Deprecated. Use partial_response_strategy instead. PartialResponseDisabled bool `protobuf:"varint,6,opt,name=partial_response_disabled,json=partialResponseDisabled,proto3" json:"partial_response_disabled,omitempty"` // TODO(bwplotka): Move Thanos components to use strategy instead. Including QueryAPI. @@ -207,10 +181,7 @@ type SeriesRequest struct { // server supports it. WithoutReplicaLabels []string `protobuf:"bytes,14,rep,name=without_replica_labels,json=withoutReplicaLabels,proto3" json:"without_replica_labels,omitempty"` // limit is used to limit the number of results returned - Limit int64 `protobuf:"varint,15,opt,name=limit,proto3" json:"limit,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Limit int64 `protobuf:"varint,15,opt,name=limit,proto3" json:"limit,omitempty"` } func (m *SeriesRequest) Reset() { *m = SeriesRequest{} } @@ -246,111 +217,6 @@ func (m *SeriesRequest) XXX_DiscardUnknown() { var xxx_messageInfo_SeriesRequest proto.InternalMessageInfo -func (m *SeriesRequest) GetMinTime() int64 { - if m != nil { - return m.MinTime - } - return 0 -} - -func (m *SeriesRequest) GetMaxTime() int64 { - if m != nil { - return m.MaxTime - } - return 0 -} - -func (m *SeriesRequest) GetMatchers() []*LabelMatcher { - if m != nil { - return m.Matchers - } - return nil -} - -func (m *SeriesRequest) GetMaxResolutionWindow() int64 { - if m != nil { - return m.MaxResolutionWindow - } - return 0 -} - -func (m *SeriesRequest) GetAggregates() []Aggr { - if m != nil { - return m.Aggregates - } - return nil -} - -func (m *SeriesRequest) GetPartialResponseDisabled() bool { - if m != nil { - return m.PartialResponseDisabled - } - return false -} - -func (m *SeriesRequest) GetPartialResponseStrategy() PartialResponseStrategy { - if m != nil { - return m.PartialResponseStrategy - } - return PartialResponseStrategy_WARN -} - -func (m *SeriesRequest) GetSkipChunks() bool { - if m != nil { - return m.SkipChunks - } - return false -} - -func (m *SeriesRequest) GetHints() *types.Any { - if m != nil { - return m.Hints - } - return nil -} - -func (m *SeriesRequest) GetStep() int64 { - if m != nil { - return m.Step - } - return 0 -} - -func (m *SeriesRequest) GetRange() int64 { - if m != nil { - return m.Range - } - return 0 -} - -func (m *SeriesRequest) GetQueryHints() *QueryHints { - if m != nil { - return m.QueryHints - } - return nil -} - -func (m *SeriesRequest) GetShardInfo() *ShardInfo { - if m != nil { - return 
m.ShardInfo - } - return nil -} - -func (m *SeriesRequest) GetWithoutReplicaLabels() []string { - if m != nil { - return m.WithoutReplicaLabels - } - return nil -} - -func (m *SeriesRequest) GetLimit() int64 { - if m != nil { - return m.Limit - } - return 0 -} - // QueryHints represents hints from PromQL that might help to // pre-aggregate or prepare series for faster use by clients. // Analogous to storage.SelectHints plus additional info. @@ -363,10 +229,7 @@ type QueryHints struct { // The grouping expression Grouping *Grouping `protobuf:"bytes,4,opt,name=grouping,proto3" json:"grouping,omitempty"` // Range vector selector. - Range *Range `protobuf:"bytes,5,opt,name=range,proto3" json:"range,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Range *Range `protobuf:"bytes,5,opt,name=range,proto3" json:"range,omitempty"` } func (m *QueryHints) Reset() { *m = QueryHints{} } @@ -402,34 +265,6 @@ func (m *QueryHints) XXX_DiscardUnknown() { var xxx_messageInfo_QueryHints proto.InternalMessageInfo -func (m *QueryHints) GetStepMillis() int64 { - if m != nil { - return m.StepMillis - } - return 0 -} - -func (m *QueryHints) GetFunc() *Func { - if m != nil { - return m.Func - } - return nil -} - -func (m *QueryHints) GetGrouping() *Grouping { - if m != nil { - return m.Grouping - } - return nil -} - -func (m *QueryHints) GetRange() *Range { - if m != nil { - return m.Range - } - return nil -} - // ShardInfo are the parameters used to shard series in Stores. type ShardInfo struct { // The index of the current shard. @@ -439,10 +274,7 @@ type ShardInfo struct { // Group by or without labels. By bool `protobuf:"varint,3,opt,name=by,proto3" json:"by,omitempty"` // Labels on which to partition series. - Labels []string `protobuf:"bytes,4,rep,name=labels,proto3" json:"labels,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Labels []string `protobuf:"bytes,4,rep,name=labels,proto3" json:"labels,omitempty"` } func (m *ShardInfo) Reset() { *m = ShardInfo{} } @@ -478,40 +310,9 @@ func (m *ShardInfo) XXX_DiscardUnknown() { var xxx_messageInfo_ShardInfo proto.InternalMessageInfo -func (m *ShardInfo) GetShardIndex() int64 { - if m != nil { - return m.ShardIndex - } - return 0 -} - -func (m *ShardInfo) GetTotalShards() int64 { - if m != nil { - return m.TotalShards - } - return 0 -} - -func (m *ShardInfo) GetBy() bool { - if m != nil { - return m.By - } - return false -} - -func (m *ShardInfo) GetLabels() []string { - if m != nil { - return m.Labels - } - return nil -} - type Func struct { // The function or aggregation name - Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` } func (m *Func) Reset() { *m = Func{} } @@ -547,21 +348,11 @@ func (m *Func) XXX_DiscardUnknown() { var xxx_messageInfo_Func proto.InternalMessageInfo -func (m *Func) GetName() string { - if m != nil { - return m.Name - } - return "" -} - type Grouping struct { // Indicate whether it is without or by. By bool `protobuf:"varint,1,opt,name=by,proto3" json:"by,omitempty"` // List of label names used in the grouping. 
- Labels []string `protobuf:"bytes,3,rep,name=labels,proto3" json:"labels,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Labels []string `protobuf:"bytes,3,rep,name=labels,proto3" json:"labels,omitempty"` } func (m *Grouping) Reset() { *m = Grouping{} } @@ -597,25 +388,8 @@ func (m *Grouping) XXX_DiscardUnknown() { var xxx_messageInfo_Grouping proto.InternalMessageInfo -func (m *Grouping) GetBy() bool { - if m != nil { - return m.By - } - return false -} - -func (m *Grouping) GetLabels() []string { - if m != nil { - return m.Labels - } - return nil -} - type Range struct { - Millis int64 `protobuf:"varint,1,opt,name=millis,proto3" json:"millis,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Millis int64 `protobuf:"varint,1,opt,name=millis,proto3" json:"millis,omitempty"` } func (m *Range) Reset() { *m = Range{} } @@ -651,22 +425,12 @@ func (m *Range) XXX_DiscardUnknown() { var xxx_messageInfo_Range proto.InternalMessageInfo -func (m *Range) GetMillis() int64 { - if m != nil { - return m.Millis - } - return 0 -} - type SeriesResponse struct { // Types that are valid to be assigned to Result: // *SeriesResponse_Series // *SeriesResponse_Warning // *SeriesResponse_Hints - Result isSeriesResponse_Result `protobuf_oneof:"result"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Result isSeriesResponse_Result `protobuf_oneof:"result"` } func (m *SeriesResponse) Reset() { *m = SeriesResponse{} } @@ -768,15 +532,12 @@ type LabelNamesRequest struct { // hints is an opaque data structure that can be used to carry additional information. // The content of this field and whether it's supported depends on the // implementation of a specific store. - Hints *types.Any `protobuf:"bytes,5,opt,name=hints,proto3" json:"hints,omitempty"` - Matchers []*LabelMatcher `protobuf:"bytes,6,rep,name=matchers,proto3" json:"matchers,omitempty"` + Hints *types.Any `protobuf:"bytes,5,opt,name=hints,proto3" json:"hints,omitempty"` + Matchers []LabelMatcher `protobuf:"bytes,6,rep,name=matchers,proto3" json:"matchers"` // same as in series request. 
WithoutReplicaLabels []string `protobuf:"bytes,7,rep,name=without_replica_labels,json=withoutReplicaLabels,proto3" json:"without_replica_labels,omitempty"` // limit is used to limit the number of results returned - Limit int64 `protobuf:"varint,8,opt,name=limit,proto3" json:"limit,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Limit int64 `protobuf:"varint,8,opt,name=limit,proto3" json:"limit,omitempty"` } func (m *LabelNamesRequest) Reset() { *m = LabelNamesRequest{} } @@ -812,72 +573,13 @@ func (m *LabelNamesRequest) XXX_DiscardUnknown() { var xxx_messageInfo_LabelNamesRequest proto.InternalMessageInfo -func (m *LabelNamesRequest) GetPartialResponseDisabled() bool { - if m != nil { - return m.PartialResponseDisabled - } - return false -} - -func (m *LabelNamesRequest) GetPartialResponseStrategy() PartialResponseStrategy { - if m != nil { - return m.PartialResponseStrategy - } - return PartialResponseStrategy_WARN -} - -func (m *LabelNamesRequest) GetStart() int64 { - if m != nil { - return m.Start - } - return 0 -} - -func (m *LabelNamesRequest) GetEnd() int64 { - if m != nil { - return m.End - } - return 0 -} - -func (m *LabelNamesRequest) GetHints() *types.Any { - if m != nil { - return m.Hints - } - return nil -} - -func (m *LabelNamesRequest) GetMatchers() []*LabelMatcher { - if m != nil { - return m.Matchers - } - return nil -} - -func (m *LabelNamesRequest) GetWithoutReplicaLabels() []string { - if m != nil { - return m.WithoutReplicaLabels - } - return nil -} - -func (m *LabelNamesRequest) GetLimit() int64 { - if m != nil { - return m.Limit - } - return 0 -} - type LabelNamesResponse struct { Names []string `protobuf:"bytes,1,rep,name=names,proto3" json:"names,omitempty"` Warnings []string `protobuf:"bytes,2,rep,name=warnings,proto3" json:"warnings,omitempty"` - // / hints is an opaque data structure that can be used to carry additional information from - // / the store. The content of this field and whether it's supported depends on the - // / implementation of a specific store. - Hints *types.Any `protobuf:"bytes,3,opt,name=hints,proto3" json:"hints,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + /// hints is an opaque data structure that can be used to carry additional information from + /// the store. The content of this field and whether it's supported depends on the + /// implementation of a specific store. 
+ Hints *types.Any `protobuf:"bytes,3,opt,name=hints,proto3" json:"hints,omitempty"` } func (m *LabelNamesResponse) Reset() { *m = LabelNamesResponse{} } @@ -913,27 +615,6 @@ func (m *LabelNamesResponse) XXX_DiscardUnknown() { var xxx_messageInfo_LabelNamesResponse proto.InternalMessageInfo -func (m *LabelNamesResponse) GetNames() []string { - if m != nil { - return m.Names - } - return nil -} - -func (m *LabelNamesResponse) GetWarnings() []string { - if m != nil { - return m.Warnings - } - return nil -} - -func (m *LabelNamesResponse) GetHints() *types.Any { - if m != nil { - return m.Hints - } - return nil -} - type LabelValuesRequest struct { Label string `protobuf:"bytes,1,opt,name=label,proto3" json:"label,omitempty"` PartialResponseDisabled bool `protobuf:"varint,2,opt,name=partial_response_disabled,json=partialResponseDisabled,proto3" json:"partial_response_disabled,omitempty"` @@ -944,15 +625,12 @@ type LabelValuesRequest struct { // hints is an opaque data structure that can be used to carry additional information. // The content of this field and whether it's supported depends on the // implementation of a specific store. - Hints *types.Any `protobuf:"bytes,6,opt,name=hints,proto3" json:"hints,omitempty"` - Matchers []*LabelMatcher `protobuf:"bytes,7,rep,name=matchers,proto3" json:"matchers,omitempty"` + Hints *types.Any `protobuf:"bytes,6,opt,name=hints,proto3" json:"hints,omitempty"` + Matchers []LabelMatcher `protobuf:"bytes,7,rep,name=matchers,proto3" json:"matchers"` // same as in series request. WithoutReplicaLabels []string `protobuf:"bytes,8,rep,name=without_replica_labels,json=withoutReplicaLabels,proto3" json:"without_replica_labels,omitempty"` // limit is used to limit the number of results returned - Limit int64 `protobuf:"varint,9,opt,name=limit,proto3" json:"limit,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Limit int64 `protobuf:"varint,9,opt,name=limit,proto3" json:"limit,omitempty"` } func (m *LabelValuesRequest) Reset() { *m = LabelValuesRequest{} } @@ -988,79 +666,13 @@ func (m *LabelValuesRequest) XXX_DiscardUnknown() { var xxx_messageInfo_LabelValuesRequest proto.InternalMessageInfo -func (m *LabelValuesRequest) GetLabel() string { - if m != nil { - return m.Label - } - return "" -} - -func (m *LabelValuesRequest) GetPartialResponseDisabled() bool { - if m != nil { - return m.PartialResponseDisabled - } - return false -} - -func (m *LabelValuesRequest) GetPartialResponseStrategy() PartialResponseStrategy { - if m != nil { - return m.PartialResponseStrategy - } - return PartialResponseStrategy_WARN -} - -func (m *LabelValuesRequest) GetStart() int64 { - if m != nil { - return m.Start - } - return 0 -} - -func (m *LabelValuesRequest) GetEnd() int64 { - if m != nil { - return m.End - } - return 0 -} - -func (m *LabelValuesRequest) GetHints() *types.Any { - if m != nil { - return m.Hints - } - return nil -} - -func (m *LabelValuesRequest) GetMatchers() []*LabelMatcher { - if m != nil { - return m.Matchers - } - return nil -} - -func (m *LabelValuesRequest) GetWithoutReplicaLabels() []string { - if m != nil { - return m.WithoutReplicaLabels - } - return nil -} - -func (m *LabelValuesRequest) GetLimit() int64 { - if m != nil { - return m.Limit - } - return 0 -} - type LabelValuesResponse struct { Values []string `protobuf:"bytes,1,rep,name=values,proto3" json:"values,omitempty"` Warnings []string `protobuf:"bytes,2,rep,name=warnings,proto3" json:"warnings,omitempty"` - // / hints is 
an opaque data structure that can be used to carry additional information from - // / the store. The content of this field and whether it's supported depends on the - // / implementation of a specific store. - Hints *types.Any `protobuf:"bytes,3,opt,name=hints,proto3" json:"hints,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + /// hints is an opaque data structure that can be used to carry additional information from + /// the store. The content of this field and whether it's supported depends on the + /// implementation of a specific store. + Hints *types.Any `protobuf:"bytes,3,opt,name=hints,proto3" json:"hints,omitempty"` } func (m *LabelValuesResponse) Reset() { *m = LabelValuesResponse{} } @@ -1096,27 +708,6 @@ func (m *LabelValuesResponse) XXX_DiscardUnknown() { var xxx_messageInfo_LabelValuesResponse proto.InternalMessageInfo -func (m *LabelValuesResponse) GetValues() []string { - if m != nil { - return m.Values - } - return nil -} - -func (m *LabelValuesResponse) GetWarnings() []string { - if m != nil { - return m.Warnings - } - return nil -} - -func (m *LabelValuesResponse) GetHints() *types.Any { - if m != nil { - return m.Hints - } - return nil -} - func init() { proto.RegisterEnum("thanos.Aggr", Aggr_name, Aggr_value) proto.RegisterType((*WriteResponse)(nil), "thanos.WriteResponse") @@ -1137,77 +728,79 @@ func init() { func init() { proto.RegisterFile("store/storepb/rpc.proto", fileDescriptor_a938d55a388af629) } var fileDescriptor_a938d55a388af629 = []byte{ - // 1113 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x56, 0xcd, 0x6e, 0x1b, 0x37, - 0x10, 0xf6, 0x6a, 0xb5, 0xfa, 0x19, 0xd9, 0x8a, 0xc2, 0xd8, 0xce, 0x5a, 0x01, 0x6c, 0x55, 0xbd, - 0x08, 0x81, 0x21, 0x1b, 0x4a, 0x2f, 0xfd, 0xb9, 0xd8, 0x6e, 0x53, 0x07, 0xa8, 0xdd, 0x96, 0x4e, - 0xea, 0xa2, 0x3d, 0x08, 0x94, 0x44, 0xaf, 0x16, 0xd9, 0x3f, 0x2f, 0xb9, 0xb5, 0x75, 0xe8, 0x2b, - 0xf4, 0xd6, 0x5b, 0x4f, 0x7d, 0x9a, 0xde, 0xda, 0x47, 0x28, 0xfc, 0x20, 0x45, 0xc1, 0x21, 0x57, - 0x3f, 0xae, 0x92, 0xb8, 0x70, 0x2e, 0x02, 0x67, 0xbe, 0xd9, 0xe1, 0xf0, 0xe3, 0x37, 0x23, 0xc2, - 0x63, 0x21, 0xe3, 0x94, 0xef, 0xe1, 0x6f, 0x32, 0xd8, 0x4b, 0x93, 0x61, 0x37, 0x49, 0x63, 0x19, - 0x93, 0x92, 0x1c, 0xb3, 0x28, 0x16, 0xcd, 0xad, 0xc5, 0x00, 0x39, 0x49, 0xb8, 0xd0, 0x21, 0xcd, - 0xd6, 0x22, 0x94, 0xa4, 0x71, 0x78, 0x2b, 0x62, 0xcb, 0x8b, 0x63, 0x2f, 0xe0, 0x7b, 0x68, 0x0d, - 0xb2, 0x8b, 0x3d, 0x16, 0x4d, 0x34, 0xd4, 0x7e, 0x00, 0x6b, 0xe7, 0xa9, 0x2f, 0x39, 0xe5, 0x22, - 0x89, 0x23, 0xc1, 0xdb, 0x3f, 0xc3, 0xaa, 0x71, 0x5c, 0x66, 0x5c, 0x48, 0xf2, 0x29, 0x80, 0xf4, - 0x43, 0x2e, 0x78, 0xea, 0x73, 0xe1, 0x5a, 0x2d, 0xbb, 0x53, 0xeb, 0x3d, 0x51, 0x1f, 0x87, 0x5c, - 0x8e, 0x79, 0x26, 0xfa, 0xc3, 0x38, 0x99, 0x74, 0x5f, 0xfa, 0x21, 0x3f, 0xc3, 0x10, 0x3a, 0x17, - 0x4e, 0x36, 0xa1, 0x24, 0x79, 0xc4, 0x22, 0xe9, 0x16, 0x5a, 0x56, 0xa7, 0x4a, 0x8d, 0x45, 0x5c, - 0x28, 0xa7, 0x3c, 0x09, 0xfc, 0x21, 0x73, 0xed, 0x96, 0xd5, 0xb1, 0x69, 0x6e, 0xb6, 0x7f, 0x73, - 0x60, 0xcd, 0x24, 0x32, 0x05, 0x6c, 0x41, 0x25, 0xf4, 0xa3, 0xbe, 0xca, 0xea, 0x5a, 0x3a, 0x38, - 0xf4, 0x23, 0xb5, 0x21, 0x42, 0xec, 0x5a, 0x43, 0x05, 0x03, 0xb1, 0x6b, 0x84, 0xf6, 0x15, 0x24, - 0x87, 0x63, 0x9e, 0x0a, 0xd7, 0xc6, 0xa2, 0xd7, 0xbb, 0x9a, 0xca, 0xee, 0x57, 0x6c, 0xc0, 0x83, - 0x13, 0x0d, 0xd2, 0x69, 0x14, 0xe9, 0xc1, 0x86, 0x4a, 0x96, 0x72, 0x11, 0x07, 0x99, 0xf4, 0xe3, - 0xa8, 0x7f, 0xe5, 0x47, 0xa3, 0xf8, 0xca, 0x2d, 0x62, 0xe6, 0x47, 
0x21, 0xbb, 0xa6, 0x53, 0xec, - 0x1c, 0x21, 0xb2, 0x0b, 0xc0, 0x3c, 0x2f, 0xe5, 0x1e, 0x93, 0x5c, 0xb8, 0x4e, 0xcb, 0xee, 0xd4, - 0x7b, 0xab, 0xf9, 0x3e, 0x07, 0x9e, 0x97, 0xd2, 0x39, 0x9c, 0x7c, 0x02, 0x5b, 0x09, 0x4b, 0xa5, - 0xcf, 0x02, 0xb5, 0x0b, 0xd2, 0xdd, 0x1f, 0xf9, 0x82, 0x0d, 0x02, 0x3e, 0x72, 0x4b, 0x2d, 0xab, - 0x53, 0xa1, 0x8f, 0x4d, 0x40, 0x7e, 0x1d, 0x9f, 0x1b, 0x98, 0xfc, 0xb8, 0xe4, 0x5b, 0x21, 0x53, - 0x26, 0xb9, 0x37, 0x71, 0xcb, 0x2d, 0xab, 0x53, 0xef, 0xed, 0xe4, 0x1b, 0x7f, 0xb3, 0x98, 0xe3, - 0xcc, 0x84, 0xfd, 0x27, 0x79, 0x0e, 0x90, 0x1d, 0xa8, 0x89, 0xd7, 0x7e, 0xd2, 0x1f, 0x8e, 0xb3, - 0xe8, 0xb5, 0x70, 0x2b, 0x58, 0x0a, 0x28, 0xd7, 0x11, 0x7a, 0xc8, 0x53, 0x70, 0xc6, 0x7e, 0x24, - 0x85, 0x5b, 0x6d, 0x59, 0x48, 0xa5, 0x16, 0x54, 0x37, 0x17, 0x54, 0xf7, 0x20, 0x9a, 0x50, 0x1d, - 0x42, 0x08, 0x14, 0x85, 0xe4, 0x89, 0x0b, 0x48, 0x1b, 0xae, 0xc9, 0x3a, 0x38, 0x29, 0x8b, 0x3c, - 0xee, 0xd6, 0xd0, 0xa9, 0x0d, 0xf2, 0x0c, 0x6a, 0x97, 0x19, 0x4f, 0x27, 0x7d, 0x9d, 0x7b, 0x15, - 0x73, 0x93, 0xfc, 0x14, 0xdf, 0x2a, 0xe8, 0x58, 0x21, 0x14, 0x2e, 0xa7, 0x6b, 0xb2, 0x0f, 0x20, - 0xc6, 0x2c, 0x1d, 0xf5, 0xfd, 0xe8, 0x22, 0x76, 0xd7, 0xf0, 0x9b, 0x87, 0xf9, 0x37, 0x67, 0x0a, - 0x79, 0x11, 0x5d, 0xc4, 0xb4, 0x2a, 0xf2, 0x25, 0xf9, 0x08, 0x36, 0xaf, 0x7c, 0x39, 0x8e, 0x33, - 0xd9, 0x37, 0x2a, 0xeb, 0x07, 0x4a, 0x02, 0xc2, 0xad, 0xb7, 0xec, 0x4e, 0x95, 0xae, 0x1b, 0x94, - 0x6a, 0x10, 0xe5, 0x21, 0x54, 0xc9, 0x81, 0x1f, 0xfa, 0xd2, 0x7d, 0xa0, 0x4b, 0x46, 0xa3, 0xfd, - 0xbb, 0x05, 0x30, 0x2b, 0x0c, 0x89, 0x93, 0x3c, 0xe9, 0x87, 0x7e, 0x10, 0xf8, 0xc2, 0xc8, 0x13, - 0x94, 0xeb, 0x04, 0x3d, 0xa4, 0x05, 0xc5, 0x8b, 0x2c, 0x1a, 0xa2, 0x3a, 0x6b, 0x33, 0x69, 0x3c, - 0xcf, 0xa2, 0x21, 0x45, 0x84, 0xec, 0x42, 0xc5, 0x4b, 0xe3, 0x2c, 0xf1, 0x23, 0x0f, 0x95, 0x56, - 0xeb, 0x35, 0xf2, 0xa8, 0x2f, 0x8d, 0x9f, 0x4e, 0x23, 0xc8, 0x87, 0x39, 0x91, 0x0e, 0x86, 0xae, - 0xe5, 0xa1, 0x54, 0x39, 0x0d, 0xaf, 0xed, 0x2b, 0xa8, 0x4e, 0x89, 0xc0, 0x12, 0x0d, 0x5f, 0x23, - 0x7e, 0x3d, 0x2d, 0x51, 0xe3, 0x23, 0x7e, 0x4d, 0x3e, 0x80, 0x55, 0x19, 0x4b, 0x16, 0xf4, 0xd1, - 0x27, 0x4c, 0x23, 0xd5, 0xd0, 0x87, 0x69, 0x04, 0xa9, 0x43, 0x61, 0x30, 0xc1, 0x4e, 0xad, 0xd0, - 0xc2, 0x60, 0xa2, 0xda, 0xda, 0x30, 0x58, 0x44, 0x06, 0x8d, 0xd5, 0x6e, 0x42, 0x51, 0x9d, 0x4c, - 0x49, 0x20, 0x62, 0xa6, 0x5d, 0xab, 0x14, 0xd7, 0xed, 0x1e, 0x54, 0xf2, 0xf3, 0x98, 0x7c, 0xd6, - 0x92, 0x7c, 0xf6, 0x42, 0xbe, 0x1d, 0x70, 0xf0, 0x60, 0x2a, 0x60, 0x81, 0x62, 0x63, 0xb5, 0x7f, - 0xb1, 0xa0, 0x9e, 0x4f, 0x0b, 0xad, 0x69, 0xd2, 0x81, 0xd2, 0x74, 0x56, 0x29, 0x8a, 0xea, 0x53, - 0x6d, 0xa0, 0xf7, 0x78, 0x85, 0x1a, 0x9c, 0x34, 0xa1, 0x7c, 0xc5, 0xd2, 0x48, 0x11, 0x8f, 0xd3, - 0xe9, 0x78, 0x85, 0xe6, 0x0e, 0xb2, 0x9b, 0x0b, 0xde, 0x7e, 0xb3, 0xe0, 0x8f, 0x57, 0x8c, 0xe4, - 0x0f, 0x2b, 0x50, 0x4a, 0xb9, 0xc8, 0x02, 0xd9, 0xfe, 0xa7, 0x00, 0x0f, 0x51, 0x40, 0xa7, 0x2c, - 0x9c, 0x8d, 0xb0, 0xb7, 0x36, 0xbe, 0x75, 0x8f, 0xc6, 0x2f, 0xdc, 0xb3, 0xf1, 0xd7, 0xc1, 0x11, - 0x92, 0xa5, 0xd2, 0x4c, 0x61, 0x6d, 0x90, 0x06, 0xd8, 0x3c, 0x1a, 0x99, 0xb9, 0xa7, 0x96, 0xb3, - 0xfe, 0x77, 0xde, 0xdd, 0xff, 0xf3, 0x93, 0xb7, 0x74, 0xa7, 0xc9, 0xfb, 0xe6, 0x06, 0x2d, 0xdf, - 0xa5, 0x41, 0x2b, 0xf3, 0x0d, 0x9a, 0x02, 0x99, 0xe7, 0xdf, 0x88, 0x62, 0x1d, 0x1c, 0x25, 0x42, - 0xfd, 0xff, 0x55, 0xa5, 0xda, 0x20, 0x4d, 0xa8, 0x98, 0xfb, 0x56, 0xaa, 0x57, 0xc0, 0xd4, 0x9e, - 0x9d, 0xd8, 0x7e, 0xe7, 0x89, 0xdb, 0xbf, 0xda, 0x66, 0xd3, 0xef, 0x58, 0x90, 0xcd, 0x6e, 0x5d, - 0x15, 0xa8, 0xbc, 0xa6, 0x0d, 0xb4, 0xf1, 0x76, 0x2d, 0x14, 0xee, 0xa1, 0x05, 0xfb, 0x7d, 
0x69, - 0xa1, 0xb8, 0x44, 0x0b, 0xce, 0x12, 0x2d, 0x94, 0xfe, 0x9f, 0x16, 0xca, 0xf7, 0xd4, 0x42, 0xe5, - 0x2e, 0x5a, 0xa8, 0xce, 0x6b, 0x21, 0x83, 0x47, 0x0b, 0xd7, 0x62, 0xc4, 0xb0, 0x09, 0xa5, 0x9f, - 0xd0, 0x63, 0xd4, 0x60, 0xac, 0xf7, 0x25, 0x87, 0xa7, 0x87, 0x50, 0x54, 0x7f, 0xfd, 0xa4, 0x0c, - 0x36, 0x3d, 0x38, 0x6f, 0xac, 0x90, 0x2a, 0x38, 0x47, 0x5f, 0xbf, 0x3a, 0x7d, 0xd9, 0xb0, 0x94, - 0xef, 0xec, 0xd5, 0x49, 0xa3, 0xa0, 0x16, 0x27, 0x2f, 0x4e, 0x1b, 0x36, 0x2e, 0x0e, 0xbe, 0x6f, - 0x14, 0x49, 0x0d, 0xca, 0x18, 0xf5, 0x05, 0x6d, 0x38, 0xbd, 0x3f, 0x2d, 0x70, 0xce, 0xd4, 0x83, - 0x8e, 0x7c, 0x0c, 0x25, 0x3d, 0xb9, 0xc8, 0xc6, 0xe2, 0x24, 0x33, 0x32, 0x6b, 0x6e, 0xde, 0x76, - 0xeb, 0x63, 0xee, 0x5b, 0xe4, 0x08, 0x60, 0xd6, 0x0b, 0x64, 0x6b, 0x81, 0xf9, 0xf9, 0xf9, 0xd4, - 0x6c, 0x2e, 0x83, 0x0c, 0x5b, 0xcf, 0xa1, 0x36, 0x47, 0x22, 0x59, 0x0c, 0x5d, 0x10, 0x7c, 0xf3, - 0xc9, 0x52, 0x4c, 0xe7, 0xe9, 0x9d, 0x42, 0x1d, 0xdf, 0x95, 0x4a, 0xc9, 0xfa, 0x64, 0x9f, 0x41, - 0x8d, 0xf2, 0x30, 0x96, 0x1c, 0xfd, 0x64, 0xaa, 0x8c, 0xf9, 0xe7, 0x67, 0x73, 0xe3, 0x96, 0xd7, - 0xbc, 0x52, 0x57, 0x0e, 0x37, 0xfe, 0xb8, 0xd9, 0xb6, 0xfe, 0xba, 0xd9, 0xb6, 0xfe, 0xbe, 0xd9, - 0xb6, 0x7e, 0x28, 0x9b, 0xd7, 0xef, 0xa0, 0x84, 0x37, 0xf2, 0xec, 0xdf, 0x00, 0x00, 0x00, 0xff, - 0xff, 0x8f, 0xaf, 0x7d, 0x15, 0x51, 0x0b, 0x00, 0x00, + // 1149 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x56, 0x4b, 0x6f, 0x23, 0x45, + 0x10, 0xf6, 0x78, 0x3c, 0x7e, 0x94, 0x13, 0xaf, 0xb7, 0xd7, 0xc9, 0x4e, 0xbc, 0x92, 0x63, 0x8c, + 0x90, 0xac, 0x55, 0xe4, 0xac, 0xbc, 0x08, 0x09, 0xc4, 0x25, 0x09, 0x2c, 0x59, 0x89, 0x04, 0xe8, + 0xec, 0x12, 0x04, 0x87, 0x51, 0xdb, 0xee, 0x8c, 0x47, 0x3b, 0xaf, 0x4c, 0xf7, 0x90, 0xf8, 0x0c, + 0x67, 0xc4, 0x9d, 0xdb, 0xfe, 0x9a, 0xdc, 0xd8, 0x23, 0x27, 0x04, 0xc9, 0x1f, 0x41, 0xfd, 0x18, + 0x3f, 0x82, 0xf7, 0xa5, 0xe4, 0x62, 0x75, 0x7d, 0x5f, 0x75, 0x4d, 0x75, 0xf5, 0x57, 0xe5, 0x86, + 0xfb, 0x8c, 0x47, 0x09, 0xdd, 0x96, 0xbf, 0xf1, 0x60, 0x3b, 0x89, 0x87, 0xbd, 0x38, 0x89, 0x78, + 0x84, 0x8a, 0x7c, 0x4c, 0xc2, 0x88, 0x35, 0x37, 0x16, 0x1d, 0xf8, 0x24, 0xa6, 0x4c, 0xb9, 0x34, + 0x1b, 0x6e, 0xe4, 0x46, 0x72, 0xb9, 0x2d, 0x56, 0x1a, 0x6d, 0x2f, 0x6e, 0x88, 0x93, 0x28, 0xb8, + 0xb6, 0x6f, 0xc3, 0x8d, 0x22, 0xd7, 0xa7, 0xdb, 0xd2, 0x1a, 0xa4, 0x27, 0xdb, 0x24, 0x9c, 0x28, + 0xaa, 0x73, 0x07, 0x56, 0x8f, 0x13, 0x8f, 0x53, 0x4c, 0x59, 0x1c, 0x85, 0x8c, 0x76, 0x7e, 0x31, + 0x60, 0x45, 0x23, 0xa7, 0x29, 0x65, 0x1c, 0xed, 0x00, 0x70, 0x2f, 0xa0, 0x8c, 0x26, 0x1e, 0x65, + 0xb6, 0xd1, 0x36, 0xbb, 0xd5, 0xfe, 0x03, 0xb1, 0x3b, 0xa0, 0x7c, 0x4c, 0x53, 0xe6, 0x0c, 0xa3, + 0x78, 0xd2, 0x7b, 0xe6, 0x05, 0xf4, 0x48, 0xba, 0xec, 0x16, 0x2e, 0xfe, 0xde, 0xcc, 0xe1, 0xb9, + 0x4d, 0x68, 0x1d, 0x8a, 0x9c, 0x86, 0x24, 0xe4, 0x76, 0xbe, 0x6d, 0x74, 0x2b, 0x58, 0x5b, 0xc8, + 0x86, 0x52, 0x42, 0x63, 0xdf, 0x1b, 0x12, 0xdb, 0x6c, 0x1b, 0x5d, 0x13, 0x67, 0x66, 0xe7, 0xa5, + 0x05, 0xab, 0x2a, 0x5c, 0x96, 0xc6, 0x06, 0x94, 0x03, 0x2f, 0x74, 0x44, 0x54, 0xdb, 0x50, 0xce, + 0x81, 0x17, 0x8a, 0xcf, 0x4a, 0x8a, 0x9c, 0x2b, 0x2a, 0xaf, 0x29, 0x72, 0x2e, 0xa9, 0x4f, 0x04, + 0xc5, 0x87, 0x63, 0x9a, 0x30, 0xdb, 0x94, 0xa9, 0x37, 0x7a, 0xaa, 0xce, 0xbd, 0xaf, 0xc9, 0x80, + 0xfa, 0x07, 0x8a, 0xd4, 0x39, 0x4f, 0x7d, 0x51, 0x1f, 0xd6, 0x44, 0xc8, 0x84, 0xb2, 0xc8, 0x4f, + 0xb9, 0x17, 0x85, 0xce, 0x99, 0x17, 0x8e, 0xa2, 0x33, 0xbb, 0x20, 0xe3, 0xdf, 0x0b, 0xc8, 0x39, + 0x9e, 0x72, 0xc7, 0x92, 0x42, 0x5b, 0x00, 0xc4, 0x75, 0x13, 0xea, 0x12, 0x4e, 0x99, 0x6d, 0xb5, + 0xcd, 0x6e, 
0xad, 0xbf, 0x92, 0x7d, 0x6d, 0xc7, 0x75, 0x13, 0x3c, 0xc7, 0xa3, 0xcf, 0x60, 0x23, + 0x26, 0x09, 0xf7, 0x88, 0x2f, 0xbe, 0x22, 0x6b, 0xef, 0x8c, 0x3c, 0x46, 0x06, 0x3e, 0x1d, 0xd9, + 0xc5, 0xb6, 0xd1, 0x2d, 0xe3, 0xfb, 0xda, 0x21, 0xbb, 0x9b, 0x2f, 0x34, 0x8d, 0x7e, 0x5a, 0xb2, + 0x97, 0xf1, 0x84, 0x70, 0xea, 0x4e, 0xec, 0x52, 0xdb, 0xe8, 0xd6, 0xfa, 0x9b, 0xd9, 0x87, 0xbf, + 0x5d, 0x8c, 0x71, 0xa4, 0xdd, 0xfe, 0x17, 0x3c, 0x23, 0xd0, 0x26, 0x54, 0xd9, 0x0b, 0x2f, 0x76, + 0x86, 0xe3, 0x34, 0x7c, 0xc1, 0xec, 0xb2, 0x4c, 0x05, 0x04, 0xb4, 0x27, 0x11, 0xf4, 0x10, 0xac, + 0xb1, 0x17, 0x72, 0x66, 0x57, 0xda, 0x86, 0x2c, 0xa8, 0x52, 0x57, 0x2f, 0x53, 0x57, 0x6f, 0x27, + 0x9c, 0x60, 0xe5, 0x82, 0x10, 0x14, 0x18, 0xa7, 0xb1, 0x0d, 0xb2, 0x6c, 0x72, 0x8d, 0x1a, 0x60, + 0x25, 0x24, 0x74, 0xa9, 0x5d, 0x95, 0xa0, 0x32, 0xd0, 0x63, 0xa8, 0x9e, 0xa6, 0x34, 0x99, 0x38, + 0x2a, 0xf6, 0x8a, 0x8c, 0x8d, 0xb2, 0x53, 0x7c, 0x27, 0xa8, 0x7d, 0xc1, 0x60, 0x38, 0x9d, 0xae, + 0xd1, 0x23, 0x00, 0x36, 0x26, 0xc9, 0xc8, 0xf1, 0xc2, 0x93, 0xc8, 0x5e, 0x95, 0x7b, 0xee, 0x66, + 0x7b, 0x8e, 0x04, 0xf3, 0x34, 0x3c, 0x89, 0x70, 0x85, 0x65, 0x4b, 0xf4, 0x31, 0xac, 0x9f, 0x79, + 0x7c, 0x1c, 0xa5, 0xdc, 0xd1, 0x5a, 0x73, 0x7c, 0x21, 0x04, 0x66, 0xd7, 0xda, 0x66, 0xb7, 0x82, + 0x1b, 0x9a, 0xc5, 0x8a, 0x94, 0x22, 0x61, 0x22, 0x65, 0xdf, 0x0b, 0x3c, 0x6e, 0xdf, 0x51, 0x29, + 0x4b, 0xa3, 0xf3, 0xd2, 0x00, 0x98, 0x25, 0x26, 0x0b, 0xc7, 0x69, 0xec, 0x04, 0x9e, 0xef, 0x7b, + 0x4c, 0x8b, 0x14, 0x04, 0x74, 0x20, 0x11, 0xd4, 0x86, 0xc2, 0x49, 0x1a, 0x0e, 0xa5, 0x46, 0xab, + 0x33, 0x69, 0x3c, 0x49, 0xc3, 0x21, 0x96, 0x0c, 0xda, 0x82, 0xb2, 0x9b, 0x44, 0x69, 0xec, 0x85, + 0xae, 0x54, 0x5a, 0xb5, 0x5f, 0xcf, 0xbc, 0xbe, 0xd2, 0x38, 0x9e, 0x7a, 0xa0, 0x0f, 0xb3, 0x42, + 0x5a, 0xd2, 0x75, 0x35, 0x73, 0xc5, 0x02, 0xd4, 0x75, 0xed, 0x9c, 0x41, 0x65, 0x5a, 0x08, 0x99, + 0xa2, 0xae, 0xd7, 0x88, 0x9e, 0x4f, 0x53, 0x54, 0xfc, 0x88, 0x9e, 0xa3, 0x0f, 0x60, 0x85, 0x47, + 0x9c, 0xf8, 0x8e, 0xc4, 0x98, 0x6e, 0xa7, 0xaa, 0xc4, 0x64, 0x18, 0x86, 0x6a, 0x90, 0x1f, 0x4c, + 0x64, 0xbf, 0x96, 0x71, 0x7e, 0x30, 0x11, 0xcd, 0xad, 0x2b, 0x58, 0x90, 0x15, 0xd4, 0x56, 0xa7, + 0x09, 0x05, 0x71, 0x32, 0x21, 0x81, 0x90, 0xe8, 0xa6, 0xad, 0x60, 0xb9, 0xee, 0xf4, 0xa1, 0x9c, + 0x9d, 0x47, 0xc7, 0x33, 0x96, 0xc4, 0x33, 0x17, 0xe2, 0x6d, 0x82, 0x25, 0x0f, 0x26, 0x1c, 0x16, + 0x4a, 0xac, 0xad, 0xce, 0x6f, 0x06, 0xd4, 0xb2, 0x99, 0xa1, 0x34, 0x8d, 0xba, 0x50, 0x9c, 0xce, + 0x2d, 0x51, 0xa2, 0xda, 0x54, 0x1b, 0x12, 0xdd, 0xcf, 0x61, 0xcd, 0xa3, 0x26, 0x94, 0xce, 0x48, + 0x12, 0x8a, 0xc2, 0xcb, 0x19, 0xb5, 0x9f, 0xc3, 0x19, 0x80, 0xb6, 0x32, 0xc1, 0x9b, 0xaf, 0x17, + 0xfc, 0x7e, 0x4e, 0x4b, 0x7e, 0xb7, 0x0c, 0xc5, 0x84, 0xb2, 0xd4, 0xe7, 0x9d, 0x5f, 0x4d, 0xb8, + 0x2b, 0x05, 0x74, 0x48, 0x82, 0xd9, 0x20, 0x7b, 0x63, 0xe3, 0x1b, 0x37, 0x68, 0xfc, 0xfc, 0x0d, + 0x1b, 0xbf, 0x01, 0x16, 0xe3, 0x24, 0xe1, 0x7a, 0x16, 0x2b, 0x03, 0xd5, 0xc1, 0xa4, 0xe1, 0x48, + 0xcf, 0x3d, 0xb1, 0x9c, 0xf5, 0xbf, 0xf5, 0xf6, 0xfe, 0x9f, 0x9f, 0xbf, 0xc5, 0xf7, 0x98, 0xbf, + 0xaf, 0x6f, 0xd3, 0xd2, 0xbb, 0xb4, 0x69, 0x79, 0xbe, 0x4d, 0x13, 0x40, 0xf3, 0xb7, 0xa0, 0xa5, + 0xd1, 0x00, 0x4b, 0x48, 0x51, 0xfd, 0xa3, 0x55, 0xb0, 0x32, 0x50, 0x13, 0xca, 0xfa, 0xd6, 0x85, + 0xf6, 0x05, 0x31, 0xb5, 0x67, 0xe7, 0x36, 0xdf, 0x7a, 0xee, 0xce, 0x1f, 0xa6, 0xfe, 0xe8, 0xf7, + 0xc4, 0x4f, 0x67, 0x77, 0x2f, 0x12, 0x14, 0xa8, 0x6e, 0x06, 0x65, 0xbc, 0x59, 0x11, 0xf9, 0x1b, + 0x28, 0xc2, 0xbc, 0x2d, 0x45, 0x14, 0x96, 0x28, 0xc2, 0x5a, 0xa2, 0x88, 0xe2, 0xfb, 0x29, 0xa2, + 0x74, 0x2b, 0x8a, 0x28, 0xbf, 0x8b, 
0x22, 0x2a, 0xf3, 0x8a, 0x48, 0xe1, 0xde, 0xc2, 0xe5, 0x68, + 0x49, 0xac, 0x43, 0xf1, 0x67, 0x89, 0x68, 0x4d, 0x68, 0xeb, 0xb6, 0x44, 0xf1, 0x70, 0x17, 0x0a, + 0xe2, 0x19, 0x80, 0x4a, 0x60, 0xe2, 0x9d, 0xe3, 0x7a, 0x0e, 0x55, 0xc0, 0xda, 0xfb, 0xe6, 0xf9, + 0xe1, 0xb3, 0xba, 0x21, 0xb0, 0xa3, 0xe7, 0x07, 0xf5, 0xbc, 0x58, 0x1c, 0x3c, 0x3d, 0xac, 0x9b, + 0x72, 0xb1, 0xf3, 0x43, 0xbd, 0x80, 0xaa, 0x50, 0x92, 0x5e, 0x5f, 0xe2, 0xba, 0xd5, 0xff, 0xd3, + 0x00, 0xeb, 0x48, 0xbc, 0xf4, 0xd0, 0xa7, 0x50, 0x54, 0x53, 0x0c, 0xad, 0x2d, 0x4e, 0x35, 0x2d, + 0xb6, 0xe6, 0xfa, 0x75, 0x58, 0x1d, 0xf3, 0x91, 0x81, 0xf6, 0x00, 0x66, 0x1d, 0x81, 0x36, 0x16, + 0xea, 0x3f, 0x3f, 0xab, 0x9a, 0xcd, 0x65, 0x94, 0xae, 0xd6, 0x13, 0xa8, 0xce, 0x15, 0x11, 0x2d, + 0xba, 0x2e, 0xc8, 0xbe, 0xf9, 0x60, 0x29, 0xa7, 0xe2, 0xf4, 0x0f, 0xa1, 0x26, 0xdf, 0x9b, 0x42, + 0xcf, 0xea, 0x64, 0x9f, 0x43, 0x15, 0xd3, 0x20, 0xe2, 0x54, 0xe2, 0x68, 0xaa, 0x8f, 0xf9, 0x67, + 0x69, 0x73, 0xed, 0x1a, 0xaa, 0x9f, 0xaf, 0xb9, 0xdd, 0x8f, 0x2e, 0xfe, 0x6d, 0xe5, 0x2e, 0x2e, + 0x5b, 0xc6, 0xab, 0xcb, 0x96, 0xf1, 0xcf, 0x65, 0xcb, 0xf8, 0xfd, 0xaa, 0x95, 0x7b, 0x75, 0xd5, + 0xca, 0xfd, 0x75, 0xd5, 0xca, 0xfd, 0x58, 0xd2, 0xcf, 0xe4, 0x41, 0x51, 0xde, 0xd0, 0xe3, 0xff, + 0x02, 0x00, 0x00, 0xff, 0xff, 0x84, 0xe1, 0x09, 0x34, 0x90, 0x0b, 0x00, 0x00, } // Reference imports to suppress errors if they are not otherwise used. @@ -1222,19 +815,19 @@ const _ = grpc.SupportPackageIsVersion4 // // For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. type StoreClient interface { - // / Series streams each Series (Labels and chunk/downsampling chunk) for given label matchers and time range. - // / - // / Series should strictly stream full series after series, optionally split by time. This means that a single frame can contain - // / partition of the single series, but once a new series is started to be streamed it means that no more data will - // / be sent for previous one. - // / Series has to be sorted. - // / - // / There is no requirements on chunk sorting, however it is recommended to have chunk sorted by chunk min time. - // / This heavily optimizes the resource usage on Querier / Federated Queries. + /// Series streams each Series (Labels and chunk/downsampling chunk) for given label matchers and time range. + /// + /// Series should strictly stream full series after series, optionally split by time. This means that a single frame can contain + /// partition of the single series, but once a new series is started to be streamed it means that no more data will + /// be sent for previous one. + /// Series has to be sorted. + /// + /// There is no requirements on chunk sorting, however it is recommended to have chunk sorted by chunk min time. + /// This heavily optimizes the resource usage on Querier / Federated Queries. Series(ctx context.Context, in *SeriesRequest, opts ...grpc.CallOption) (Store_SeriesClient, error) - // / LabelNames returns all label names constrained by the given matchers. + /// LabelNames returns all label names constrained by the given matchers. LabelNames(ctx context.Context, in *LabelNamesRequest, opts ...grpc.CallOption) (*LabelNamesResponse, error) - // / LabelValues returns all label values for given label name. + /// LabelValues returns all label values for given label name. 
LabelValues(ctx context.Context, in *LabelValuesRequest, opts ...grpc.CallOption) (*LabelValuesResponse, error) } @@ -1298,19 +891,19 @@ func (c *storeClient) LabelValues(ctx context.Context, in *LabelValuesRequest, o // StoreServer is the server API for Store service. type StoreServer interface { - // / Series streams each Series (Labels and chunk/downsampling chunk) for given label matchers and time range. - // / - // / Series should strictly stream full series after series, optionally split by time. This means that a single frame can contain - // / partition of the single series, but once a new series is started to be streamed it means that no more data will - // / be sent for previous one. - // / Series has to be sorted. - // / - // / There is no requirements on chunk sorting, however it is recommended to have chunk sorted by chunk min time. - // / This heavily optimizes the resource usage on Querier / Federated Queries. + /// Series streams each Series (Labels and chunk/downsampling chunk) for given label matchers and time range. + /// + /// Series should strictly stream full series after series, optionally split by time. This means that a single frame can contain + /// partition of the single series, but once a new series is started to be streamed it means that no more data will + /// be sent for previous one. + /// Series has to be sorted. + /// + /// There is no requirements on chunk sorting, however it is recommended to have chunk sorted by chunk min time. + /// This heavily optimizes the resource usage on Querier / Federated Queries. Series(*SeriesRequest, Store_SeriesServer) error - // / LabelNames returns all label names constrained by the given matchers. + /// LabelNames returns all label names constrained by the given matchers. LabelNames(context.Context, *LabelNamesRequest) (*LabelNamesResponse, error) - // / LabelValues returns all label values for given label name. + /// LabelValues returns all label values for given label name. 
LabelValues(context.Context, *LabelValuesRequest) (*LabelValuesResponse, error) } @@ -1506,10 +1099,6 @@ func (m *WriteResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } return len(dAtA) - i, nil } @@ -1533,10 +1122,6 @@ func (m *WriteRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if m.Replica != 0 { i = encodeVarintRpc(dAtA, i, uint64(m.Replica)) i-- @@ -1586,10 +1171,6 @@ func (m *SeriesRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if m.Limit != 0 { i = encodeVarintRpc(dAtA, i, uint64(m.Limit)) i-- @@ -1745,10 +1326,6 @@ func (m *QueryHints) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if m.Range != nil { { size, err := m.Range.MarshalToSizedBuffer(dAtA[:i]) @@ -1813,10 +1390,6 @@ func (m *ShardInfo) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if len(m.Labels) > 0 { for iNdEx := len(m.Labels) - 1; iNdEx >= 0; iNdEx-- { i -= len(m.Labels[iNdEx]) @@ -1869,10 +1442,6 @@ func (m *Func) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if len(m.Name) > 0 { i -= len(m.Name) copy(dAtA[i:], m.Name) @@ -1903,10 +1472,6 @@ func (m *Grouping) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if len(m.Labels) > 0 { for iNdEx := len(m.Labels) - 1; iNdEx >= 0; iNdEx-- { i -= len(m.Labels[iNdEx]) @@ -1949,10 +1514,6 @@ func (m *Range) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if m.Millis != 0 { i = encodeVarintRpc(dAtA, i, uint64(m.Millis)) i-- @@ -1981,10 +1542,6 @@ func (m *SeriesResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if m.Result != nil { { size := m.Result.Size() @@ -2073,10 +1630,6 @@ func (m *LabelNamesRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if m.Limit != 0 { i = encodeVarintRpc(dAtA, i, uint64(m.Limit)) i-- @@ -2165,10 +1718,6 @@ func (m *LabelNamesResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if m.Hints != nil { { size, err := m.Hints.MarshalToSizedBuffer(dAtA[:i]) @@ -2222,10 +1771,6 @@ func (m *LabelValuesRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if m.Limit != 0 { i = encodeVarintRpc(dAtA, 
i, uint64(m.Limit)) i-- @@ -2321,10 +1866,6 @@ func (m *LabelValuesResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if m.Hints != nil { { size, err := m.Hints.MarshalToSizedBuffer(dAtA[:i]) @@ -2375,9 +1916,6 @@ func (m *WriteResponse) Size() (n int) { } var l int _ = l - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -2400,9 +1938,6 @@ func (m *WriteRequest) Size() (n int) { if m.Replica != 0 { n += 1 + sovRpc(uint64(m.Replica)) } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -2470,9 +2005,6 @@ func (m *SeriesRequest) Size() (n int) { if m.Limit != 0 { n += 1 + sovRpc(uint64(m.Limit)) } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -2497,9 +2029,6 @@ func (m *QueryHints) Size() (n int) { l = m.Range.Size() n += 1 + l + sovRpc(uint64(l)) } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -2524,9 +2053,6 @@ func (m *ShardInfo) Size() (n int) { n += 1 + l + sovRpc(uint64(l)) } } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -2540,9 +2066,6 @@ func (m *Func) Size() (n int) { if l > 0 { n += 1 + l + sovRpc(uint64(l)) } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -2561,9 +2084,6 @@ func (m *Grouping) Size() (n int) { n += 1 + l + sovRpc(uint64(l)) } } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -2576,9 +2096,6 @@ func (m *Range) Size() (n int) { if m.Millis != 0 { n += 1 + sovRpc(uint64(m.Millis)) } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -2591,9 +2108,6 @@ func (m *SeriesResponse) Size() (n int) { if m.Result != nil { n += m.Result.Size() } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -2668,9 +2182,6 @@ func (m *LabelNamesRequest) Size() (n int) { if m.Limit != 0 { n += 1 + sovRpc(uint64(m.Limit)) } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -2696,9 +2207,6 @@ func (m *LabelNamesResponse) Size() (n int) { l = m.Hints.Size() n += 1 + l + sovRpc(uint64(l)) } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -2743,9 +2251,6 @@ func (m *LabelValuesRequest) Size() (n int) { if m.Limit != 0 { n += 1 + sovRpc(uint64(m.Limit)) } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -2771,9 +2276,6 @@ func (m *LabelValuesResponse) Size() (n int) { l = m.Hints.Size() n += 1 + l + sovRpc(uint64(l)) } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -2824,7 +2326,6 @@ func (m *WriteResponse) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } @@ -2892,7 +2393,7 @@ func (m *WriteRequest) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Timeseries = append(m.Timeseries, &prompb.TimeSeries{}) + m.Timeseries = append(m.Timeseries, prompb.TimeSeries{}) if err := m.Timeseries[len(m.Timeseries)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -2960,7 +2461,6 @@ func (m *WriteRequest) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) 
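			// Unknown fields are skipped here rather than retained: with
			// (gogoproto.goproto_unrecognized_all) = false there is no
			// XXX_unrecognized buffer, so unrecognized wire data is dropped
			// instead of being carried through to a later re-marshal.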
iNdEx += skippy } } @@ -3066,7 +2566,7 @@ func (m *SeriesRequest) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Matchers = append(m.Matchers, &LabelMatcher{}) + m.Matchers = append(m.Matchers, LabelMatcher{}) if err := m.Matchers[len(m.Matchers)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -3427,7 +2927,6 @@ func (m *SeriesRequest) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } @@ -3605,7 +3104,6 @@ func (m *QueryHints) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } @@ -3746,7 +3244,6 @@ func (m *ShardInfo) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } @@ -3829,7 +3326,6 @@ func (m *Func) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } @@ -3932,7 +3428,6 @@ func (m *Grouping) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } @@ -4002,7 +3497,6 @@ func (m *Range) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } @@ -4155,7 +3649,6 @@ func (m *SeriesResponse) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } @@ -4336,7 +3829,7 @@ func (m *LabelNamesRequest) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Matchers = append(m.Matchers, &LabelMatcher{}) + m.Matchers = append(m.Matchers, LabelMatcher{}) if err := m.Matchers[len(m.Matchers)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -4404,7 +3897,6 @@ func (m *LabelNamesRequest) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } @@ -4555,7 +4047,6 @@ func (m *LabelNamesResponse) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } @@ -4768,7 +4259,7 @@ func (m *LabelValuesRequest) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Matchers = append(m.Matchers, &LabelMatcher{}) + m.Matchers = append(m.Matchers, LabelMatcher{}) if err := m.Matchers[len(m.Matchers)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -4836,7 +4327,6 @@ func (m *LabelValuesRequest) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } @@ -4987,7 +4477,6 @@ func (m *LabelValuesResponse) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) 
iNdEx += skippy } } diff --git a/pkg/store/storepb/rpc.proto b/pkg/store/storepb/rpc.proto index 6a4d77fd0f..2a6313ec02 100644 --- a/pkg/store/storepb/rpc.proto +++ b/pkg/store/storepb/rpc.proto @@ -5,11 +5,23 @@ syntax = "proto3"; package thanos; import "store/storepb/types.proto"; +import "gogoproto/gogo.proto"; import "store/storepb/prompb/types.proto"; import "google/protobuf/any.proto"; option go_package = "storepb"; +option (gogoproto.sizer_all) = true; +option (gogoproto.marshaler_all) = true; +option (gogoproto.unmarshaler_all) = true; +option (gogoproto.goproto_getters_all) = false; + +// Do not generate XXX fields to reduce memory footprint and opening a door +// for zero-copy casts to/from prometheus data types. +option (gogoproto.goproto_unkeyed_all) = false; +option (gogoproto.goproto_unrecognized_all) = false; +option (gogoproto.goproto_sizecache_all) = false; + /// Store represents API against instance that stores XOR encoded values with label set metadata (e.g Prometheus metrics). service Store { /// Series streams each Series (Labels and chunk/downsampling chunk) for given label matchers and time range. @@ -40,7 +52,7 @@ message WriteResponse { } message WriteRequest { - repeated prometheus_copy.TimeSeries timeseries = 1; + repeated prometheus_copy.TimeSeries timeseries = 1 [(gogoproto.nullable) = false]; string tenant = 2; int64 replica = 3; } @@ -48,7 +60,7 @@ message WriteRequest { message SeriesRequest { int64 min_time = 1; int64 max_time = 2; - repeated LabelMatcher matchers = 3; + repeated LabelMatcher matchers = 3 [(gogoproto.nullable) = false]; int64 max_resolution_window = 4; repeated Aggr aggregates = 5; @@ -190,7 +202,7 @@ message LabelNamesRequest { // implementation of a specific store. google.protobuf.Any hints = 5; - repeated LabelMatcher matchers = 6; + repeated LabelMatcher matchers = 6 [(gogoproto.nullable) = false]; // same as in series request. repeated string without_replica_labels = 7; @@ -226,7 +238,7 @@ message LabelValuesRequest { // implementation of a specific store. google.protobuf.Any hints = 6; - repeated LabelMatcher matchers = 7; + repeated LabelMatcher matchers = 7 [(gogoproto.nullable) = false]; // same as in series request. 
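For illustration only, and not part of the generated output: with (gogoproto.nullable) = false the Go code generated from these messages stores matchers as a slice of values, []storepb.LabelMatcher, instead of pointers. The sketch below shows a caller building a request that way and draining the resulting stream, in which complete series arrive one after another and may be split across frames. The matcher values mirror the tests further down in this change; the function name is hypothetical and it assumes the imports context, io, and this storepb package.

// querySeries is a sketch of client-side usage after this change.
func querySeries(ctx context.Context, client storepb.StoreClient) error {
	stream, err := client.Series(ctx, &storepb.SeriesRequest{
		MinTime: 1,
		MaxTime: 3,
		// []storepb.LabelMatcher is a slice of values, not pointers.
		Matchers: []storepb.LabelMatcher{
			{Type: storepb.LabelMatcher_EQ, Name: "a", Value: "1"},
		},
	})
	if err != nil {
		return err
	}
	for {
		resp, err := stream.Recv()
		if err == io.EOF {
			return nil // all series have been streamed, frame by frame.
		}
		if err != nil {
			return err
		}
		if w := resp.GetWarning(); w != "" {
			continue // partial-response warning frame.
		}
		if s := resp.GetSeries(); s != nil {
			_ = s.Labels // []labelpb.Label by value; chunks in s.Chunks ([]storepb.AggrChunk).
		}
	}
}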
repeated string without_replica_labels = 8; diff --git a/pkg/store/storepb/shard_info.go b/pkg/store/storepb/shard_info.go index d95306c8be..51ea4469f0 100644 --- a/pkg/store/storepb/shard_info.go +++ b/pkg/store/storepb/shard_info.go @@ -36,7 +36,7 @@ func (s *ShardMatcher) Close() { } } -func (s *ShardMatcher) MatchesLabels(lbls []*labelpb.Label) bool { +func (s *ShardMatcher) MatchesLabels(lbls []labelpb.Label) bool { // Match all series when query is not sharded if s == nil || !s.isSharded { return true @@ -56,7 +56,7 @@ func (s *ShardMatcher) MatchesLabels(lbls []*labelpb.Label) bool { return hash%uint64(s.totalShards) == uint64(s.shardIndex) } -func shardByLabel(labelSet map[string]struct{}, lbl *labelpb.Label, groupingBy bool) bool { +func shardByLabel(labelSet map[string]struct{}, lbl labelpb.Label, groupingBy bool) bool { _, shardHasLabel := labelSet[lbl.Name] if groupingBy && shardHasLabel { return true diff --git a/pkg/store/storepb/shard_info_test.go b/pkg/store/storepb/shard_info_test.go index f5c0078b40..7ff659a8ef 100644 --- a/pkg/store/storepb/shard_info_test.go +++ b/pkg/store/storepb/shard_info_test.go @@ -23,7 +23,7 @@ func TestShardInfo_MatchesSeries(t *testing.T) { tests := []struct { name string shardInfo *ShardInfo - series []*labelpb.Label + series []labelpb.Label matches bool }{ { diff --git a/pkg/store/storepb/testutil/client.go b/pkg/store/storepb/testutil/client.go index 1c38079faf..90874842d6 100644 --- a/pkg/store/storepb/testutil/client.go +++ b/pkg/store/storepb/testutil/client.go @@ -20,12 +20,12 @@ type TestClient struct { Shardable bool WithoutReplicaLabelsEnabled bool IsLocalStore bool - StoreTSDBInfos []*infopb.TSDBInfo + StoreTSDBInfos []infopb.TSDBInfo } func (c TestClient) LabelSets() []labels.Labels { return c.ExtLset } func (c TestClient) TimeRange() (mint, maxt int64) { return c.MinTime, c.MaxTime } -func (c TestClient) TSDBInfos() []*infopb.TSDBInfo { return c.StoreTSDBInfos } +func (c TestClient) TSDBInfos() []infopb.TSDBInfo { return c.StoreTSDBInfos } func (c TestClient) SupportsSharding() bool { return c.Shardable } func (c TestClient) SupportsWithoutReplicaLabels() bool { return c.WithoutReplicaLabelsEnabled } func (c TestClient) String() string { return c.Name } diff --git a/pkg/store/storepb/testutil/series.go b/pkg/store/storepb/testutil/series.go index c4902cdcb0..b743e9b50b 100644 --- a/pkg/store/storepb/testutil/series.go +++ b/pkg/store/storepb/testutil/series.go @@ -167,7 +167,7 @@ func ReadSeriesFromBlock(t testing.TB, h tsdb.BlockReader, extLabels labels.Labe c.MaxTime = c.MinTime + int64(chEnc.NumSamples()) - 1 } - expected[len(expected)-1].Chunks = append(expected[len(expected)-1].Chunks, &storepb.AggrChunk{ + expected[len(expected)-1].Chunks = append(expected[len(expected)-1].Chunks, storepb.AggrChunk{ MinTime: c.MinTime, MaxTime: c.MaxTime, Raw: &storepb.Chunk{ diff --git a/pkg/store/storepb/types.pb.go b/pkg/store/storepb/types.pb.go index 9cccc0c48b..194f800906 100644 --- a/pkg/store/storepb/types.pb.go +++ b/pkg/store/storepb/types.pb.go @@ -9,6 +9,7 @@ import ( math "math" math_bits "math/bits" + _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" labelpb "github.com/thanos-io/thanos/pkg/store/labelpb" ) @@ -118,12 +119,9 @@ func (LabelMatcher_Type) EnumDescriptor() ([]byte, []int) { } type Chunk struct { - Type Chunk_Encoding `protobuf:"varint,1,opt,name=type,proto3,enum=thanos.Chunk_Encoding" json:"type,omitempty"` - Data []byte `protobuf:"bytes,2,opt,name=data,proto3" json:"data,omitempty"` - 
Hash uint64 `protobuf:"varint,3,opt,name=hash,proto3" json:"hash,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Type Chunk_Encoding `protobuf:"varint,1,opt,name=type,proto3,enum=thanos.Chunk_Encoding" json:"type,omitempty"` + Data []byte `protobuf:"bytes,2,opt,name=data,proto3" json:"data,omitempty"` + Hash uint64 `protobuf:"varint,3,opt,name=hash,proto3" json:"hash,omitempty"` } func (m *Chunk) Reset() { *m = Chunk{} } @@ -159,33 +157,9 @@ func (m *Chunk) XXX_DiscardUnknown() { var xxx_messageInfo_Chunk proto.InternalMessageInfo -func (m *Chunk) GetType() Chunk_Encoding { - if m != nil { - return m.Type - } - return Chunk_XOR -} - -func (m *Chunk) GetData() []byte { - if m != nil { - return m.Data - } - return nil -} - -func (m *Chunk) GetHash() uint64 { - if m != nil { - return m.Hash - } - return 0 -} - type Series struct { - Labels []*labelpb.Label `protobuf:"bytes,1,rep,name=labels,proto3" json:"labels,omitempty"` - Chunks []*AggrChunk `protobuf:"bytes,2,rep,name=chunks,proto3" json:"chunks,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Labels []labelpb.Label `protobuf:"bytes,1,rep,name=labels,proto3" json:"labels"` + Chunks []AggrChunk `protobuf:"bytes,2,rep,name=chunks,proto3" json:"chunks"` } func (m *Series) Reset() { *m = Series{} } @@ -221,32 +195,15 @@ func (m *Series) XXX_DiscardUnknown() { var xxx_messageInfo_Series proto.InternalMessageInfo -func (m *Series) GetLabels() []*labelpb.Label { - if m != nil { - return m.Labels - } - return nil -} - -func (m *Series) GetChunks() []*AggrChunk { - if m != nil { - return m.Chunks - } - return nil -} - type AggrChunk struct { - MinTime int64 `protobuf:"varint,1,opt,name=min_time,json=minTime,proto3" json:"min_time,omitempty"` - MaxTime int64 `protobuf:"varint,2,opt,name=max_time,json=maxTime,proto3" json:"max_time,omitempty"` - Raw *Chunk `protobuf:"bytes,3,opt,name=raw,proto3" json:"raw,omitempty"` - Count *Chunk `protobuf:"bytes,4,opt,name=count,proto3" json:"count,omitempty"` - Sum *Chunk `protobuf:"bytes,5,opt,name=sum,proto3" json:"sum,omitempty"` - Min *Chunk `protobuf:"bytes,6,opt,name=min,proto3" json:"min,omitempty"` - Max *Chunk `protobuf:"bytes,7,opt,name=max,proto3" json:"max,omitempty"` - Counter *Chunk `protobuf:"bytes,8,opt,name=counter,proto3" json:"counter,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + MinTime int64 `protobuf:"varint,1,opt,name=min_time,json=minTime,proto3" json:"min_time,omitempty"` + MaxTime int64 `protobuf:"varint,2,opt,name=max_time,json=maxTime,proto3" json:"max_time,omitempty"` + Raw *Chunk `protobuf:"bytes,3,opt,name=raw,proto3" json:"raw,omitempty"` + Count *Chunk `protobuf:"bytes,4,opt,name=count,proto3" json:"count,omitempty"` + Sum *Chunk `protobuf:"bytes,5,opt,name=sum,proto3" json:"sum,omitempty"` + Min *Chunk `protobuf:"bytes,6,opt,name=min,proto3" json:"min,omitempty"` + Max *Chunk `protobuf:"bytes,7,opt,name=max,proto3" json:"max,omitempty"` + Counter *Chunk `protobuf:"bytes,8,opt,name=counter,proto3" json:"counter,omitempty"` } func (m *AggrChunk) Reset() { *m = AggrChunk{} } @@ -282,70 +239,11 @@ func (m *AggrChunk) XXX_DiscardUnknown() { var xxx_messageInfo_AggrChunk proto.InternalMessageInfo -func (m *AggrChunk) GetMinTime() int64 { - if m != nil { - return m.MinTime - } - return 0 -} - -func (m *AggrChunk) GetMaxTime() int64 { - if m != nil { - 
return m.MaxTime - } - return 0 -} - -func (m *AggrChunk) GetRaw() *Chunk { - if m != nil { - return m.Raw - } - return nil -} - -func (m *AggrChunk) GetCount() *Chunk { - if m != nil { - return m.Count - } - return nil -} - -func (m *AggrChunk) GetSum() *Chunk { - if m != nil { - return m.Sum - } - return nil -} - -func (m *AggrChunk) GetMin() *Chunk { - if m != nil { - return m.Min - } - return nil -} - -func (m *AggrChunk) GetMax() *Chunk { - if m != nil { - return m.Max - } - return nil -} - -func (m *AggrChunk) GetCounter() *Chunk { - if m != nil { - return m.Counter - } - return nil -} - // Matcher specifies a rule, which can match or set of labels or not. type LabelMatcher struct { - Type LabelMatcher_Type `protobuf:"varint,1,opt,name=type,proto3,enum=thanos.LabelMatcher_Type" json:"type,omitempty"` - Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` - Value string `protobuf:"bytes,3,opt,name=value,proto3" json:"value,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Type LabelMatcher_Type `protobuf:"varint,1,opt,name=type,proto3,enum=thanos.LabelMatcher_Type" json:"type,omitempty"` + Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` + Value string `protobuf:"bytes,3,opt,name=value,proto3" json:"value,omitempty"` } func (m *LabelMatcher) Reset() { *m = LabelMatcher{} } @@ -381,27 +279,6 @@ func (m *LabelMatcher) XXX_DiscardUnknown() { var xxx_messageInfo_LabelMatcher proto.InternalMessageInfo -func (m *LabelMatcher) GetType() LabelMatcher_Type { - if m != nil { - return m.Type - } - return LabelMatcher_EQ -} - -func (m *LabelMatcher) GetName() string { - if m != nil { - return m.Name - } - return "" -} - -func (m *LabelMatcher) GetValue() string { - if m != nil { - return m.Value - } - return "" -} - func init() { proto.RegisterEnum("thanos.PartialResponseStrategy", PartialResponseStrategy_name, PartialResponseStrategy_value) proto.RegisterEnum("thanos.Chunk_Encoding", Chunk_Encoding_name, Chunk_Encoding_value) @@ -415,38 +292,40 @@ func init() { func init() { proto.RegisterFile("store/storepb/types.proto", fileDescriptor_121fba57de02d8e0) } var fileDescriptor_121fba57de02d8e0 = []byte{ - // 487 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x6c, 0x93, 0xdd, 0x6e, 0xd3, 0x30, - 0x14, 0xc7, 0xe7, 0x24, 0x4d, 0xdb, 0xc3, 0x06, 0xc1, 0x7c, 0xb9, 0x5c, 0x94, 0x2a, 0x08, 0x51, - 0x26, 0x91, 0x49, 0xe3, 0x82, 0xeb, 0x0c, 0x85, 0x0f, 0x69, 0x5b, 0x99, 0x1b, 0x09, 0xb4, 0x9b, - 0xc9, 0xed, 0xac, 0x26, 0xa2, 0x71, 0xa2, 0xd8, 0x81, 0xf6, 0x31, 0x10, 0x3c, 0x14, 0x97, 0x3c, - 0x02, 0xea, 0x93, 0x20, 0x3b, 0x29, 0xa3, 0x52, 0x6e, 0xa2, 0x93, 0xff, 0xff, 0x77, 0xec, 0x73, - 0x8e, 0x8e, 0x61, 0x20, 0x55, 0x5e, 0xf2, 0x23, 0xf3, 0x2d, 0x66, 0x47, 0x6a, 0x5d, 0x70, 0x19, - 0x14, 0x65, 0xae, 0x72, 0xec, 0xaa, 0x84, 0x89, 0x5c, 0x3e, 0x6e, 0x90, 0x25, 0x9b, 0xf1, 0xe5, - 0x2e, 0xe2, 0xff, 0x44, 0xd0, 0x79, 0x93, 0x54, 0xe2, 0x0b, 0x3e, 0x04, 0x47, 0x1b, 0x04, 0x8d, - 0xd0, 0xf8, 0xf6, 0xf1, 0xc3, 0xa0, 0xce, 0x0d, 0x8c, 0x19, 0x44, 0x62, 0x9e, 0x5f, 0xa7, 0x62, - 0x41, 0x0d, 0x83, 0x31, 0x38, 0xd7, 0x4c, 0x31, 0x62, 0x8d, 0xd0, 0x78, 0x9f, 0x9a, 0x58, 0x6b, - 0x09, 0x93, 0x09, 0xb1, 0x47, 0x68, 0xec, 0x50, 0x13, 0xfb, 0xaf, 0xa1, 0xb7, 0xcd, 0xc4, 0x5d, - 0xb0, 0x3f, 0x4f, 0xa8, 0xb7, 0x87, 0x0f, 0xa0, 0xff, 0xfe, 0xc3, 0x34, 0x9e, 0xbc, 0xa3, 0xe1, - 0x99, 0x87, 0xf0, 0x3d, 0xb8, 0xf3, 0xf6, 0x74, 0x12, 0xc6, 0x57, 
0x37, 0xa2, 0xe5, 0x5f, 0x82, - 0x3b, 0xe5, 0x65, 0xca, 0x25, 0x7e, 0x06, 0xae, 0xa9, 0x5b, 0x12, 0x34, 0xb2, 0xc7, 0xb7, 0x8e, - 0x0f, 0xb6, 0x85, 0x9d, 0x6a, 0x95, 0x36, 0x26, 0x7e, 0x01, 0xee, 0x5c, 0x57, 0x2a, 0x89, 0x65, - 0xb0, 0xbb, 0x5b, 0x2c, 0x5c, 0x2c, 0x4a, 0xd3, 0x03, 0x6d, 0x00, 0xff, 0x87, 0x05, 0xfd, 0x7f, - 0x2a, 0x1e, 0x40, 0x2f, 0x4b, 0xc5, 0x95, 0x4a, 0xb3, 0xba, 0x75, 0x9b, 0x76, 0xb3, 0x54, 0xc4, - 0x69, 0xc6, 0x8d, 0xc5, 0x56, 0xb5, 0x65, 0x35, 0x16, 0x5b, 0x19, 0xeb, 0x09, 0xd8, 0x25, 0xfb, - 0x66, 0x7a, 0xfd, 0xaf, 0xa4, 0xfa, 0x1e, 0xed, 0xe0, 0xa7, 0xd0, 0x99, 0xe7, 0x95, 0x50, 0xc4, - 0x69, 0x43, 0x6a, 0x4f, 0x9f, 0x22, 0xab, 0x8c, 0x74, 0x5a, 0x4f, 0x91, 0x55, 0xa6, 0x81, 0x2c, - 0x15, 0xc4, 0x6d, 0x05, 0xb2, 0x54, 0x18, 0x80, 0xad, 0x48, 0xb7, 0x1d, 0x60, 0x2b, 0xfc, 0x1c, - 0xba, 0xe6, 0x2e, 0x5e, 0x92, 0x5e, 0x1b, 0xb4, 0x75, 0xfd, 0xef, 0x08, 0xf6, 0xcd, 0x48, 0xcf, - 0x98, 0x9a, 0x27, 0xbc, 0xc4, 0x2f, 0x77, 0xf6, 0x61, 0xb0, 0x33, 0xf6, 0x86, 0x09, 0xe2, 0x75, - 0xc1, 0x6f, 0x56, 0x42, 0xb0, 0x66, 0x50, 0x7d, 0x6a, 0x62, 0x7c, 0x1f, 0x3a, 0x5f, 0xd9, 0xb2, - 0xe2, 0x66, 0x4e, 0x7d, 0x5a, 0xff, 0xf8, 0x63, 0x70, 0x74, 0x1e, 0x76, 0xc1, 0x8a, 0x2e, 0xbc, - 0x3d, 0xbd, 0x18, 0xe7, 0xd1, 0x85, 0x87, 0xb4, 0x40, 0x23, 0xcf, 0x32, 0x02, 0x8d, 0x3c, 0xfb, - 0x30, 0x80, 0x47, 0x1f, 0x59, 0xa9, 0x52, 0xb6, 0xa4, 0x5c, 0x16, 0xb9, 0x90, 0x7c, 0xaa, 0x4a, - 0xa6, 0xf8, 0x62, 0x8d, 0x7b, 0xe0, 0x7c, 0x0a, 0xe9, 0xb9, 0xb7, 0x87, 0xfb, 0xd0, 0x09, 0x4f, - 0x26, 0x34, 0xf6, 0xd0, 0xc9, 0x83, 0x5f, 0x9b, 0x21, 0xfa, 0xbd, 0x19, 0xa2, 0x3f, 0x9b, 0x21, - 0xba, 0xec, 0x36, 0x4f, 0x62, 0xe6, 0x9a, 0x55, 0x7f, 0xf5, 0x37, 0x00, 0x00, 0xff, 0xff, 0xd0, - 0x27, 0x66, 0x93, 0x2a, 0x03, 0x00, 0x00, + // 528 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x6c, 0x93, 0xcf, 0x6e, 0xd3, 0x40, + 0x10, 0xc6, 0xbd, 0xb6, 0xe3, 0x24, 0x43, 0x0b, 0x66, 0xa9, 0x60, 0xd3, 0x83, 0x1b, 0x19, 0x21, + 0xa2, 0x22, 0x1c, 0xa9, 0x1c, 0x38, 0x27, 0x28, 0xfc, 0x91, 0xda, 0x86, 0x6e, 0x22, 0x81, 0xb8, + 0x54, 0x9b, 0x74, 0x71, 0x2c, 0xe2, 0x75, 0xe4, 0xdd, 0x40, 0xf2, 0x16, 0x20, 0x6e, 0x3c, 0x51, + 0x8e, 0x3d, 0x72, 0x42, 0x90, 0xbc, 0x08, 0xf2, 0xda, 0xa1, 0x44, 0xf2, 0x25, 0x9a, 0x7c, 0xdf, + 0x6f, 0x66, 0x67, 0xc7, 0xb3, 0xd0, 0x90, 0x2a, 0x49, 0x79, 0x5b, 0xff, 0xce, 0x46, 0x6d, 0xb5, + 0x9c, 0x71, 0x19, 0xcc, 0xd2, 0x44, 0x25, 0xd8, 0x51, 0x13, 0x26, 0x12, 0x79, 0x78, 0x10, 0x26, + 0x61, 0xa2, 0xa5, 0x76, 0x16, 0xe5, 0xee, 0x61, 0x91, 0x38, 0x65, 0x23, 0x3e, 0xdd, 0x4d, 0xf4, + 0x7f, 0x20, 0xa8, 0xbc, 0x98, 0xcc, 0xc5, 0x27, 0x7c, 0x0c, 0x76, 0x66, 0x10, 0xd4, 0x44, 0xad, + 0xdb, 0x27, 0xf7, 0x83, 0xbc, 0x62, 0xa0, 0xcd, 0xa0, 0x27, 0xc6, 0xc9, 0x55, 0x24, 0x42, 0xaa, + 0x19, 0x8c, 0xc1, 0xbe, 0x62, 0x8a, 0x11, 0xb3, 0x89, 0x5a, 0x7b, 0x54, 0xc7, 0x98, 0x80, 0x3d, + 0x61, 0x72, 0x42, 0xac, 0x26, 0x6a, 0xd9, 0x5d, 0x7b, 0xf5, 0xeb, 0x08, 0x51, 0xad, 0xf8, 0xcf, + 0xa1, 0xb6, 0xcd, 0xc7, 0x55, 0xb0, 0xde, 0xf7, 0xa9, 0x6b, 0xe0, 0x7d, 0xa8, 0xbf, 0x7e, 0x33, + 0x18, 0xf6, 0x5f, 0xd1, 0xce, 0x99, 0x8b, 0xf0, 0x3d, 0xb8, 0xf3, 0xf2, 0xb4, 0xdf, 0x19, 0x5e, + 0xde, 0x88, 0xa6, 0xff, 0x11, 0x9c, 0x01, 0x4f, 0x23, 0x2e, 0xf1, 0x13, 0x70, 0x74, 0xf7, 0x92, + 0xa0, 0xa6, 0xd5, 0xba, 0x75, 0xb2, 0xbf, 0x6d, 0xef, 0x34, 0x53, 0xf5, 0x69, 0x06, 0x2d, 0x10, + 0xdc, 0x06, 0x67, 0x9c, 0x75, 0x2d, 0x89, 0xa9, 0xe1, 0xbb, 0x5b, 0xb8, 0x13, 0x86, 0xa9, 0xbe, + 0xcf, 0x36, 0x21, 0xc7, 0xfc, 0xef, 0x26, 0xd4, 0xff, 0x79, 0xb8, 0x01, 0xb5, 0x38, 0x12, 0x97, + 
0x2a, 0x8a, 0xf3, 0x61, 0x58, 0xb4, 0x1a, 0x47, 0x62, 0x18, 0xc5, 0x5c, 0x5b, 0x6c, 0x91, 0x5b, + 0x66, 0x61, 0xb1, 0x85, 0xb6, 0x8e, 0xc0, 0x4a, 0xd9, 0x17, 0x7d, 0xfb, 0xff, 0xda, 0xd3, 0x15, + 0x69, 0xe6, 0xe0, 0x87, 0x50, 0x19, 0x27, 0x73, 0xa1, 0x88, 0x5d, 0x86, 0xe4, 0x5e, 0x56, 0x45, + 0xce, 0x63, 0x52, 0x29, 0xad, 0x22, 0xe7, 0x71, 0x06, 0xc4, 0x91, 0x20, 0x4e, 0x29, 0x10, 0x47, + 0x42, 0x03, 0x6c, 0x41, 0xaa, 0xe5, 0x00, 0x5b, 0xe0, 0xc7, 0x50, 0xd5, 0x67, 0xf1, 0x94, 0xd4, + 0xca, 0xa0, 0xad, 0xeb, 0x7f, 0x43, 0xb0, 0xa7, 0xc7, 0x7b, 0xc6, 0xd4, 0x78, 0xc2, 0x53, 0xfc, + 0x74, 0x67, 0x43, 0x1a, 0x3b, 0x9f, 0xa0, 0x60, 0x82, 0xe1, 0x72, 0xc6, 0x6f, 0x96, 0x44, 0xb0, + 0x62, 0x50, 0x75, 0xaa, 0x63, 0x7c, 0x00, 0x95, 0xcf, 0x6c, 0x3a, 0xe7, 0x7a, 0x4e, 0x75, 0x9a, + 0xff, 0xf1, 0x5b, 0x60, 0x67, 0x79, 0xd8, 0x01, 0xb3, 0x77, 0xe1, 0x1a, 0xd9, 0x92, 0x9c, 0xf7, + 0x2e, 0x5c, 0x94, 0x09, 0xb4, 0xe7, 0x9a, 0x5a, 0xa0, 0x3d, 0xd7, 0x3a, 0x0e, 0xe0, 0xc1, 0x5b, + 0x96, 0xaa, 0x88, 0x4d, 0x29, 0x97, 0xb3, 0x44, 0x48, 0x3e, 0x50, 0x29, 0x53, 0x3c, 0x5c, 0xe2, + 0x1a, 0xd8, 0xef, 0x3a, 0xf4, 0xdc, 0x35, 0x70, 0x1d, 0x2a, 0x9d, 0x6e, 0x9f, 0x0e, 0x5d, 0xd4, + 0x7d, 0xb4, 0xfa, 0xe3, 0x19, 0xab, 0xb5, 0x87, 0xae, 0xd7, 0x1e, 0xfa, 0xbd, 0xf6, 0xd0, 0xd7, + 0x8d, 0x67, 0x5c, 0x6f, 0x3c, 0xe3, 0xe7, 0xc6, 0x33, 0x3e, 0x54, 0x8b, 0xa7, 0x34, 0x72, 0xf4, + 0x63, 0x78, 0xf6, 0x37, 0x00, 0x00, 0xff, 0xff, 0x20, 0x34, 0x7d, 0xbe, 0x62, 0x03, 0x00, 0x00, } func (m *Chunk) Marshal() (dAtA []byte, err error) { @@ -469,10 +348,6 @@ func (m *Chunk) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if m.Hash != 0 { i = encodeVarintTypes(dAtA, i, uint64(m.Hash)) i-- @@ -513,10 +388,6 @@ func (m *Series) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if len(m.Chunks) > 0 { for iNdEx := len(m.Chunks) - 1; iNdEx >= 0; iNdEx-- { { @@ -568,10 +439,6 @@ func (m *AggrChunk) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if m.Counter != nil { { size, err := m.Counter.MarshalToSizedBuffer(dAtA[:i]) @@ -677,10 +544,6 @@ func (m *LabelMatcher) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if len(m.Value) > 0 { i -= len(m.Value) copy(dAtA[i:], m.Value) @@ -730,9 +593,6 @@ func (m *Chunk) Size() (n int) { if m.Hash != 0 { n += 1 + sovTypes(uint64(m.Hash)) } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -754,9 +614,6 @@ func (m *Series) Size() (n int) { n += 1 + l + sovTypes(uint64(l)) } } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -796,9 +653,6 @@ func (m *AggrChunk) Size() (n int) { l = m.Counter.Size() n += 1 + l + sovTypes(uint64(l)) } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -819,9 +673,6 @@ func (m *LabelMatcher) Size() (n int) { if l > 0 { n += 1 + l + sovTypes(uint64(l)) } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -944,7 +795,6 @@ func (m *Chunk) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = 
append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } @@ -1012,7 +862,7 @@ func (m *Series) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Labels = append(m.Labels, &labelpb.Label{}) + m.Labels = append(m.Labels, labelpb.Label{}) if err := m.Labels[len(m.Labels)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -1046,7 +896,7 @@ func (m *Series) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Chunks = append(m.Chunks, &AggrChunk{}) + m.Chunks = append(m.Chunks, AggrChunk{}) if err := m.Chunks[len(m.Chunks)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -1063,7 +913,6 @@ func (m *Series) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } @@ -1368,7 +1217,6 @@ func (m *AggrChunk) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } @@ -1502,7 +1350,6 @@ func (m *LabelMatcher) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } diff --git a/pkg/store/storepb/types.proto b/pkg/store/storepb/types.proto index cb426c7895..2435b16f15 100644 --- a/pkg/store/storepb/types.proto +++ b/pkg/store/storepb/types.proto @@ -6,8 +6,19 @@ package thanos; option go_package = "storepb"; +import "gogoproto/gogo.proto"; import "store/labelpb/types.proto"; +option (gogoproto.sizer_all) = true; +option (gogoproto.marshaler_all) = true; +option (gogoproto.unmarshaler_all) = true; +option (gogoproto.goproto_getters_all) = false; + +// Do not generate XXX fields to reduce memory footprint and opening a door +// for zero-copy casts to/from prometheus data types. 
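To make the intent of the comment above concrete, the sketch below shows the kind of zero-copy cast it alludes to; this is an assumption about downstream usage, not code produced by this change. It only holds while labelpb.Label and Prometheus' labels.Label share the exact struct{ Name, Value string } layout, which is what dropping the XXX_* fields preserves.

// toPromLabels reinterprets a value slice of labelpb.Label as a slice of
// labels.Label without copying. Hypothetical helper; requires imports
// "unsafe", "github.com/prometheus/prometheus/model/labels" and
// "github.com/thanos-io/thanos/pkg/store/labelpb".
func toPromLabels(ls []labelpb.Label) []labels.Label {
	// Safe only while both types are exactly struct{ Name, Value string }.
	return *(*[]labels.Label)(unsafe.Pointer(&ls))
}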
+option (gogoproto.goproto_unkeyed_all) = false; +option (gogoproto.goproto_unrecognized_all) = false; +option (gogoproto.goproto_sizecache_all) = false; message Chunk { enum Encoding { @@ -17,12 +28,12 @@ message Chunk { } Encoding type = 1; bytes data = 2; - uint64 hash = 3; + uint64 hash = 3 [(gogoproto.nullable) = true]; } message Series { - repeated Label labels = 1; - repeated AggrChunk chunks = 2; + repeated Label labels = 1 [(gogoproto.nullable) = false]; + repeated AggrChunk chunks = 2 [(gogoproto.nullable) = false]; } message AggrChunk { diff --git a/pkg/store/tsdb.go b/pkg/store/tsdb.go index 8853a39f20..589c05fc5c 100644 --- a/pkg/store/tsdb.go +++ b/pkg/store/tsdb.go @@ -93,11 +93,11 @@ func (s *TSDBStore) getExtLset() labels.Labels { return s.extLset } -func (s *TSDBStore) LabelSet() []*labelpb.LabelSet { +func (s *TSDBStore) LabelSet() []labelpb.LabelSet { labels := labelpb.PromLabelsToLabelpbLabels(s.getExtLset()) - labelSets := []*labelpb.LabelSet{} + labelSets := []labelpb.LabelSet{} if len(labels) > 0 { - labelSets = append(labelSets, &labelpb.LabelSet{ + labelSets = append(labelSets, labelpb.LabelSet{ Labels: labels, }) } @@ -105,16 +105,16 @@ func (s *TSDBStore) LabelSet() []*labelpb.LabelSet { return labelSets } -func (p *TSDBStore) TSDBInfos() []*infopb.TSDBInfo { +func (p *TSDBStore) TSDBInfos() []infopb.TSDBInfo { labels := p.LabelSet() if len(labels) == 0 { - return []*infopb.TSDBInfo{} + return []infopb.TSDBInfo{} } mint, maxt := p.TimeRange() - return []*infopb.TSDBInfo{ + return []infopb.TSDBInfo{ { - Labels: &labelpb.LabelSet{ + Labels: labelpb.LabelSet{ Labels: labels[0].Labels, }, MinTime: mint, @@ -236,7 +236,7 @@ func (s *TSDBStore) Series(r *storepb.SeriesRequest, seriesSrv storepb.Store_Ser } frameBytesLeft := bytesLeftForChunks - seriesChunks := []*storepb.AggrChunk{} + seriesChunks := []storepb.AggrChunk{} chIter := series.Iterator(nil) isNext := chIter.Next() for isNext { @@ -257,7 +257,7 @@ func (s *TSDBStore) Series(r *storepb.SeriesRequest, seriesSrv storepb.Store_Ser }, } frameBytesLeft -= c.Size() - seriesChunks = append(seriesChunks, &c) + seriesChunks = append(seriesChunks, c) // We are fine with minor inaccuracy of max bytes per frame. The inaccuracy will be max of full chunk size. isNext = chIter.Next() @@ -270,7 +270,7 @@ func (s *TSDBStore) Series(r *storepb.SeriesRequest, seriesSrv storepb.Store_Ser if isNext { frameBytesLeft = bytesLeftForChunks - seriesChunks = make([]*storepb.AggrChunk, 0, len(seriesChunks)) + seriesChunks = make([]storepb.AggrChunk, 0, len(seriesChunks)) } } if err := chIter.Err(); err != nil { diff --git a/pkg/store/tsdb_selector.go b/pkg/store/tsdb_selector.go index df49412cf7..463ab96b14 100644 --- a/pkg/store/tsdb_selector.go +++ b/pkg/store/tsdb_selector.go @@ -57,7 +57,7 @@ func (sr *TSDBSelector) runRelabelRules(labelSets []labels.Labels) []labels.Labe } // MatchersForLabelSets generates a list of label matchers for the given label sets. -func MatchersForLabelSets(labelSets []labels.Labels) []*storepb.LabelMatcher { +func MatchersForLabelSets(labelSets []labels.Labels) []storepb.LabelMatcher { var ( // labelNameCounts tracks how many times a label name appears in the given label // sets. 
This is used to make sure that an explicit empty value matcher is @@ -86,11 +86,11 @@ func MatchersForLabelSets(labelSets []labels.Labels) []*storepb.LabelMatcher { } } - matchers := make([]*storepb.LabelMatcher, 0, len(labelNameValues)) + matchers := make([]storepb.LabelMatcher, 0, len(labelNameValues)) for lblName, lblVals := range labelNameValues { values := maps.Keys(lblVals) sort.Strings(values) - matcher := &storepb.LabelMatcher{ + matcher := storepb.LabelMatcher{ Name: lblName, Value: strings.Join(values, "|"), Type: storepb.LabelMatcher_RE, diff --git a/pkg/store/tsdb_selector_test.go b/pkg/store/tsdb_selector_test.go index 4a9cb8af83..745c4edb2e 100644 --- a/pkg/store/tsdb_selector_test.go +++ b/pkg/store/tsdb_selector_test.go @@ -17,19 +17,19 @@ func TestMatchersForLabelSets(t *testing.T) { tests := []struct { name string labelSets []labels.Labels - want []*storepb.LabelMatcher + want []storepb.LabelMatcher }{ { name: "empty label sets", labelSets: nil, - want: []*storepb.LabelMatcher{}, + want: []storepb.LabelMatcher{}, }, { name: "single label set with single label", labelSets: []labels.Labels{ labels.FromStrings("a", "1"), }, - want: []*storepb.LabelMatcher{ + want: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_RE, Name: "a", Value: "1"}, }, }, @@ -39,7 +39,7 @@ func TestMatchersForLabelSets(t *testing.T) { labels.FromStrings("a", "1"), labels.FromStrings("a", "2"), }, - want: []*storepb.LabelMatcher{ + want: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_RE, Name: "a", Value: "1|2"}, }, }, @@ -48,7 +48,7 @@ func TestMatchersForLabelSets(t *testing.T) { labelSets: []labels.Labels{ labels.FromStrings("a", "1", "b", "2"), }, - want: []*storepb.LabelMatcher{ + want: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_RE, Name: "a", Value: "1"}, {Type: storepb.LabelMatcher_RE, Name: "b", Value: "2"}, }, @@ -59,7 +59,7 @@ func TestMatchersForLabelSets(t *testing.T) { labels.FromStrings("a", "1"), labels.FromStrings("a", "2"), }, - want: []*storepb.LabelMatcher{ + want: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_RE, Name: "a", Value: "1|2"}, }, }, @@ -69,7 +69,7 @@ func TestMatchersForLabelSets(t *testing.T) { labels.FromStrings("a", "1"), labels.FromStrings("b", "2"), }, - want: []*storepb.LabelMatcher{ + want: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_RE, Name: "a", Value: "1|^$"}, {Type: storepb.LabelMatcher_RE, Name: "b", Value: "2|^$"}, }, diff --git a/pkg/store/tsdb_test.go b/pkg/store/tsdb_test.go index c8c19ab1ee..784e4cf04a 100644 --- a/pkg/store/tsdb_test.go +++ b/pkg/store/tsdb_test.go @@ -55,7 +55,7 @@ func TestTSDBStore_Series_ChunkChecksum(t *testing.T) { req := &storepb.SeriesRequest{ MinTime: 1, MaxTime: 3, - Matchers: []*storepb.LabelMatcher{ + Matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "a", Value: "1"}, }, } @@ -101,7 +101,7 @@ func TestTSDBStore_Series(t *testing.T) { req: &storepb.SeriesRequest{ MinTime: 1, MaxTime: 3, - Matchers: []*storepb.LabelMatcher{ + Matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "a", Value: "1"}, }, }, @@ -117,7 +117,7 @@ func TestTSDBStore_Series(t *testing.T) { req: &storepb.SeriesRequest{ MinTime: 1, MaxTime: 2, - Matchers: []*storepb.LabelMatcher{ + Matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "a", Value: "1"}, }, }, @@ -133,7 +133,7 @@ func TestTSDBStore_Series(t *testing.T) { req: &storepb.SeriesRequest{ MinTime: 4, MaxTime: 6, - Matchers: []*storepb.LabelMatcher{ + Matchers: []storepb.LabelMatcher{ {Type: 
storepb.LabelMatcher_EQ, Name: "a", Value: "1"}, }, }, @@ -144,7 +144,7 @@ func TestTSDBStore_Series(t *testing.T) { req: &storepb.SeriesRequest{ MinTime: 1, MaxTime: 3, - Matchers: []*storepb.LabelMatcher{ + Matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "region", Value: "eu-west"}, }, }, @@ -155,7 +155,7 @@ func TestTSDBStore_Series(t *testing.T) { req: &storepb.SeriesRequest{ MinTime: 1, MaxTime: 3, - Matchers: []*storepb.LabelMatcher{ + Matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "b", Value: "1"}, }, }, @@ -166,7 +166,7 @@ func TestTSDBStore_Series(t *testing.T) { req: &storepb.SeriesRequest{ MinTime: 1, MaxTime: 3, - Matchers: []*storepb.LabelMatcher{ + Matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "a", Value: "1"}, }, SkipChunks: true, @@ -259,7 +259,7 @@ func TestTSDBStore_SeriesAccessWithDelegateClosing(t *testing.T) { testutil.Ok(t, store.Series(&storepb.SeriesRequest{ MinTime: 0, MaxTime: math.MaxInt64, - Matchers: []*storepb.LabelMatcher{ + Matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "foo", Value: "bar"}, }, PartialResponseStrategy: storepb.PartialResponseStrategy_ABORT, @@ -421,7 +421,7 @@ func TestTSDBStore_SeriesAccessWithoutDelegateClosing(t *testing.T) { testutil.Ok(t, store.Series(&storepb.SeriesRequest{ MinTime: 0, MaxTime: math.MaxInt64, - Matchers: []*storepb.LabelMatcher{ + Matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "foo", Value: "bar"}, }, PartialResponseStrategy: storepb.PartialResponseStrategy_ABORT, @@ -563,16 +563,16 @@ func benchTSDBStoreSeries(t testutil.TB, totalSamples, totalSeries int) { // Add external labels & frame it. s := r.GetSeries() bytesLeftForChunks := store.maxBytesPerFrame - lbls := make([]*labelpb.Label, 0, len(s.Labels)+extLabels.Len()) + lbls := make([]labelpb.Label, 0, len(s.Labels)+extLabels.Len()) for _, l := range s.Labels { - lbls = append(lbls, &labelpb.Label{ + lbls = append(lbls, labelpb.Label{ Name: l.Name, Value: l.Value, }) bytesLeftForChunks -= lbls[len(lbls)-1].Size() } extLabels.Range(func(l labels.Label) { - lbls = append(lbls, &labelpb.Label{ + lbls = append(lbls, labelpb.Label{ Name: l.Name, Value: l.Value, }) @@ -609,7 +609,7 @@ func benchTSDBStoreSeries(t testutil.TB, totalSamples, totalSeries int) { Req: &storepb.SeriesRequest{ MinTime: 0, MaxTime: math.MaxInt64, - Matchers: []*storepb.LabelMatcher{ + Matchers: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "foo", Value: "bar"}, }, PartialResponseStrategy: storepb.PartialResponseStrategy_ABORT, diff --git a/pkg/targets/prometheus_test.go b/pkg/targets/prometheus_test.go index 160a8ac44e..b6324948dd 100644 --- a/pkg/targets/prometheus_test.go +++ b/pkg/targets/prometheus_test.go @@ -73,7 +73,7 @@ scrape_configs: expected := &targetspb.TargetDiscovery{ ActiveTargets: []*targetspb.ActiveTarget{ { - DiscoveredLabels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + DiscoveredLabels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "__address__", Value: p.Addr()}, {Name: "__metrics_path__", Value: "/metrics"}, {Name: "__scheme__", Value: "http"}, @@ -82,7 +82,7 @@ scrape_configs: {Name: "job", Value: "myself"}, {Name: "replica", Value: "test1"}, }}, - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "instance", Value: p.Addr()}, {Name: "job", Value: "myself"}, {Name: "replica", Value: "test1"}, @@ -91,13 +91,13 @@ scrape_configs: ScrapeUrl: fmt.Sprintf("http://%s/metrics", 
p.Addr()), GlobalUrl: "", Health: targetspb.TargetHealth_UP, - LastScrape: nil, + LastScrape: time.Time{}, LastScrapeDuration: 0, }, }, DroppedTargets: []*targetspb.DroppedTarget{ { - DiscoveredLabels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + DiscoveredLabels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "__address__", Value: "localhost:80"}, {Name: "__metrics_path__", Value: "/metrics"}, {Name: "__scheme__", Value: "http"}, @@ -148,7 +148,7 @@ scrape_configs: for i := range targets.ActiveTargets { targets.ActiveTargets[i].LastScrapeDuration = 0 - targets.ActiveTargets[i].LastScrape = nil + targets.ActiveTargets[i].LastScrape = time.Time{} targets.ActiveTargets[i].LastError = "" targets.ActiveTargets[i].GlobalUrl = "" } diff --git a/pkg/targets/targets.go b/pkg/targets/targets.go index 4b0874a75d..8dfe9431f7 100644 --- a/pkg/targets/targets.go +++ b/pkg/targets/targets.go @@ -169,8 +169,8 @@ func dedupActiveTargets(activeTargets []*targetspb.ActiveTarget, replicaLabels m return activeTargets[:i+1] } -func removeReplicaLabels(labels []*storepb.Label, replicaLabels map[string]struct{}) []*storepb.Label { - newLabels := make([]*storepb.Label, 0, len(labels)) +func removeReplicaLabels(labels []storepb.Label, replicaLabels map[string]struct{}) []storepb.Label { + newLabels := make([]storepb.Label, 0, len(labels)) for _, l := range labels { if _, ok := replicaLabels[l.Name]; !ok { newLabels = append(newLabels, l) diff --git a/pkg/targets/targets_test.go b/pkg/targets/targets_test.go index 0cdc6f961c..b4b9d926df 100644 --- a/pkg/targets/targets_test.go +++ b/pkg/targets/targets_test.go @@ -8,7 +8,6 @@ import ( "time" "github.com/efficientgo/core/testutil" - "github.com/thanos-io/thanos/pkg/rules/rulespb" "github.com/thanos-io/thanos/pkg/store/labelpb" "github.com/thanos-io/thanos/pkg/targets/targetspb" ) @@ -30,7 +29,7 @@ func TestDedupTargets(t *testing.T) { targets: &targetspb.TargetDiscovery{ DroppedTargets: []*targetspb.DroppedTarget{ { - DiscoveredLabels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + DiscoveredLabels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "__address__", Value: "localhost:80"}, {Name: "__metrics_path__", Value: "/metrics"}, {Name: "__scheme__", Value: "http"}, @@ -40,7 +39,7 @@ func TestDedupTargets(t *testing.T) { }}, }, { - DiscoveredLabels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + DiscoveredLabels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "__address__", Value: "localhost:80"}, {Name: "__metrics_path__", Value: "/metrics"}, {Name: "__scheme__", Value: "http"}, @@ -54,7 +53,7 @@ func TestDedupTargets(t *testing.T) { want: &targetspb.TargetDiscovery{ DroppedTargets: []*targetspb.DroppedTarget{ { - DiscoveredLabels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + DiscoveredLabels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "__address__", Value: "localhost:80"}, {Name: "__metrics_path__", Value: "/metrics"}, {Name: "__scheme__", Value: "http"}, @@ -71,7 +70,7 @@ func TestDedupTargets(t *testing.T) { targets: &targetspb.TargetDiscovery{ ActiveTargets: []*targetspb.ActiveTarget{ { - DiscoveredLabels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + DiscoveredLabels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "__address__", Value: "localhost:9090"}, {Name: "__metrics_path__", Value: "/metrics"}, {Name: "__scheme__", Value: "http"}, @@ -79,7 +78,7 @@ func TestDedupTargets(t *testing.T) { {Name: "prometheus", Value: "ha"}, {Name: "replica", Value: "0"}, }}, - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: 
labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "instance", Value: "localhost:9090"}, {Name: "job", Value: "myself"}, {Name: "prometheus", Value: "ha"}, @@ -90,7 +89,7 @@ func TestDedupTargets(t *testing.T) { Health: targetspb.TargetHealth_UP, }, { - DiscoveredLabels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + DiscoveredLabels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "__address__", Value: "localhost:9090"}, {Name: "__metrics_path__", Value: "/metrics"}, {Name: "__scheme__", Value: "http"}, @@ -98,7 +97,7 @@ func TestDedupTargets(t *testing.T) { {Name: "prometheus", Value: "ha"}, {Name: "replica", Value: "1"}, }}, - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "instance", Value: "localhost:9090"}, {Name: "job", Value: "myself"}, {Name: "prometheus", Value: "ha"}, @@ -113,14 +112,14 @@ func TestDedupTargets(t *testing.T) { want: &targetspb.TargetDiscovery{ ActiveTargets: []*targetspb.ActiveTarget{ { - DiscoveredLabels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + DiscoveredLabels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "__address__", Value: "localhost:9090"}, {Name: "__metrics_path__", Value: "/metrics"}, {Name: "__scheme__", Value: "http"}, {Name: "job", Value: "myself"}, {Name: "prometheus", Value: "ha"}, }}, - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "instance", Value: "localhost:9090"}, {Name: "job", Value: "myself"}, {Name: "prometheus", Value: "ha"}, @@ -138,7 +137,7 @@ func TestDedupTargets(t *testing.T) { targets: &targetspb.TargetDiscovery{ ActiveTargets: []*targetspb.ActiveTarget{ { - DiscoveredLabels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + DiscoveredLabels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "__address__", Value: "localhost:9090"}, {Name: "__metrics_path__", Value: "/metrics"}, {Name: "__scheme__", Value: "http"}, @@ -146,7 +145,7 @@ func TestDedupTargets(t *testing.T) { {Name: "prometheus", Value: "ha"}, {Name: "replica", Value: "0"}, }}, - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "instance", Value: "localhost:9090"}, {Name: "job", Value: "myself"}, {Name: "prometheus", Value: "ha"}, @@ -157,7 +156,7 @@ func TestDedupTargets(t *testing.T) { Health: targetspb.TargetHealth_UP, }, { - DiscoveredLabels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + DiscoveredLabels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "__address__", Value: "localhost:9090"}, {Name: "__metrics_path__", Value: "/metrics"}, {Name: "__scheme__", Value: "http"}, @@ -165,7 +164,7 @@ func TestDedupTargets(t *testing.T) { {Name: "prometheus", Value: "ha"}, {Name: "replica", Value: "1"}, }}, - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "instance", Value: "localhost:9090"}, {Name: "job", Value: "myself"}, {Name: "prometheus", Value: "ha"}, @@ -180,14 +179,14 @@ func TestDedupTargets(t *testing.T) { want: &targetspb.TargetDiscovery{ ActiveTargets: []*targetspb.ActiveTarget{ { - DiscoveredLabels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + DiscoveredLabels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "__address__", Value: "localhost:9090"}, {Name: "__metrics_path__", Value: "/metrics"}, {Name: "__scheme__", Value: "http"}, {Name: "job", Value: "myself"}, {Name: "prometheus", Value: "ha"}, }}, - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ 
{Name: "instance", Value: "localhost:9090"}, {Name: "job", Value: "myself"}, {Name: "prometheus", Value: "ha"}, @@ -205,7 +204,7 @@ func TestDedupTargets(t *testing.T) { targets: &targetspb.TargetDiscovery{ ActiveTargets: []*targetspb.ActiveTarget{ { - DiscoveredLabels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + DiscoveredLabels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "__address__", Value: "localhost:9090"}, {Name: "__metrics_path__", Value: "/metrics"}, {Name: "__scheme__", Value: "http"}, @@ -213,7 +212,7 @@ func TestDedupTargets(t *testing.T) { {Name: "prometheus", Value: "ha"}, {Name: "replica", Value: "0"}, }}, - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "instance", Value: "localhost:9090"}, {Name: "job", Value: "myself"}, {Name: "prometheus", Value: "ha"}, @@ -222,10 +221,10 @@ func TestDedupTargets(t *testing.T) { ScrapePool: "myself", ScrapeUrl: "http://localhost:9090/metrics", Health: targetspb.TargetHealth_UP, - LastScrape: rulespb.TimeToTimestamp(time.Unix(1, 0)), + LastScrape: time.Unix(1, 0), }, { - DiscoveredLabels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + DiscoveredLabels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "__address__", Value: "localhost:9090"}, {Name: "__metrics_path__", Value: "/metrics"}, {Name: "__scheme__", Value: "http"}, @@ -233,7 +232,7 @@ func TestDedupTargets(t *testing.T) { {Name: "prometheus", Value: "ha"}, {Name: "replica", Value: "1"}, }}, - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "instance", Value: "localhost:9090"}, {Name: "job", Value: "myself"}, {Name: "prometheus", Value: "ha"}, @@ -242,21 +241,21 @@ func TestDedupTargets(t *testing.T) { ScrapePool: "myself", ScrapeUrl: "http://localhost:9090/metrics", Health: targetspb.TargetHealth_UP, - LastScrape: rulespb.TimeToTimestamp(time.Unix(2, 0)), + LastScrape: time.Unix(2, 0), }, }, }, want: &targetspb.TargetDiscovery{ ActiveTargets: []*targetspb.ActiveTarget{ { - DiscoveredLabels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + DiscoveredLabels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "__address__", Value: "localhost:9090"}, {Name: "__metrics_path__", Value: "/metrics"}, {Name: "__scheme__", Value: "http"}, {Name: "job", Value: "myself"}, {Name: "prometheus", Value: "ha"}, }}, - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "instance", Value: "localhost:9090"}, {Name: "job", Value: "myself"}, {Name: "prometheus", Value: "ha"}, @@ -264,7 +263,7 @@ func TestDedupTargets(t *testing.T) { ScrapePool: "myself", ScrapeUrl: "http://localhost:9090/metrics", Health: targetspb.TargetHealth_UP, - LastScrape: rulespb.TimeToTimestamp(time.Unix(2, 0)), + LastScrape: time.Unix(2, 0), }, }, }, diff --git a/pkg/targets/targetspb/custom.go b/pkg/targets/targetspb/custom.go index 79b14f0f4e..188ddf65cd 100644 --- a/pkg/targets/targetspb/custom.go +++ b/pkg/targets/targetspb/custom.go @@ -10,7 +10,6 @@ import ( "github.com/pkg/errors" "github.com/prometheus/prometheus/model/labels" - "github.com/thanos-io/thanos/pkg/rules/rulespb" "github.com/thanos-io/thanos/pkg/store/labelpb" ) @@ -81,11 +80,11 @@ func (t1 *ActiveTarget) CompareState(t2 *ActiveTarget) int { return d } - if rulespb.TimestampToTime(t1.LastScrape).Before(rulespb.TimestampToTime(t2.LastScrape)) { + if t1.LastScrape.Before(t2.LastScrape) { return 1 } - if 
rulespb.TimestampToTime(t1.LastScrape).After(rulespb.TimestampToTime(t2.LastScrape)) { + if t1.LastScrape.After(t2.LastScrape) { return -1 } @@ -101,30 +100,30 @@ func (t1 *DroppedTarget) Compare(t2 *DroppedTarget) int { } func (t *ActiveTarget) SetLabels(ls labels.Labels) { - var result *labelpb.LabelSet + var result labelpb.LabelSet if !ls.IsEmpty() { - result = &labelpb.LabelSet{Labels: labelpb.PromLabelsToLabelpbLabels(ls)} + result = labelpb.LabelSet{Labels: labelpb.PromLabelsToLabelpbLabels(ls)} } t.Labels = result } func (t *ActiveTarget) SetDiscoveredLabels(ls labels.Labels) { - var result *labelpb.LabelSet + var result labelpb.LabelSet if !ls.IsEmpty() { - result = &labelpb.LabelSet{Labels: labelpb.PromLabelsToLabelpbLabels(ls)} + result = labelpb.LabelSet{Labels: labelpb.PromLabelsToLabelpbLabels(ls)} } t.DiscoveredLabels = result } func (t *DroppedTarget) SetDiscoveredLabels(ls labels.Labels) { - var result *labelpb.LabelSet + var result labelpb.LabelSet if !ls.IsEmpty() { - result = &labelpb.LabelSet{Labels: labelpb.PromLabelsToLabelpbLabels(ls)} + result = labelpb.LabelSet{Labels: labelpb.PromLabelsToLabelpbLabels(ls)} } t.DiscoveredLabels = result diff --git a/pkg/targets/targetspb/rpc.pb.go b/pkg/targets/targetspb/rpc.pb.go index 5992692b0d..8083c215ec 100644 --- a/pkg/targets/targetspb/rpc.pb.go +++ b/pkg/targets/targetspb/rpc.pb.go @@ -7,23 +7,28 @@ import ( context "context" encoding_binary "encoding/binary" fmt "fmt" + + _ "github.com/gogo/protobuf/gogoproto" + proto "github.com/gogo/protobuf/proto" + github_com_gogo_protobuf_types "github.com/gogo/protobuf/types" + grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" + io "io" math "math" math_bits "math/bits" + time "time" - proto "github.com/gogo/protobuf/proto" - rulespb "github.com/thanos-io/thanos/pkg/rules/rulespb" labelpb "github.com/thanos-io/thanos/pkg/store/labelpb" storepb "github.com/thanos-io/thanos/pkg/store/storepb" - grpc "google.golang.org/grpc" - codes "google.golang.org/grpc/codes" - status "google.golang.org/grpc/status" ) // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal var _ = fmt.Errorf var _ = math.Inf +var _ = time.Kitchen // This is a compile-time assertion to ensure that this generated file // is compatible with the proto package it is being compiled against. 
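The generated targetspb code below now carries `LastScrape` as a plain `time.Time` (via `(gogoproto.stdtime) = true` with `(gogoproto.nullable) = false`) instead of a `*rulespb.Timestamp`. A minimal sketch of the call-site effect, using a hypothetical `activeTargetView` stand-in rather than the real generated struct:

```go
package main

import (
	"fmt"
	"time"
)

// activeTargetView is a hypothetical stand-in for targetspb.ActiveTarget,
// reduced to the one field this hunk changes: LastScrape is now a plain
// time.Time (gogoproto.stdtime + nullable=false) instead of *rulespb.Timestamp.
type activeTargetView struct {
	LastScrape time.Time
}

func main() {
	older := activeTargetView{LastScrape: time.Unix(1, 0)}
	newer := activeTargetView{LastScrape: time.Unix(2, 0)}

	// Comparisons no longer go through rulespb.TimestampToTime or nil checks;
	// the standard time.Time API is used directly, as CompareState now does.
	fmt.Println(older.LastScrape.Before(newer.LastScrape)) // true

	// Tests that used to reset the field to nil now assign the zero value.
	older.LastScrape = time.Time{}
	fmt.Println(older.LastScrape.IsZero()) // true
}
```

The unit and e2e test hunks in this patch follow the same pattern, replacing `LastScrape: nil` with `LastScrape: time.Time{}` and `rulespb.TimestampToTime(...)` comparisons with direct `Before`/`After` calls.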
@@ -92,9 +97,6 @@ func (TargetsRequest_State) EnumDescriptor() ([]byte, []int) { type TargetsRequest struct { State TargetsRequest_State `protobuf:"varint,1,opt,name=state,proto3,enum=thanos.TargetsRequest_State" json:"state,omitempty"` PartialResponseStrategy storepb.PartialResponseStrategy `protobuf:"varint,2,opt,name=partial_response_strategy,json=partialResponseStrategy,proto3,enum=thanos.PartialResponseStrategy" json:"partial_response_strategy,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` } func (m *TargetsRequest) Reset() { *m = TargetsRequest{} } @@ -130,28 +132,11 @@ func (m *TargetsRequest) XXX_DiscardUnknown() { var xxx_messageInfo_TargetsRequest proto.InternalMessageInfo -func (m *TargetsRequest) GetState() TargetsRequest_State { - if m != nil { - return m.State - } - return TargetsRequest_ANY -} - -func (m *TargetsRequest) GetPartialResponseStrategy() storepb.PartialResponseStrategy { - if m != nil { - return m.PartialResponseStrategy - } - return storepb.PartialResponseStrategy_WARN -} - type TargetsResponse struct { // Types that are valid to be assigned to Result: // *TargetsResponse_Targets // *TargetsResponse_Warning - Result isTargetsResponse_Result `protobuf_oneof:"result"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Result isTargetsResponse_Result `protobuf_oneof:"result"` } func (m *TargetsResponse) Reset() { *m = TargetsResponse{} } @@ -233,13 +218,8 @@ func (*TargetsResponse) XXX_OneofWrappers() []interface{} { } type TargetDiscovery struct { - // @gotags: json:"activeTargets" - ActiveTargets []*ActiveTarget `protobuf:"bytes,1,rep,name=activeTargets,proto3" json:"activeTargets"` - // @gotags: json:"droppedTargets" - DroppedTargets []*DroppedTarget `protobuf:"bytes,2,rep,name=droppedTargets,proto3" json:"droppedTargets"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + ActiveTargets []*ActiveTarget `protobuf:"bytes,1,rep,name=activeTargets,proto3" json:"activeTargets"` + DroppedTargets []*DroppedTarget `protobuf:"bytes,2,rep,name=droppedTargets,proto3" json:"droppedTargets"` } func (m *TargetDiscovery) Reset() { *m = TargetDiscovery{} } @@ -275,42 +255,16 @@ func (m *TargetDiscovery) XXX_DiscardUnknown() { var xxx_messageInfo_TargetDiscovery proto.InternalMessageInfo -func (m *TargetDiscovery) GetActiveTargets() []*ActiveTarget { - if m != nil { - return m.ActiveTargets - } - return nil -} - -func (m *TargetDiscovery) GetDroppedTargets() []*DroppedTarget { - if m != nil { - return m.DroppedTargets - } - return nil -} - type ActiveTarget struct { - // @gotags: json:"discoveredLabels" - DiscoveredLabels *labelpb.LabelSet `protobuf:"bytes,1,opt,name=discoveredLabels,proto3" json:"discoveredLabels"` - // @gotags: json:"labels" - Labels *labelpb.LabelSet `protobuf:"bytes,2,opt,name=labels,proto3" json:"labels"` - // @gotags: json:"scrapePool" - ScrapePool string `protobuf:"bytes,3,opt,name=scrapePool,proto3" json:"scrapePool"` - // @gotags: json:"scrapeUrl" - ScrapeUrl string `protobuf:"bytes,4,opt,name=scrapeUrl,proto3" json:"scrapeUrl"` - // @gotags: json:"globalUrl" - GlobalUrl string `protobuf:"bytes,5,opt,name=globalUrl,proto3" json:"globalUrl"` - // @gotags: json:"lastError" - LastError string `protobuf:"bytes,6,opt,name=lastError,proto3" json:"lastError"` - // @gotags: json:"lastScrape" - LastScrape *rulespb.Timestamp 
`protobuf:"bytes,7,opt,name=lastScrape,proto3" json:"lastScrape"` - // @gotags: json:"lastScrapeDuration" - LastScrapeDuration float64 `protobuf:"fixed64,8,opt,name=lastScrapeDuration,proto3" json:"lastScrapeDuration"` - // @gotags: json:"health" - Health TargetHealth `protobuf:"varint,9,opt,name=health,proto3,enum=thanos.TargetHealth" json:"health"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + DiscoveredLabels labelpb.LabelSet `protobuf:"bytes,1,opt,name=discoveredLabels,proto3" json:"discoveredLabels"` + Labels labelpb.LabelSet `protobuf:"bytes,2,opt,name=labels,proto3" json:"labels"` + ScrapePool string `protobuf:"bytes,3,opt,name=scrapePool,proto3" json:"scrapePool"` + ScrapeUrl string `protobuf:"bytes,4,opt,name=scrapeUrl,proto3" json:"scrapeUrl"` + GlobalUrl string `protobuf:"bytes,5,opt,name=globalUrl,proto3" json:"globalUrl"` + LastError string `protobuf:"bytes,6,opt,name=lastError,proto3" json:"lastError"` + LastScrape time.Time `protobuf:"bytes,7,opt,name=lastScrape,proto3,stdtime" json:"lastScrape"` + LastScrapeDuration float64 `protobuf:"fixed64,8,opt,name=lastScrapeDuration,proto3" json:"lastScrapeDuration"` + Health TargetHealth `protobuf:"varint,9,opt,name=health,proto3,enum=thanos.TargetHealth" json:"health"` } func (m *ActiveTarget) Reset() { *m = ActiveTarget{} } @@ -346,75 +300,8 @@ func (m *ActiveTarget) XXX_DiscardUnknown() { var xxx_messageInfo_ActiveTarget proto.InternalMessageInfo -func (m *ActiveTarget) GetDiscoveredLabels() *labelpb.LabelSet { - if m != nil { - return m.DiscoveredLabels - } - return nil -} - -func (m *ActiveTarget) GetLabels() *labelpb.LabelSet { - if m != nil { - return m.Labels - } - return nil -} - -func (m *ActiveTarget) GetScrapePool() string { - if m != nil { - return m.ScrapePool - } - return "" -} - -func (m *ActiveTarget) GetScrapeUrl() string { - if m != nil { - return m.ScrapeUrl - } - return "" -} - -func (m *ActiveTarget) GetGlobalUrl() string { - if m != nil { - return m.GlobalUrl - } - return "" -} - -func (m *ActiveTarget) GetLastError() string { - if m != nil { - return m.LastError - } - return "" -} - -func (m *ActiveTarget) GetLastScrape() *rulespb.Timestamp { - if m != nil { - return m.LastScrape - } - return nil -} - -func (m *ActiveTarget) GetLastScrapeDuration() float64 { - if m != nil { - return m.LastScrapeDuration - } - return 0 -} - -func (m *ActiveTarget) GetHealth() TargetHealth { - if m != nil { - return m.Health - } - return TargetHealth_DOWN -} - type DroppedTarget struct { - // @gotags: json:"discoveredLabels" - DiscoveredLabels *labelpb.LabelSet `protobuf:"bytes,1,opt,name=discoveredLabels,proto3" json:"discoveredLabels"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + DiscoveredLabels labelpb.LabelSet `protobuf:"bytes,1,opt,name=discoveredLabels,proto3" json:"discoveredLabels"` } func (m *DroppedTarget) Reset() { *m = DroppedTarget{} } @@ -450,13 +337,6 @@ func (m *DroppedTarget) XXX_DiscardUnknown() { var xxx_messageInfo_DroppedTarget proto.InternalMessageInfo -func (m *DroppedTarget) GetDiscoveredLabels() *labelpb.LabelSet { - if m != nil { - return m.DiscoveredLabels - } - return nil -} - func init() { proto.RegisterEnum("thanos.TargetHealth", TargetHealth_name, TargetHealth_value) proto.RegisterEnum("thanos.TargetsRequest_State", TargetsRequest_State_name, TargetsRequest_State_value) @@ -470,44 +350,51 @@ func init() { func init() { 
proto.RegisterFile("targets/targetspb/rpc.proto", fileDescriptor_b5cdaee03579e907) } var fileDescriptor_b5cdaee03579e907 = []byte{ - // 583 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x54, 0xcf, 0x6f, 0xd3, 0x30, - 0x14, 0x5e, 0xd2, 0x2d, 0x59, 0xdf, 0x7e, 0x10, 0xac, 0x41, 0xb3, 0x31, 0x95, 0x29, 0xa7, 0x82, - 0xa0, 0x85, 0xee, 0x86, 0xc6, 0x61, 0x23, 0x13, 0x43, 0x40, 0x57, 0xb9, 0x1b, 0x08, 0x38, 0x4c, - 0x6e, 0x6b, 0x75, 0x91, 0xbc, 0xda, 0xd8, 0xee, 0xd0, 0x8e, 0xdc, 0xf9, 0xc3, 0x38, 0x70, 0xe0, - 0x4f, 0x40, 0xfb, 0x4b, 0x50, 0xec, 0x24, 0x4d, 0xb7, 0x72, 0xe2, 0xe2, 0xc4, 0xdf, 0xf7, 0xbd, - 0xef, 0x3d, 0xdb, 0xcf, 0x86, 0x07, 0x9a, 0xc8, 0x11, 0xd5, 0xaa, 0x95, 0x7d, 0x45, 0xbf, 0x25, - 0xc5, 0xa0, 0x29, 0x24, 0xd7, 0x1c, 0x79, 0xfa, 0x9c, 0x8c, 0xb9, 0xda, 0xda, 0x54, 0x9a, 0x4b, - 0xda, 0x32, 0xa3, 0xe8, 0xb7, 0xf4, 0x95, 0xa0, 0xca, 0x4a, 0x72, 0x8a, 0x91, 0x3e, 0x65, 0x37, - 0xa8, 0x9a, 0x9c, 0x30, 0xaa, 0x5a, 0x66, 0x2c, 0xdb, 0x46, 0xbf, 0x1c, 0x58, 0x3f, 0xb1, 0xe9, - 0x30, 0xfd, 0x3a, 0xa1, 0x4a, 0xa3, 0x36, 0x2c, 0x29, 0x4d, 0x34, 0x0d, 0x9d, 0x1d, 0xa7, 0xb1, - 0xde, 0xde, 0x6e, 0xda, 0xcc, 0xcd, 0x59, 0x59, 0xb3, 0x97, 0x6a, 0xb0, 0x95, 0xa2, 0x2f, 0xb0, - 0x29, 0x88, 0xd4, 0x09, 0x61, 0x67, 0x92, 0x2a, 0xc1, 0xc7, 0x8a, 0x9e, 0x29, 0x2d, 0x89, 0xa6, - 0xa3, 0xab, 0xd0, 0x35, 0x3e, 0x0f, 0x73, 0x9f, 0xae, 0x15, 0xe2, 0x4c, 0xd7, 0xcb, 0x64, 0xb8, - 0x26, 0xe6, 0x13, 0xd1, 0x23, 0x58, 0x32, 0xc9, 0x90, 0x0f, 0x95, 0xfd, 0xce, 0xa7, 0x60, 0x01, - 0x01, 0x78, 0xfb, 0xaf, 0x4e, 0xde, 0x7c, 0x38, 0x0c, 0x1c, 0xb4, 0x02, 0x7e, 0x8c, 0x8f, 0xbb, - 0xdd, 0xc3, 0x38, 0x70, 0x23, 0x06, 0x77, 0x8a, 0x32, 0xad, 0x0b, 0xda, 0x05, 0x3f, 0xdb, 0x4f, - 0xb3, 0xa0, 0x95, 0x76, 0x6d, 0x76, 0x41, 0x71, 0xa2, 0x06, 0xfc, 0x92, 0xca, 0xab, 0xa3, 0x05, - 0x9c, 0x2b, 0xd1, 0x16, 0xf8, 0xdf, 0x88, 0x1c, 0x27, 0xe3, 0x91, 0xa9, 0xbe, 0x9a, 0x72, 0x19, - 0x70, 0xb0, 0x0c, 0x9e, 0xa4, 0x6a, 0xc2, 0x74, 0xf4, 0xc3, 0xc9, 0xd3, 0x15, 0x26, 0xe8, 0x05, - 0xac, 0x91, 0x81, 0x4e, 0x2e, 0xe9, 0x49, 0x91, 0xb4, 0xd2, 0x58, 0x69, 0x6f, 0xe4, 0x49, 0xf7, - 0x4b, 0x24, 0x9e, 0x95, 0xa2, 0x97, 0xb0, 0x3e, 0x94, 0x5c, 0x08, 0x3a, 0xcc, 0x83, 0x5d, 0x13, - 0x7c, 0x2f, 0x0f, 0x8e, 0xcb, 0x2c, 0xbe, 0x21, 0x8e, 0xbe, 0x57, 0x60, 0xb5, 0x6c, 0x8f, 0xf6, - 0x20, 0x18, 0x66, 0x85, 0xd1, 0xe1, 0xbb, 0xb4, 0x2d, 0xf2, 0x3d, 0x08, 0x72, 0x47, 0x83, 0xf6, - 0xa8, 0xc6, 0xb7, 0x94, 0xa8, 0x01, 0x1e, 0xb3, 0x31, 0xee, 0x3f, 0x62, 0x32, 0x1e, 0xd5, 0x01, - 0xd4, 0x40, 0x12, 0x41, 0xbb, 0x9c, 0xb3, 0xb0, 0x92, 0x6e, 0x18, 0x2e, 0x21, 0x68, 0x1b, 0xaa, - 0x76, 0x76, 0x2a, 0x59, 0xb8, 0x68, 0xe8, 0x29, 0x90, 0xb2, 0x23, 0xc6, 0xfb, 0x84, 0xa5, 0xec, - 0x92, 0x65, 0x0b, 0x20, 0x65, 0x19, 0x51, 0xfa, 0x50, 0x4a, 0x2e, 0x43, 0xcf, 0xb2, 0x05, 0x80, - 0x9e, 0x03, 0xa4, 0x93, 0x9e, 0x31, 0x0b, 0x7d, 0x53, 0xe7, 0xdd, 0xe2, 0x7c, 0x93, 0x0b, 0xaa, - 0x34, 0xb9, 0x10, 0xb8, 0x24, 0x42, 0x4d, 0x40, 0xd3, 0x59, 0x3c, 0x91, 0x44, 0x27, 0x7c, 0x1c, - 0x2e, 0xef, 0x38, 0x0d, 0x07, 0xcf, 0x61, 0xd0, 0x13, 0xf0, 0xce, 0x29, 0x61, 0xfa, 0x3c, 0xac, - 0x9a, 0x3e, 0xde, 0x98, 0x6d, 0x9f, 0x23, 0xc3, 0xe1, 0x4c, 0x13, 0xbd, 0x87, 0xb5, 0x99, 0x43, - 0xfa, 0xbf, 0x33, 0x78, 0xfc, 0x14, 0x56, 0xcb, 0x69, 0xd0, 0x32, 0x2c, 0xc6, 0xc7, 0x1f, 0x3b, - 0xc1, 0x02, 0xf2, 0xc0, 0x3d, 0xed, 0xda, 0xf6, 0x3f, 0xed, 0xbc, 0xed, 0xa4, 0xa0, 0xdb, 0x7e, - 0x0d, 0x7e, 0xde, 0x4b, 0x7b, 0xd3, 0xdf, 0xfb, 0xf3, 0x6f, 0xf0, 0x56, 0xed, 0x16, 0x6e, 0xaf, - 0xcc, 0x33, 0xe7, 0xa0, 0xf6, 0xf3, 0xba, 
0xee, 0xfc, 0xbe, 0xae, 0x3b, 0x7f, 0xae, 0xeb, 0xce, - 0xe7, 0x6a, 0xf1, 0x20, 0xf5, 0x3d, 0xf3, 0x6c, 0xec, 0xfe, 0x0d, 0x00, 0x00, 0xff, 0xff, 0xfb, - 0x94, 0xf8, 0x3a, 0xac, 0x04, 0x00, 0x00, + // 699 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x54, 0xc1, 0x72, 0xd3, 0x3c, + 0x10, 0xb6, 0xd3, 0x36, 0x69, 0xb6, 0x6d, 0xfe, 0xfc, 0x9a, 0xfe, 0xad, 0x9b, 0x9f, 0x89, 0x3b, + 0xb9, 0x50, 0x60, 0xb0, 0x99, 0xf4, 0xc2, 0x81, 0x4b, 0x4d, 0x0a, 0x65, 0x80, 0x34, 0x28, 0x69, + 0x19, 0xe0, 0xd0, 0x51, 0x12, 0xe1, 0x66, 0xc6, 0x8d, 0x8c, 0xa4, 0x94, 0xe9, 0x5b, 0xf4, 0x45, + 0x38, 0xf1, 0x12, 0x3d, 0x70, 0xe8, 0x91, 0x93, 0x81, 0xf6, 0x96, 0xa7, 0x60, 0x2c, 0xdb, 0xb1, + 0xd3, 0x86, 0x23, 0x17, 0x6b, 0xf7, 0xdb, 0x6f, 0xbf, 0x5d, 0x59, 0x5a, 0xc1, 0xff, 0x92, 0x70, + 0x97, 0x4a, 0x61, 0xc7, 0xab, 0xdf, 0xb5, 0xb9, 0xdf, 0xb3, 0x7c, 0xce, 0x24, 0x43, 0x79, 0x79, + 0x4c, 0x86, 0x4c, 0x54, 0x36, 0x84, 0x64, 0x9c, 0xda, 0xea, 0xeb, 0x77, 0x6d, 0x79, 0xe6, 0x53, + 0x11, 0x51, 0x2a, 0xab, 0x2e, 0x73, 0x99, 0x32, 0xed, 0xd0, 0x8a, 0xd1, 0x38, 0xc1, 0x23, 0x5d, + 0xea, 0xdd, 0x48, 0x30, 0x5d, 0xc6, 0x5c, 0x8f, 0xda, 0xca, 0xeb, 0x8e, 0x3e, 0xda, 0x72, 0x70, + 0x42, 0x85, 0x24, 0x27, 0x7e, 0x44, 0xa8, 0x7d, 0xd3, 0xa1, 0xd4, 0x89, 0x9a, 0xc1, 0xf4, 0xd3, + 0x88, 0x0a, 0x89, 0xea, 0xb0, 0x20, 0x24, 0x91, 0xd4, 0xd0, 0x37, 0xf5, 0xad, 0x52, 0xfd, 0x8e, + 0x15, 0xf5, 0x65, 0x4d, 0xd3, 0xac, 0x76, 0xc8, 0xc1, 0x11, 0x15, 0x7d, 0x80, 0x0d, 0x9f, 0x70, + 0x39, 0x20, 0xde, 0x11, 0xa7, 0xc2, 0x67, 0x43, 0x41, 0x8f, 0x84, 0xe4, 0x44, 0x52, 0xf7, 0xcc, + 0xc8, 0x29, 0x1d, 0x33, 0xd1, 0x69, 0x45, 0x44, 0x1c, 0xf3, 0xda, 0x31, 0x0d, 0xaf, 0xfb, 0xb3, + 0x03, 0xb5, 0x7b, 0xb0, 0xa0, 0x8a, 0xa1, 0x02, 0xcc, 0xed, 0x34, 0xdf, 0x95, 0x35, 0x04, 0x90, + 0xdf, 0x79, 0xda, 0x79, 0x71, 0xb8, 0x5b, 0xd6, 0xd1, 0x12, 0x14, 0x1a, 0x78, 0xbf, 0xd5, 0xda, + 0x6d, 0x94, 0x73, 0x35, 0x0f, 0xfe, 0x99, 0xb4, 0x19, 0xa9, 0xa0, 0x6d, 0x28, 0xc4, 0x7f, 0x5b, + 0x6d, 0x68, 0xa9, 0xbe, 0x3e, 0xbd, 0xa1, 0xc6, 0x40, 0xf4, 0xd8, 0x29, 0xe5, 0x67, 0x7b, 0x1a, + 0x4e, 0x98, 0xa8, 0x02, 0x85, 0xcf, 0x84, 0x0f, 0x07, 0x43, 0x57, 0x75, 0x5f, 0x0c, 0x63, 0x31, + 0xe0, 0x2c, 0x42, 0x9e, 0x53, 0x31, 0xf2, 0x64, 0xed, 0xab, 0x9e, 0x94, 0x9b, 0x88, 0xa0, 0xd7, + 0xb0, 0x42, 0x7a, 0x72, 0x70, 0x4a, 0x3b, 0x93, 0xa2, 0x73, 0x5b, 0x4b, 0xf5, 0xd5, 0xa4, 0xe8, + 0x4e, 0x26, 0xe8, 0xfc, 0x3b, 0x0e, 0xcc, 0x69, 0x3a, 0x9e, 0x76, 0xd1, 0x1b, 0x28, 0xf5, 0x39, + 0xf3, 0x7d, 0xda, 0x4f, 0xf4, 0x72, 0x4a, 0xef, 0xbf, 0x44, 0xaf, 0x91, 0x8d, 0x3a, 0x68, 0x1c, + 0x98, 0x37, 0x12, 0xf0, 0x0d, 0xbf, 0xf6, 0x65, 0x1e, 0x96, 0xb3, 0x5d, 0xa0, 0x43, 0x28, 0xf7, + 0xe3, 0xfe, 0x69, 0xff, 0x55, 0x78, 0x8b, 0x92, 0x5f, 0x55, 0x4e, 0xaa, 0x28, 0xb4, 0x4d, 0xa5, + 0x63, 0x5c, 0x04, 0xa6, 0x36, 0x0e, 0xcc, 0x5b, 0x19, 0xf8, 0x16, 0x82, 0x1e, 0x43, 0xde, 0x8b, + 0xd4, 0x72, 0x7f, 0x50, 0x2b, 0xc5, 0x6a, 0x31, 0x0f, 0xc7, 0x2b, 0xb2, 0x00, 0x44, 0x8f, 0x13, + 0x9f, 0xb6, 0x18, 0xf3, 0x8c, 0xb9, 0xf0, 0x04, 0x9c, 0xd2, 0x38, 0x30, 0x33, 0x28, 0xce, 0xd8, + 0xe8, 0x01, 0x14, 0x23, 0xef, 0x80, 0x7b, 0xc6, 0xbc, 0xa2, 0xaf, 0x8c, 0x03, 0x33, 0x05, 0x71, + 0x6a, 0x86, 0x64, 0xd7, 0x63, 0x5d, 0xe2, 0x85, 0xe4, 0x85, 0x94, 0x3c, 0x01, 0x71, 0x6a, 0x86, + 0x64, 0x8f, 0x08, 0xb9, 0xcb, 0x39, 0xe3, 0x46, 0x3e, 0x25, 0x4f, 0x40, 0x9c, 0x9a, 0x08, 0x03, + 0x84, 0x4e, 0x5b, 0x95, 0x32, 0x0a, 0x6a, 0xd3, 0x15, 0x2b, 0x1a, 0x41, 0x2b, 0x19, 0x41, 0xab, + 0x93, 0x8c, 0xa0, 0xb3, 0x16, 0x6f, 0x3f, 0x93, 0x75, 0xfe, 0xc3, 0xd4, 
0x71, 0xc6, 0x47, 0xcf, + 0x00, 0xa5, 0x5e, 0x63, 0xc4, 0x89, 0x1c, 0xb0, 0xa1, 0xb1, 0xb8, 0xa9, 0x6f, 0xe9, 0xce, 0xda, + 0x38, 0x30, 0x67, 0x44, 0xf1, 0x0c, 0x2c, 0x3c, 0x8c, 0x63, 0x4a, 0x3c, 0x79, 0x6c, 0x14, 0xd5, + 0x38, 0xae, 0x4e, 0x4f, 0xc1, 0x9e, 0x8a, 0x39, 0x10, 0x1e, 0x46, 0xc4, 0xc3, 0xf1, 0x5a, 0x73, + 0x61, 0x65, 0xea, 0x92, 0xfd, 0xad, 0xfb, 0x72, 0xff, 0x21, 0x2c, 0x67, 0x9b, 0x41, 0x8b, 0x30, + 0xdf, 0xd8, 0x7f, 0xdb, 0x2c, 0x6b, 0x28, 0x0f, 0xb9, 0x83, 0x56, 0x34, 0xeb, 0x07, 0xcd, 0x97, + 0xcd, 0x10, 0xcc, 0xd5, 0x9f, 0x43, 0x21, 0x99, 0x92, 0x27, 0xa9, 0xb9, 0x36, 0xfb, 0xb9, 0xaa, + 0xac, 0xdf, 0xc2, 0xa3, 0xf7, 0xe1, 0x91, 0xee, 0xdc, 0xbd, 0xf8, 0x55, 0xd5, 0x2e, 0xae, 0xaa, + 0xfa, 0xe5, 0x55, 0x55, 0xff, 0x79, 0x55, 0xd5, 0xcf, 0xaf, 0xab, 0xda, 0xe5, 0x75, 0x55, 0xfb, + 0x7e, 0x5d, 0xd5, 0xde, 0x17, 0x27, 0x6f, 0x75, 0x37, 0xaf, 0xce, 0x70, 0xfb, 0x77, 0x00, 0x00, + 0x00, 0xff, 0xff, 0x81, 0x5b, 0x5e, 0xa6, 0xc7, 0x05, 0x00, 0x00, } // Reference imports to suppress errors if they are not otherwise used. @@ -641,10 +528,6 @@ func (m *TargetsRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if m.PartialResponseStrategy != 0 { i = encodeVarintRpc(dAtA, i, uint64(m.PartialResponseStrategy)) i-- @@ -678,10 +561,6 @@ func (m *TargetsResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if m.Result != nil { { size := m.Result.Size() @@ -749,10 +628,6 @@ func (m *TargetDiscovery) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if len(m.DroppedTargets) > 0 { for iNdEx := len(m.DroppedTargets) - 1; iNdEx >= 0; iNdEx-- { { @@ -804,10 +679,6 @@ func (m *ActiveTarget) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } if m.Health != 0 { i = encodeVarintRpc(dAtA, i, uint64(m.Health)) i-- @@ -819,18 +690,14 @@ func (m *ActiveTarget) MarshalToSizedBuffer(dAtA []byte) (int, error) { i-- dAtA[i] = 0x41 } - if m.LastScrape != nil { - { - size, err := m.LastScrape.MarshalToSizedBuffer(dAtA[:i]) - if err != nil { - return 0, err - } - i -= size - i = encodeVarintRpc(dAtA, i, uint64(size)) - } - i-- - dAtA[i] = 0x3a + n2, err2 := github_com_gogo_protobuf_types.StdTimeMarshalTo(m.LastScrape, dAtA[i-github_com_gogo_protobuf_types.SizeOfStdTime(m.LastScrape):]) + if err2 != nil { + return 0, err2 } + i -= n2 + i = encodeVarintRpc(dAtA, i, uint64(n2)) + i-- + dAtA[i] = 0x3a if len(m.LastError) > 0 { i -= len(m.LastError) copy(dAtA[i:], m.LastError) @@ -859,30 +726,26 @@ func (m *ActiveTarget) MarshalToSizedBuffer(dAtA []byte) (int, error) { i-- dAtA[i] = 0x1a } - if m.Labels != nil { - { - size, err := m.Labels.MarshalToSizedBuffer(dAtA[:i]) - if err != nil { - return 0, err - } - i -= size - i = encodeVarintRpc(dAtA, i, uint64(size)) + { + size, err := m.Labels.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err } - i-- - dAtA[i] = 0x12 + i -= size + i = encodeVarintRpc(dAtA, i, uint64(size)) } - if m.DiscoveredLabels != nil { - { - size, err := m.DiscoveredLabels.MarshalToSizedBuffer(dAtA[:i]) - if err != nil { - return 0, err - } - i -= size - i = 
encodeVarintRpc(dAtA, i, uint64(size)) + i-- + dAtA[i] = 0x12 + { + size, err := m.DiscoveredLabels.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err } - i-- - dAtA[i] = 0xa + i -= size + i = encodeVarintRpc(dAtA, i, uint64(size)) } + i-- + dAtA[i] = 0xa return len(dAtA) - i, nil } @@ -906,22 +769,16 @@ func (m *DroppedTarget) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } - if m.DiscoveredLabels != nil { - { - size, err := m.DiscoveredLabels.MarshalToSizedBuffer(dAtA[:i]) - if err != nil { - return 0, err - } - i -= size - i = encodeVarintRpc(dAtA, i, uint64(size)) + { + size, err := m.DiscoveredLabels.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err } - i-- - dAtA[i] = 0xa + i -= size + i = encodeVarintRpc(dAtA, i, uint64(size)) } + i-- + dAtA[i] = 0xa return len(dAtA) - i, nil } @@ -948,9 +805,6 @@ func (m *TargetsRequest) Size() (n int) { if m.PartialResponseStrategy != 0 { n += 1 + sovRpc(uint64(m.PartialResponseStrategy)) } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -963,9 +817,6 @@ func (m *TargetsResponse) Size() (n int) { if m.Result != nil { n += m.Result.Size() } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -1009,9 +860,6 @@ func (m *TargetDiscovery) Size() (n int) { n += 1 + l + sovRpc(uint64(l)) } } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -1021,14 +869,10 @@ func (m *ActiveTarget) Size() (n int) { } var l int _ = l - if m.DiscoveredLabels != nil { - l = m.DiscoveredLabels.Size() - n += 1 + l + sovRpc(uint64(l)) - } - if m.Labels != nil { - l = m.Labels.Size() - n += 1 + l + sovRpc(uint64(l)) - } + l = m.DiscoveredLabels.Size() + n += 1 + l + sovRpc(uint64(l)) + l = m.Labels.Size() + n += 1 + l + sovRpc(uint64(l)) l = len(m.ScrapePool) if l > 0 { n += 1 + l + sovRpc(uint64(l)) @@ -1045,19 +889,14 @@ func (m *ActiveTarget) Size() (n int) { if l > 0 { n += 1 + l + sovRpc(uint64(l)) } - if m.LastScrape != nil { - l = m.LastScrape.Size() - n += 1 + l + sovRpc(uint64(l)) - } + l = github_com_gogo_protobuf_types.SizeOfStdTime(m.LastScrape) + n += 1 + l + sovRpc(uint64(l)) if m.LastScrapeDuration != 0 { n += 9 } if m.Health != 0 { n += 1 + sovRpc(uint64(m.Health)) } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } return n } @@ -1067,13 +906,8 @@ func (m *DroppedTarget) Size() (n int) { } var l int _ = l - if m.DiscoveredLabels != nil { - l = m.DiscoveredLabels.Size() - n += 1 + l + sovRpc(uint64(l)) - } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } + l = m.DiscoveredLabels.Size() + n += 1 + l + sovRpc(uint64(l)) return n } @@ -1162,7 +996,6 @@ func (m *TargetsRequest) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } @@ -1280,7 +1113,6 @@ func (m *TargetsResponse) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } @@ -1399,7 +1231,6 @@ func (m *TargetDiscovery) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) 
iNdEx += skippy } } @@ -1467,9 +1298,6 @@ func (m *ActiveTarget) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - if m.DiscoveredLabels == nil { - m.DiscoveredLabels = &labelpb.LabelSet{} - } if err := m.DiscoveredLabels.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -1503,9 +1331,6 @@ func (m *ActiveTarget) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - if m.Labels == nil { - m.Labels = &labelpb.LabelSet{} - } if err := m.Labels.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -1667,10 +1492,7 @@ func (m *ActiveTarget) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - if m.LastScrape == nil { - m.LastScrape = &rulespb.Timestamp{} - } - if err := m.LastScrape.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + if err := github_com_gogo_protobuf_types.StdTimeUnmarshal(&m.LastScrape, dAtA[iNdEx:postIndex]); err != nil { return err } iNdEx = postIndex @@ -1716,7 +1538,6 @@ func (m *ActiveTarget) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } @@ -1784,9 +1605,6 @@ func (m *DroppedTarget) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - if m.DiscoveredLabels == nil { - m.DiscoveredLabels = &labelpb.LabelSet{} - } if err := m.DiscoveredLabels.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -1803,7 +1621,6 @@ func (m *DroppedTarget) Unmarshal(dAtA []byte) error { if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) iNdEx += skippy } } diff --git a/pkg/targets/targetspb/rpc.proto b/pkg/targets/targetspb/rpc.proto index 4a875090a4..e60ef7416a 100644 --- a/pkg/targets/targetspb/rpc.proto +++ b/pkg/targets/targetspb/rpc.proto @@ -5,11 +5,23 @@ syntax = "proto3"; package thanos; import "store/storepb/types.proto"; +import "gogoproto/gogo.proto"; import "store/labelpb/types.proto"; -import "rules/rulespb/rpc.proto"; +import "google/protobuf/timestamp.proto"; option go_package = "targetspb"; +option (gogoproto.sizer_all) = true; +option (gogoproto.marshaler_all) = true; +option (gogoproto.unmarshaler_all) = true; +option (gogoproto.goproto_getters_all) = false; + +// Do not generate XXX fields to reduce memory footprint and opening a door +// for zero-copy casts to/from prometheus data types. +option (gogoproto.goproto_unkeyed_all) = false; +option (gogoproto.goproto_unrecognized_all) = false; +option (gogoproto.goproto_sizecache_all) = false; + /// Targets represents API that is responsible for gathering targets and their states. service Targets { /// Targets has info for all targets. 
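The file-level gogoproto options added to `rpc.proto` above (no getters, no `XXX_*` fields) combine with `(gogoproto.nullable) = false` on the `LabelSet` fields so that targets carry label sets by value. A minimal sketch of what that means for callers, with hypothetical `label`/`labelSet`/`droppedTargetView` stand-ins for the real `labelpb` and `targetspb` types:

```go
package main

import "fmt"

// label and labelSet are hypothetical stand-ins for labelpb.Label and
// labelpb.LabelSet; the real generated types live in
// github.com/thanos-io/thanos/pkg/store/labelpb.
type label struct{ Name, Value string }

type labelSet struct{ Labels []label }

// droppedTargetView mirrors the shape of targetspb.DroppedTarget after this
// change: DiscoveredLabels is embedded by value, so it can never be nil.
type droppedTargetView struct {
	DiscoveredLabels labelSet
}

func main() {
	var t droppedTargetView

	// The zero value is an empty set, so callers drop the nil checks that the
	// removed Get* accessors used to hide.
	fmt.Println(len(t.DiscoveredLabels.Labels)) // 0

	// Writers use plain composite literals instead of &labelpb.LabelSet{...}.
	t.DiscoveredLabels = labelSet{Labels: []label{{Name: "job", Value: "myself"}}}
	fmt.Println(t.DiscoveredLabels.Labels[0].Value) // myself
}
```

This matches the `custom.go` hunks above, where `SetLabels` and `SetDiscoveredLabels` now assign `labelpb.LabelSet{...}` values directly rather than pointers.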
@@ -41,10 +53,8 @@ message TargetsResponse { } message TargetDiscovery { - // @gotags: json:"activeTargets" - repeated ActiveTarget activeTargets = 1; - // @gotags: json:"droppedTargets" - repeated DroppedTarget droppedTargets = 2; + repeated ActiveTarget activeTargets = 1 [(gogoproto.jsontag) = "activeTargets"]; + repeated DroppedTarget droppedTargets = 2 [(gogoproto.jsontag) = "droppedTargets"]; } enum TargetHealth { @@ -54,27 +64,17 @@ enum TargetHealth { } message ActiveTarget { - // @gotags: json:"discoveredLabels" - LabelSet discoveredLabels = 1; - // @gotags: json:"labels" - LabelSet labels = 2; - // @gotags: json:"scrapePool" - string scrapePool = 3; - // @gotags: json:"scrapeUrl" - string scrapeUrl = 4; - // @gotags: json:"globalUrl" - string globalUrl = 5; - // @gotags: json:"lastError" - string lastError = 6; - // @gotags: json:"lastScrape" - Timestamp lastScrape = 7; - // @gotags: json:"lastScrapeDuration" - double lastScrapeDuration = 8; - // @gotags: json:"health" - TargetHealth health = 9; + LabelSet discoveredLabels = 1 [(gogoproto.jsontag) = "discoveredLabels", (gogoproto.nullable) = false]; + LabelSet labels = 2 [(gogoproto.jsontag) = "labels", (gogoproto.nullable) = false]; + string scrapePool = 3 [(gogoproto.jsontag) = "scrapePool"]; + string scrapeUrl = 4 [(gogoproto.jsontag) = "scrapeUrl"]; + string globalUrl = 5 [(gogoproto.jsontag) = "globalUrl"]; + string lastError = 6 [(gogoproto.jsontag) = "lastError"]; + google.protobuf.Timestamp lastScrape = 7 [(gogoproto.jsontag) = "lastScrape", (gogoproto.stdtime) = true, (gogoproto.nullable) = false]; + double lastScrapeDuration = 8 [(gogoproto.jsontag) = "lastScrapeDuration"]; + TargetHealth health = 9 [(gogoproto.jsontag) = "health"]; } message DroppedTarget { - // @gotags: json:"discoveredLabels" - LabelSet discoveredLabels = 1; + LabelSet discoveredLabels = 1 [(gogoproto.jsontag) = "discoveredLabels", (gogoproto.nullable) = false]; } diff --git a/pkg/tenancy/tenancy_test.go b/pkg/tenancy/tenancy_test.go index e4fcc1d3ac..4418fa36e4 100644 --- a/pkg/tenancy/tenancy_test.go +++ b/pkg/tenancy/tenancy_test.go @@ -135,7 +135,7 @@ func TestTenantProxyPassing(t *testing.T) { _, _ = q.LabelValues(ctx, &storepb.LabelValuesRequest{}) _, _ = q.LabelNames(ctx, &storepb.LabelNamesRequest{}) - seriesMatchers := []*storepb.LabelMatcher{ + seriesMatchers := []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "foo", Value: "bar"}, } @@ -181,7 +181,7 @@ func TestTenantProxyPassing(t *testing.T) { _, _ = q.LabelValues(ctx, &storepb.LabelValuesRequest{}) _, _ = q.LabelNames(ctx, &storepb.LabelNamesRequest{}) - seriesMatchers := []*storepb.LabelMatcher{ + seriesMatchers := []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "foo", Value: "bar"}, } diff --git a/pkg/testutil/testpromcompatibility/api_compatibility.go b/pkg/testutil/testpromcompatibility/api_compatibility.go index 19d54c8c4c..684e10ad24 100644 --- a/pkg/testutil/testpromcompatibility/api_compatibility.go +++ b/pkg/testutil/testpromcompatibility/api_compatibility.go @@ -49,7 +49,7 @@ type Alert struct { Labels labels.Labels `json:"labels"` Annotations labels.Labels `json:"annotations"` State string `json:"state"` - ActiveAt time.Time `json:"activeAt,omitempty"` + ActiveAt *time.Time `json:"activeAt,omitempty"` Value string `json:"value"` PartialResponseStrategy string `json:"partialResponseStrategy"` diff --git a/pkg/tracing/tracing_middleware/grpctesting/gogotestpb/fields.proto b/pkg/tracing/tracing_middleware/grpctesting/gogotestpb/fields.proto index 
317b9a715d..1c7546dffc 100644 --- a/pkg/tracing/tracing_middleware/grpctesting/gogotestpb/fields.proto +++ b/pkg/tracing/tracing_middleware/grpctesting/gogotestpb/fields.proto @@ -14,6 +14,8 @@ package grpc_middleware.gogotestpb; import "gogoproto/gogo.proto"; import "google/protobuf/timestamp.proto"; +option (gogoproto.gogoproto_import) = false; + option go_package = "gogotestpb"; message Metadata { diff --git a/scripts/genproto.sh b/scripts/genproto.sh index 2a9d5c664c..ad3a9da3b5 100755 --- a/scripts/genproto.sh +++ b/scripts/genproto.sh @@ -9,7 +9,6 @@ PROTOC_VERSION=${PROTOC_VERSION:-3.20.1} PROTOC_BIN=${PROTOC_BIN:-protoc} GOIMPORTS_BIN=${GOIMPORTS_BIN:-goimports} PROTOC_GEN_GOGOFAST_BIN=${PROTOC_GEN_GOGOFAST_BIN:-protoc-gen-gogofast} -PROTOC_GO_INJECT_TAG_BIN=${PROTOC_GO_INJECT_TAG_BIN:-protoc-go-inject-tag} if ! [[ "scripts/genproto.sh" =~ $0 ]]; then echo "must be run from repository root" @@ -35,19 +34,15 @@ for dir in ${DIRS}; do -I=. \ -I="${GOGOPROTO_PATH}" \ ${dir}/*.proto - ${PROTOC_GO_INJECT_TAG_BIN} -input=${dir}/*pb.go pushd ${dir} sed -i.bak -E 's/import _ \"gogoproto\"//g' *.pb.go sed -i.bak -E 's/_ \"google\/protobuf\"//g' *.pb.go - sed -i.bak -E 's/protobuf \"google\/protobuf\"/protobuf \"github.com\/gogo\/protobuf\/types\"/g' *.pb.go - sed -i.bak -E 's|rulespb "rules/rulespb"|rulespb "github.com/thanos-io/thanos/pkg/rules/rulespb"|g' *.pb.go # We cannot do Mstore/storepb/types.proto=github.com/thanos-io/thanos/pkg/store/storepb,\ due to protobuf v1 bug. # TODO(bwplotka): Consider removing in v2. sed -i.bak -E 's/\"store\/storepb\"/\"github.com\/thanos-io\/thanos\/pkg\/store\/storepb\"/g' *.pb.go sed -i.bak -E 's/\"store\/labelpb\"/\"github.com\/thanos-io\/thanos\/pkg\/store\/labelpb\"/g' *.pb.go sed -i.bak -E 's/\"store\/storepb\/prompb\"/\"github.com\/thanos-io\/thanos\/pkg\/store\/storepb\/prompb\"/g' *.pb.go - sed -i.bak -E 's/protobuf \"google\/protobuf\"/protobuf \"github.com\/gogo\/protobuf\/types\"/g' *.pb.go rm -f *.bak ${GOIMPORTS_BIN} -w *.pb.go popd @@ -63,13 +58,10 @@ for dir in ${CORTEX_DIRS}; do -I="${GOGOPROTO_PATH}" \ -I=. \ ${dir}/*.proto - ${PROTOC_GO_INJECT_TAG_BIN} -input=${dir}/*pb.go pushd ${dir} sed -i.bak -E 's/import _ \"gogoproto\"//g' *.pb.go sed -i.bak -E 's/_ \"google\/protobuf\"//g' *.pb.go - sed -i.bak -E 's|rulespb "rules/rulespb"|rulespb "github.com/thanos-io/thanos/pkg/rules/rulespb"|g' *.pb.go - sed -i.bak -E 's/protobuf \"google\/protobuf\"/protobuf \"github.com\/gogo\/protobuf\/types\"/g' *.pb.go sed -i.bak -E 's/\"cortex\/cortexpb\"/\"github.com\/thanos-io\/thanos\/internal\/cortex\/cortexpb\"/g' *.pb.go rm -f *.bak ${GOIMPORTS_BIN} -w *.pb.go diff --git a/test/e2e/rule_test.go b/test/e2e/rule_test.go index 53eedc40ba..4f918a0c5f 100644 --- a/test/e2e/rule_test.go +++ b/test/e2e/rule_test.go @@ -272,7 +272,7 @@ func rulegroupCorrectData(t *testing.T, ctx context.Context, endpoint string) { for _, g := range data.Data.Groups { testutil.Assert(t, g.EvaluationDurationSeconds > 0, "expected it to take more than zero seconds to evaluate") - testutil.Assert(t, !rulespb.TimestampToTime(g.LastEvaluation).IsZero(), "expected the rule group to be evaluated at least once") + testutil.Assert(t, !g.LastEvaluation.IsZero(), "expected the rule group to be evaluated at least once") } } @@ -831,7 +831,7 @@ func TestStatelessRulerAlertStateRestore(t *testing.T) { if alerts[0].State == rulespb.AlertState_FIRING { // The second ruler alert's active at time is the same as the previous one, // which means the alert state is restored successfully. 
- if rulespb.TimestampToTime(alertActiveAt).Unix() == rulespb.TimestampToTime(alerts[0].ActiveAt).Unix() { + if alertActiveAt.Unix() == alerts[0].ActiveAt.Unix() { return nil } else { return fmt.Errorf("alert active time is not restored") diff --git a/test/e2e/rules_api_test.go b/test/e2e/rules_api_test.go index 1319df4fab..fe1d7ee5a0 100644 --- a/test/e2e/rules_api_test.go +++ b/test/e2e/rules_api_test.go @@ -104,7 +104,7 @@ func TestRulesAPI_Fanout(t *testing.T) { Name: "TestAlert_AbortOnPartialResponse", State: rulespb.AlertState_FIRING, Query: "absent(some_metric)", - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "prometheus", Value: "ha"}, {Name: "severity", Value: "page"}, }}, @@ -120,7 +120,7 @@ func TestRulesAPI_Fanout(t *testing.T) { Name: "TestAlert_AbortOnPartialResponse", State: rulespb.AlertState_FIRING, Query: "absent(some_metric)", - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "severity", Value: "page"}, }}, Health: string(rules.HealthGood), @@ -135,7 +135,7 @@ func TestRulesAPI_Fanout(t *testing.T) { Name: "TestAlert_WarnOnPartialResponse", State: rulespb.AlertState_FIRING, Query: "absent(some_metric)", - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "severity", Value: "page"}, }}, Health: string(rules.HealthGood), @@ -151,7 +151,7 @@ func TestRulesAPI_Fanout(t *testing.T) { Name: "TestAlert_WithLimit", State: rulespb.AlertState_INACTIVE, Query: `promhttp_metric_handler_requests_total`, - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "severity", Value: "page"}, }}, Health: string(rules.HealthBad), @@ -184,7 +184,7 @@ func ruleAndAssert(t *testing.T, ctx context.Context, addr, typ string, want []* } for ig, g := range res { - res[ig].LastEvaluation = nil + res[ig].LastEvaluation = time.Time{} res[ig].EvaluationDurationSeconds = 0 res[ig].Interval = 0 res[ig].PartialResponseStrategy = 0 diff --git a/test/e2e/targets_api_test.go b/test/e2e/targets_api_test.go index 3b87681fc5..6a446cc1e4 100644 --- a/test/e2e/targets_api_test.go +++ b/test/e2e/targets_api_test.go @@ -64,7 +64,7 @@ func TestTargetsAPI_Fanout(t *testing.T) { targetAndAssert(t, ctx, q.Endpoint("http"), "", &targetspb.TargetDiscovery{ ActiveTargets: []*targetspb.ActiveTarget{ { - DiscoveredLabels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + DiscoveredLabels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "__address__", Value: "localhost:9090"}, {Name: "__metrics_path__", Value: "/metrics"}, {Name: "__scheme__", Value: "http"}, @@ -73,7 +73,7 @@ func TestTargetsAPI_Fanout(t *testing.T) { {Name: "job", Value: "myself"}, {Name: "prometheus", Value: "ha"}, }}, - Labels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + Labels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "instance", Value: "localhost:9090"}, {Name: "job", Value: "myself"}, {Name: "prometheus", Value: "ha"}, @@ -85,7 +85,7 @@ func TestTargetsAPI_Fanout(t *testing.T) { }, DroppedTargets: []*targetspb.DroppedTarget{ { - DiscoveredLabels: &labelpb.LabelSet{Labels: []*labelpb.Label{ + DiscoveredLabels: labelpb.LabelSet{Labels: []labelpb.Label{ {Name: "__address__", Value: "localhost:80"}, {Name: "__metrics_path__", Value: "/metrics"}, {Name: "__scheme__", Value: "http"}, @@ -120,7 +120,7 @@ func targetAndAssert(t *testing.T, ctx context.Context, addr, state string, want } for it := range 
res.ActiveTargets { - res.ActiveTargets[it].LastScrape = nil + res.ActiveTargets[it].LastScrape = time.Time{} res.ActiveTargets[it].LastScrapeDuration = 0 res.ActiveTargets[it].GlobalUrl = "" }
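Beyond the targets API, the earlier hunks in `pkg/store/tsdb_selector.go` and the store/tenancy tests make the same pointer-to-value move for matchers, returning and accepting `[]storepb.LabelMatcher` rather than `[]*storepb.LabelMatcher`. A minimal sketch of that slice shape, using a hypothetical `matcher` stand-in and a simplified `regexMatchersFor` helper (not the real `MatchersForLabelSets`):

```go
package main

import (
	"fmt"
	"sort"
	"strings"
)

// matcher is a hypothetical stand-in for storepb.LabelMatcher, used only to
// sketch the []storepb.LabelMatcher (value slice) shape the hunks above adopt.
type matcher struct {
	Name, Value string
}

// regexMatchersFor mimics the output shape of MatchersForLabelSets: one
// regex-style matcher per label name, with values joined by "|".
func regexMatchersFor(valuesByName map[string][]string) []matcher {
	matchers := make([]matcher, 0, len(valuesByName))
	for name, values := range valuesByName {
		sort.Strings(values)
		// Elements are appended by value: no &matcher{...}, and they live in
		// the slice's backing array instead of behind per-element pointers.
		matchers = append(matchers, matcher{Name: name, Value: strings.Join(values, "|")})
	}
	sort.Slice(matchers, func(i, j int) bool { return matchers[i].Name < matchers[j].Name })
	return matchers
}

func main() {
	got := regexMatchersFor(map[string][]string{"a": {"2", "1"}})
	fmt.Printf("%+v\n", got) // [{Name:a Value:1|2}]
}
```

With the value slice, literal matchers in the updated tests are written without `&`, exactly as the `SeriesRequest.Matchers` hunks above show.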