From 953ef772256329114d4037cc0d6d6402ed70a9e2 Mon Sep 17 00:00:00 2001 From: faceair Date: Tue, 17 Nov 2020 15:02:41 +0800 Subject: [PATCH] init promate project --- .gitignore | 4 + LICENSE.txt | 201 ++++ Makefile | 13 + README.md | 36 + cmd/matecarbon/main.go | 463 +++++++++ cmd/mateinsert/main.go | 183 ++++ cmd/mateinsert/main_test.go | 30 + cmd/matequery/main.go | 84 ++ docs/arch.png | Bin 0 -> 27160 bytes examples/carbonapi.yaml | 23 + examples/matecarbon.yaml | 16 + go.mod | 18 + go.sum | 57 ++ licenses/m3.license.txt | 201 ++++ licenses/metricsql.license.txt | 190 ++++ mateql/aggr.go | 55 + mateql/aggr_test.go | 30 + mateql/binary_op.go | 205 ++++ mateql/binary_op_test.go | 125 +++ mateql/binaryop/funcs.go | 104 ++ mateql/doc.go | 7 + mateql/lexer.go | 644 ++++++++++++ mateql/lexer_test.go | 501 +++++++++ mateql/parser.go | 1744 ++++++++++++++++++++++++++++++++ mateql/parser_example_test.go | 34 + mateql/parser_test.go | 684 +++++++++++++ mateql/regexp_cache.go | 99 ++ mateql/rollup.go | 75 ++ mateql/transform.go | 90 ++ mateql/utils.go | 12 + mateql/utils_example_test.go | 26 + mateql/utils_test.go | 43 + prometheus/converter.go | 129 +++ prometheus/converter_test.go | 259 +++++ prometheus/glob.go | 138 +++ prometheus/glob_test.go | 127 +++ prometheus/mateql.go | 63 ++ prometheus/mateql_test.go | 76 ++ prometheus/response.go | 62 ++ prometheus/response_test.go | 42 + 40 files changed, 6893 insertions(+) create mode 100644 .gitignore create mode 100644 LICENSE.txt create mode 100644 Makefile create mode 100644 README.md create mode 100644 cmd/matecarbon/main.go create mode 100644 cmd/mateinsert/main.go create mode 100644 cmd/mateinsert/main_test.go create mode 100644 cmd/matequery/main.go create mode 100644 docs/arch.png create mode 100644 examples/carbonapi.yaml create mode 100644 examples/matecarbon.yaml create mode 100644 go.mod create mode 100644 go.sum create mode 100644 licenses/m3.license.txt create mode 100644 licenses/metricsql.license.txt create 
mode 100755 mateql/aggr.go create mode 100644 mateql/aggr_test.go create mode 100644 mateql/binary_op.go create mode 100644 mateql/binary_op_test.go create mode 100644 mateql/binaryop/funcs.go create mode 100644 mateql/doc.go create mode 100644 mateql/lexer.go create mode 100644 mateql/lexer_test.go create mode 100755 mateql/parser.go create mode 100644 mateql/parser_example_test.go create mode 100755 mateql/parser_test.go create mode 100755 mateql/regexp_cache.go create mode 100755 mateql/rollup.go create mode 100755 mateql/transform.go create mode 100644 mateql/utils.go create mode 100644 mateql/utils_example_test.go create mode 100644 mateql/utils_test.go create mode 100644 prometheus/converter.go create mode 100644 prometheus/converter_test.go create mode 100644 prometheus/glob.go create mode 100644 prometheus/glob_test.go create mode 100644 prometheus/mateql.go create mode 100644 prometheus/mateql_test.go create mode 100644 prometheus/response.go create mode 100644 prometheus/response_test.go diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..6b358a8 --- /dev/null +++ b/.gitignore @@ -0,0 +1,4 @@ +.DS_Store +.idea +bin +coverage.out diff --git a/LICENSE.txt b/LICENSE.txt new file mode 100644 index 0000000..c4a48ba --- /dev/null +++ b/LICENSE.txt @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright (c) 2020 Zhizhesihai (Beijing) Technology Limited. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..bf0c4bc --- /dev/null +++ b/Makefile @@ -0,0 +1,13 @@ +BIN_PACKAGES = ../cmd/matecarbon ../cmd/mateinsert ../cmd/matequery + +build: + mkdir -p bin && cd bin && for pkg in $(BIN_PACKAGES); do GOOS=linux GOARCH=amd64 go build $$pkg; done + +lint: + golangci-lint run ./... + +test: lint + go test -coverprofile=coverage.out ./... + +coverage: lint + gocov test ./... | gocov-html > coverage.html && open coverage.html diff --git a/README.md b/README.md new file mode 100644 index 0000000..2386e98 --- /dev/null +++ b/README.md @@ -0,0 +1,36 @@ +# Promate - Graphite On VictoriaMetrics + +Promate is a high-performance graphite storage solution. + +Compare with Whisper: + +* 10x faster on average; 60-100x faster for complex, long range queries +* 90% storage space reduction, 99.99% IOPS reduction +* 80% reduction in memory and CPU overhead with constant query pressure + +This is a comparison of performance from our production environment. Welcome to help us design tests that give reproducible benchmark results. 
+ +### Features + +* Higher performance with lower cpu, memory, and storage usage, benefit from the excellent [VictoriaMetrics](https://github.com/VictoriaMetrics/VictoriaMetrics) +* Supports almost all graphite functions, benefit from compatible with [carbonapi](https://github.com/go-graphite/carbonapi) +* MateQL language, support query graphite metrics with PromQL +* Real-time aggregation, no loss of accuracy of historical metrics + +### Architecture +![Overview](docs/arch.png) + +### Example Config + +1. [carbonapi.yaml](examples/carbonapi.yaml) +1. [matecarbon.yaml](examples/matecarbon.yaml) + +### Thanks + +* [VictoriaMetrics](https://github.com/VictoriaMetrics/VictoriaMetrics) & [metricsql](https://github.com/VictoriaMetrics/metricsql) +* [carbonapi](https://github.com/go-graphite/carbonapi) +* [m3](https://github.com/m3db/m3) + +### License + +[Apache License 2.0](LICENSE.txt) diff --git a/cmd/matecarbon/main.go b/cmd/matecarbon/main.go new file mode 100644 index 0000000..329e84a --- /dev/null +++ b/cmd/matecarbon/main.go @@ -0,0 +1,463 @@ +package main + +import ( + "context" + "flag" + "fmt" + "io" + "io/ioutil" + "math" + "net" + "net/http" + "regexp" + "sync" + "time" + + "github.com/go-chi/chi" + "github.com/go-chi/chi/middleware" + protov3 "github.com/go-graphite/protocol/carbonapi_v3_pb" + "github.com/imroc/req" + jsoniter "github.com/json-iterator/go" + log "github.com/sirupsen/logrus" + "github.com/zhihu/promate/prometheus" + "gopkg.in/yaml.v3" +) + +var defaultMaxDatapoints float64 = 1024 +var json = jsoniter.ConfigCompatibleWithStandardLibrary + +type RollupConfig struct { + MatchSuffix string `yaml:"match_suffix"` + MatchSuffixRe *regexp.Regexp `yaml:"-"` + RollupFunc string `yaml:"rollup_func"` +} + +type Config struct { + Listen string `yaml:"listen"` + LogLevel log.Level `yaml:"-"` + StatsdFlushInterval float64 `yaml:"statsd_flush_interval"` + PrometheusURL string `yaml:"prometheus_url"` + PrometheusMaxBody int64 `yaml:"prometheus_max_body"` + 
Rollups []*RollupConfig `yaml:"rollups"` + DefaultRollupFunc string `yaml:"default_rollup_func"` +} + +func LoadConfig(configPath string) (*Config, error) { + body, err := ioutil.ReadFile(configPath) + if err != nil { + return nil, err + } + config := new(Config) + err = yaml.Unmarshal(body, &config) + if err != nil { + return nil, err + } + for _, rollup := range config.Rollups { + rollup.MatchSuffixRe, err = regexp.Compile(fmt.Sprintf("%s$", rollup.MatchSuffix)) + if err != nil { + return nil, err + } + } + return config, err +} + +func main() { + var configPath string + flag.StringVar(&configPath, "c", "matecarbon.yaml", "config file path") + flag.Parse() + + config, err := LoadConfig(configPath) + if err != nil { + log.Fatal(err) + } + + log.SetFormatter(&log.TextFormatter{ + FullTimestamp: true, + TimestampFormat: "2006-01-02 15:04:05", + }) + + wrapper := newWrapper(config) + + router := chi.NewRouter() + + router.Use(middleware.Recoverer) + router.Use(middleware.Logger) + router.Use(middleware.RealIP) + + router.Get("/check_health", func(w http.ResponseWriter, r *http.Request) { + _, _ = w.Write([]byte("zhi~")) + }) + + router.Mount("/debug", middleware.Profiler()) + + router.Get("/metrics/find/", func(w http.ResponseWriter, r *http.Request) { + body, err := ioutil.ReadAll(r.Body) + if err != nil { + http.Error(w, err.Error(), http.StatusBadRequest) + return + } + multiRequest := &protov3.MultiGlobRequest{} + err = multiRequest.Unmarshal(body) + if err != nil { + http.Error(w, err.Error(), http.StatusBadRequest) + return + } + + multiResponse, err := wrapper.Find(r.Context(), multiRequest) + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + + blob, err := multiResponse.Marshal() + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + w.Header().Set("Content-Type", "application/x-protobuf") + _, _ = w.Write(blob) + }) + + router.Get("/render/", func(w http.ResponseWriter, r 
*http.Request) { + body, err := ioutil.ReadAll(r.Body) + if err != nil { + http.Error(w, err.Error(), http.StatusBadRequest) + return + } + multiRequest := &protov3.MultiFetchRequest{} + err = multiRequest.Unmarshal(body) + if err != nil { + http.Error(w, err.Error(), http.StatusBadRequest) + return + } + + multiResponse, err := wrapper.Render(r.Context(), multiRequest) + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + + blob, err := multiResponse.Marshal() + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + w.Header().Set("Content-Type", "application/x-protobuf") + _, _ = w.Write(blob) + }) + + log.Fatal(http.ListenAndServe(config.Listen, router)) +} + +func newWrapper(config *Config) *Wrapper { + request := req.New() + request.SetClient(&http.Client{ + Transport: &http.Transport{ + DialContext: (&net.Dialer{ + Timeout: 1 * time.Second, + KeepAlive: 1 * time.Second, + }).DialContext, + MaxIdleConns: 0, + MaxIdleConnsPerHost: 0, + IdleConnTimeout: 30 * time.Second, + TLSHandshakeTimeout: 1 * time.Second, + ExpectContinueTimeout: 1 * time.Second, + }, + // Don't worry about the request taking too long. + // It will end when the user's request context cancel or VictoriaMetrics timeout. 
+ Timeout: time.Minute * 10, + }) + return &Wrapper{ + config: config, + request: request, + } +} + +type Wrapper struct { + config *Config + request *req.Req +} + +func (w *Wrapper) Find(ctx context.Context, multiRequest *protov3.MultiGlobRequest) (multiResponse *protov3.MultiGlobResponse, err error) { + var wg sync.WaitGroup + var lock sync.Mutex + + multiResponse = &protov3.MultiGlobResponse{ + Metrics: make([]protov3.GlobResponse, 0), + } + + for _, target := range multiRequest.Metrics { + wg.Add(1) + go func(target string) { + logger := log.WithFields(log.Fields{ + "type": "find", + "path": target, + }) + + startTime := time.Now() + defer func() { + wg.Done() + + logger.Infof("took %s", time.Since(startTime)) + }() + + // We can't query the full amount of metrics, which can cause serious performance issues. + // https://github.com/VictoriaMetrics/VictoriaMetrics/issues/329#issuecomment-590773944 + if target == "*" { + logger.Warnf("can't query full amount metrics") + return + } + + // Too large a request will cause the prometheus backend to fail to respond. + // This is usually caused by the automatic expansion of the All option of the Grafana variable. + // Most of the time you can customize the value of the All option to be * . + // Now the carbonapi_v3_pb protocol doesn't return custom errors, so it's ignored here. + if len(target) > 8192 { + logger.Errorf("path too long") + return + } + + var params req.Param + + name, filters := prometheus.ConvertGraphiteTarget(target, false) + selector := filters.Build(name) + + prefix, query, fast := prometheus.ConvertQueryLabel(target) + // In VictoriaMetrics, query is faster without query params. + // We can use this approach for the second segment of our graphite metrics. 
+ // https://github.com/VictoriaMetrics/VictoriaMetrics/issues/359#issuecomment-596098714 + if !fast { + params = req.Param{ + "start": multiRequest.StartTime, + "end": multiRequest.StopTime, + "match[]": selector, + } + } + + resp, err := w.request.Get(fmt.Sprintf("%s/api/v1/label/%s/values", w.config.PrometheusURL, query), ctx, params) + if err != nil { + logger.Errorf("request failed %s", err) + return + } + + defer func() { _ = resp.Response().Body.Close() }() + + // We restrict particularly large responses to queries that can use MateQL. + body, err := ioutil.ReadAll(io.LimitReader(resp.Response().Body, w.config.PrometheusMaxBody)) + if err != nil { + logger.Errorf("read response failed %s", err) + return + } + + data := new(prometheus.ValuesResponse) + err = json.Unmarshal(body, data) + if err != nil { + logger.Errorf("unmarshal %s failed %s", string(body), err) + return + } + + metric := protov3.GlobResponse{ + Name: target, + Matches: make([]protov3.GlobMatch, 0, len(data.Data)), + } + for _, label := range data.Data { + metric.Matches = append(metric.Matches, protov3.GlobMatch{ + IsLeaf: false, + Path: prefix + label, + }) + } + + lock.Lock() + multiResponse.Metrics = append(multiResponse.Metrics, metric) + lock.Unlock() + }(target) + } + + wg.Wait() + return multiResponse, nil +} + +func (w *Wrapper) Render(ctx context.Context, multiRequest *protov3.MultiFetchRequest) (multiResponse *protov3.MultiFetchResponse, err error) { + var wg sync.WaitGroup + var locker sync.Mutex + + multiResponse = &protov3.MultiFetchResponse{ + Metrics: make([]protov3.FetchResponse, 0), + } + for _, request := range multiRequest.Metrics { + wg.Add(1) + go func(request protov3.FetchRequest) { + logger := log.WithFields(log.Fields{ + "type": "render", + "start": request.StartTime, + "end": request.StopTime, + "path": request.PathExpression, + "max_data_points": request.MaxDataPoints, + }) + + startTime := time.Now() + defer func() { + wg.Done() + + logger.Infof("took %s", 
time.Since(startTime)) + }() + + // For the same reasons as above. + if len(request.PathExpression) > 8192 { + logger.Errorf("path too long") + return + } + + name, filters := prometheus.ConvertGraphiteTarget(request.PathExpression, true) + selector := filters.Build(name) + + // The default value is used when the request does not take the MaxDataPoints. + // Most of these requests come from scripts, not Grafana. + maxDataPoints := float64(request.MaxDataPoints) + if maxDataPoints == 0 { + maxDataPoints = defaultMaxDatapoints + } + timeRange := float64(request.StopTime - request.StartTime) + // Try to set step to a multiple of the statsd flush interval. + // Otherwise the returned result will be jittery. + multipleInterval := math.Ceil(timeRange/maxDataPoints/w.config.StatsdFlushInterval) * w.config.StatsdFlushInterval + step := math.Max(multipleInterval, w.config.StatsdFlushInterval) + window := fmt.Sprintf("%ds", int(step)) + + // Similar to carbon's storage aggregation strategy, but in real time. https://graphite.readthedocs.io/en/latest/config-carbon.html#storage-aggregation-conf + // Here the aggregation strategy is chosen based on queries rather than stored metrics. + // So there is no way to configure it based on the full metrics name, only the suffix. + // In future the aggregation strategy needs to be determined based on the incoming consolidateBy function, not just the pre-configuration file. + // Which would require the carbonapi_v3_pb protocol to pass the aggregation strategy. 
+ var query string + for _, rollup := range w.config.Rollups { + if rollup.MatchSuffixRe.MatchString(request.PathExpression) { + query = fmt.Sprintf(`%s(%s[%ds])`, rollup.RollupFunc, selector, int(step)) + break + } + } + if query == "" { + query = fmt.Sprintf(`%s(%s[%ds])`, w.config.DefaultRollupFunc, selector, int(step)) + } + + // In graphite, we do the downscaling in step window size + // This is different in VictoriaMetrics, so we need to specify the window size for the calculation with max_lookback. + // https://github.com/VictoriaMetrics/VictoriaMetrics/issues/549#issuecomment-653643283 + params := req.Param{ + "query": query, + "start": request.StartTime, + "end": request.StopTime, + "step": window, + "max_lookback": window, + } + + resp, err := w.request.Get(fmt.Sprintf("%s/api/v1/query_range", w.config.PrometheusURL), ctx, params) + if err != nil { + logger.Errorf("request failed %s", err) + return + } + + defer func() { _ = resp.Response().Body.Close() }() + + // We restrict particularly large responses to queries that can use MateQL. + body, err := ioutil.ReadAll(io.LimitReader(resp.Response().Body, w.config.PrometheusMaxBody)) + if err != nil { + logger.Errorf("read response failed %s", err) + return + } + + data := new(prometheus.MatrixResponse) + err = json.Unmarshal(body, data) + if err != nil { + logger.Errorf("unmarshal %s failed %s", string(body), err) + return + } + + for _, m := range data.Data.Result { + // Sometimes the VictoriaMetrics adjustment logic return empty values that we can just ignore. + if len(m.Values) == 0 { + continue + } + + target := prometheus.ConvertPrometheusMetric(name, m.Metric) + if target == "" { + logger.Errorf("convert name:%s metric:%s to target failed", name, m.Metric) + continue + } + + start := m.Values[0].Timestamp + end := m.Values[len(m.Values)-1].Timestamp + count := (end-start)/step + 1 + + // The Prometheus response data is not continuous, we populate all intervals with Nan values. 
+ values := make([]float64, int(count)) + var i, j int + for ; i < len(values); i++ { + values[i] = math.NaN() + searchValue: + for ; j < len(m.Values); j++ { + if start+float64(i)*step != m.Values[j].Timestamp { + break searchValue + } + values[i] = m.Values[j].Value + } + } + + // Align the start and end points of the metric with the time of the request. + // Otherwise the division calculation in carbonapi will fail. + metricStart, metricEnd, metricStep := int64(start), int64(end), int64(step) + requestStart, requestEnd := request.StartTime, request.StopTime + if metricStart < requestStart { + startStep := int64(math.Ceil(float64(requestStart-metricStart) / float64(metricStep))) + metricStart = metricStart + startStep*metricStep + values = values[startStep:] + } else { + startStep := (metricStart - requestStart) / metricStep + metricStart = metricStart - startStep*metricStep + values = append(makeNanArr(startStep), values...) + } + if metricEnd > requestEnd { + stopStep := int64(math.Ceil(float64(metricEnd-requestEnd) / float64(metricStep))) + metricEnd = metricEnd - stopStep*metricStep + values = values[:int64(len(values))-stopStep] + } else { + stopStep := (requestEnd-requestStart)/metricStep + 1 - int64(len(values)) + metricEnd = metricEnd + stopStep*metricStep + values = append(values, makeNanArr(stopStep)...) + } + + // ConsolidationFunc is the consolidation strategy chosen by carbonapi to avoid exceeding MaxDataPoints in response to data. + // It can be modified by the function consolidateBy. https://graphite.readthedocs.io/en/latest/functions.html#graphite.render.functions.consolidateBy + // But now the query step is dynamic and response points must not exceed MaxDataPoints, so this configuration or function becomes unnecessary. 
+ consolidationFunc := "avg" + + metric := protov3.FetchResponse{ + Name: target, + PathExpression: request.PathExpression, + RequestStartTime: requestStart, + RequestStopTime: requestEnd, + ConsolidationFunc: consolidationFunc, + StartTime: metricStart, + StopTime: metricEnd, + StepTime: metricStep, + Values: values, + } + + locker.Lock() + multiResponse.Metrics = append(multiResponse.Metrics, metric) + locker.Unlock() + } + }(request) + } + + wg.Wait() + return multiResponse, nil +} + +func makeNanArr(count int64) []float64 { + arr := make([]float64, count) + for i := int64(0); i < count; i++ { + arr[i] = math.NaN() + } + return arr +} diff --git a/cmd/mateinsert/main.go b/cmd/mateinsert/main.go new file mode 100644 index 0000000..8ca1dbf --- /dev/null +++ b/cmd/mateinsert/main.go @@ -0,0 +1,183 @@ +package main + +import ( + "bufio" + "bytes" + "flag" + "io" + "net" + "net/http" + _ "net/http/pprof" + "strconv" + "sync" + + log "github.com/sirupsen/logrus" +) + +func init() { + // Register an http server with a random port to pprof + go func() { _ = http.ListenAndServe(":0", nil) }() +} + +func main() { + var logLevel, listenAddr, remoteWriteAddr string + flag.StringVar(&logLevel, "logLevel", "info", "log level") + flag.StringVar(&listenAddr, "listenAddr", ":2004", "listen address") + flag.StringVar(&remoteWriteAddr, "remoteWriteAddr", "127.0.0.1:2003", "VictoriaMetrics graphite listen address https://github.com/VictoriaMetrics/VictoriaMetrics#how-to-send-data-from-graphite-compatible-agents-such-as-statsd") + flag.Parse() + + level, err := log.ParseLevel(logLevel) + if err != nil { + log.Fatal(err) + } + log.SetLevel(level) + + listener, err := net.Listen("tcp", listenAddr) + if err != nil { + log.Fatal(err) + } + + readerPool := &sync.Pool{ + New: func() interface{} { + return bufio.NewReaderSize(nil, 64*1024) + }, + } + writerPool := &sync.Pool{ + New: func() interface{} { + return bufio.NewWriterSize(nil, 64*1024) + }, + } + builderPool := &sync.Pool{ + New: 
func() interface{} { + return bytes.NewBuffer(make([]byte, 1024)) + }, + } + + for { + conn, err := listener.Accept() + if err != nil { + log.Errorf("accept failed %s", err) + continue + } + + go func(localConn net.Conn) { + defer func() { + if r := recover(); r != nil { + log.Errorf("catch panic %s", r) + } + }() + + remoteConn, err := net.Dial("tcp", remoteWriteAddr) + if err != nil { + log.Errorf("dial failed %s", err) + return + } + + defer func() { _ = localConn.Close() }() + defer func() { _ = remoteConn.Close() }() + + reader := readerPool.Get().(*bufio.Reader) + defer readerPool.Put(reader) + reader.Reset(localConn) + + writer := writerPool.Get().(*bufio.Writer) + defer writerPool.Put(writer) + writer.Reset(remoteConn) + + defer func() { _ = writer.Flush() }() + + builder := builderPool.Get().(*bytes.Buffer) + defer builderPool.Put(builder) + + var next []byte + for { + line, isContinue, err := reader.ReadLine() + for isContinue && err == nil { + next, isContinue, err = reader.ReadLine() + line = append(line, next...) + } + // carbon-c-relay closes the tcp connection directly after sending. + // So io.EOF errors mean that it is closed properly. 
+ if err == io.EOF { + return + } + + if err != nil { + log.Errorf("read from %s failed %s", localConn.RemoteAddr(), err) + return + } + + builder.Reset() + success := convertGraphite(builder, line) + if !success { + log.Debugf("ignore invalid metric %s", line) + continue + } + + _, err = writer.Write(builder.Bytes()) + if err != nil { + log.Errorf("write to %s failed %s", remoteConn.RemoteAddr(), err) + return + } + + if writer.Available() < 8192 { + err = writer.Flush() + } + if err != nil { + log.Errorf("write to %s failed %s", remoteConn.RemoteAddr(), err) + return + } + } + }(conn) + } +} + +func convertGraphite(builder *bytes.Buffer, line []byte) bool { + i1 := bytes.IndexByte(line, ' ') + if i1 < 0 { + return false + } + i2 := bytes.IndexByte(line[i1+1:], ' ') + if i2 < 0 { + return false + } + i3 := bytes.IndexByte(line[i1+1+i2+1:], ' ') + if i3 > 0 { + return false + } + + metricName := line[:i1] + metricValue := line[i1+1 : i1+1+i2] + metricTime := line[i1+1+i2+1:] + + labels := bytes.Split(metricName, []byte(".")) + if len(labels) < 2 { + return false + } + + // We will try to put the first segment of graphite in the label that follows, and the `-` is not allowed in the Label. 
+ var prefixLabel []byte + if !bytes.ContainsRune(labels[0], '-') { + prefixLabel = labels[0] + } else { + prefixLabel = bytes.ReplaceAll(labels[0], []byte("-"), []byte("_")) + } + builder.Write(prefixLabel) + + for i := 1; i < len(labels); i++ { + builder.Write([]byte(";__")) + builder.Write(prefixLabel) + builder.Write([]byte("_g")) + builder.WriteString(strconv.Itoa(i)) + builder.Write([]byte("__=")) + builder.Write(labels[i]) + } + + builder.WriteByte(' ') + builder.Write(metricValue) + builder.WriteByte(' ') + builder.Write(metricTime) + builder.WriteByte('\n') + + return true +} diff --git a/cmd/mateinsert/main_test.go b/cmd/mateinsert/main_test.go new file mode 100644 index 0000000..a4e5430 --- /dev/null +++ b/cmd/mateinsert/main_test.go @@ -0,0 +1,30 @@ +package main + +import ( + "bytes" + "testing" + + "github.com/stretchr/testify/assert" +) + +func Test_convertGraphite(t *testing.T) { + var success bool + builder := bytes.NewBuffer(make([]byte, 1024)) + + builder.Reset() + success = convertGraphite(builder, []byte("a 1 1")) + assert.False(t, success) + + builder.Reset() + success = convertGraphite(builder, []byte("a.b.c 11")) + assert.False(t, success) + + builder.Reset() + success = convertGraphite(builder, []byte("a.b.c 1 1 1")) + assert.False(t, success) + + builder.Reset() + success = convertGraphite(builder, []byte("a.b.c 1 1")) + assert.True(t, success) + assert.Equal(t, "a;__a_g1__=b;__a_g2__=c 1 1\n", builder.String()) +} diff --git a/cmd/matequery/main.go b/cmd/matequery/main.go new file mode 100644 index 0000000..4bb10d4 --- /dev/null +++ b/cmd/matequery/main.go @@ -0,0 +1,84 @@ +package main + +import ( + "flag" + "net/http" + "net/http/httputil" + _ "net/http/pprof" + "net/url" + "strings" + + "github.com/go-chi/chi" + "github.com/go-chi/chi/middleware" + log "github.com/sirupsen/logrus" + "github.com/zhihu/promate/prometheus" +) + +func main() { + var logLevel, listenAddr, prometheusURL string + flag.StringVar(&logLevel, "logLevel", "info", 
"log level") + flag.StringVar(&listenAddr, "listenAddr", ":8481", "listen address") + flag.StringVar(&prometheusURL, "prometheusURL", "", "prometheus query address") + flag.Parse() + + level, err := log.ParseLevel(logLevel) + if err != nil { + log.Fatal(err) + } + log.SetLevel(level) + + target, err := url.Parse(prometheusURL) + if err != nil { + log.Fatal(err) + } + + router := chi.NewRouter() + + router.Use(middleware.Recoverer) + router.Use(middleware.Logger) + router.Use(middleware.RealIP) + + router.Get("/check_health", func(w http.ResponseWriter, r *http.Request) { + _, _ = w.Write([]byte("zhi~")) + }) + + router.Mount("/debug", middleware.Profiler()) + + router.Handle("/*", &httputil.ReverseProxy{Director: func(req *http.Request) { + req.Host = target.Host + req.URL.Host = target.Host + req.URL.Scheme = target.Scheme + req.URL.Path = singleJoiningSlash(target.Path, req.URL.Path) + reqQuery := req.URL.Query() + for _, key := range []string{"query", "match[]"} { + if queries, ok := reqQuery[key]; ok { + reqQuery.Del(key) + for _, query := range queries { + mateQuery, err := prometheus.CovertMateQuery(query, key == "query") + if err != nil { + reqQuery.Add(key, query) + log.Errorf("covert %s failed %s", query, err) + } else { + reqQuery.Add(key, mateQuery) + log.Infof("covert %s -> %s", query, mateQuery) + } + } + } + } + req.URL.RawQuery = reqQuery.Encode() + }}) + + log.Fatal(http.ListenAndServe(listenAddr, router)) +} + +func singleJoiningSlash(a, b string) string { + aslash := strings.HasSuffix(a, "/") + bslash := strings.HasPrefix(b, "/") + switch { + case aslash && bslash: + return a + b[1:] + case !aslash && !bslash: + return a + "/" + b + } + return a + b +} diff --git a/docs/arch.png b/docs/arch.png new file mode 100644 index 0000000000000000000000000000000000000000..5594be63381243118439f40a5fbf39eadff2bd2f GIT binary patch literal 27160 zcmd431yEeyvo5-EcbDK2oCp@&H3SRpB)GdvumOSxi(r9ZK@;4again|-QC@JYkvQ8 z@4Kh!)T?^uoOj<%RWg~GJ$oo^n|R 
zOqtszpX8@i_QbApxM0wo$Wuz3tMmQ%!{uJ-q&&iMlSWT2)lbjD!RXvK zhfyRYB`a!bdW83zLNAZkJIBUIp@j5w*r3=oHf_KOfy(pCKMGnpmA&DGxE!3EU5)!K zG1{g2i(n+(JColk9j5tZWMy-*v!VV}u1<@1(TM$)Yh0e@6GH8;x*;PYT9Aah`%`}V ziD1QKmc^j^Yg)*sZeyfK)BFDR{BX{1pI$_STG(?3qC?9=!6G$1TKS>d?8|34m>Ij; zpSJung`?r@Y;QKk=Xh=LbG(?8f&#jflob1^*xCMke6!!V`Pf&zi?t6w_L;i=|WX|sHUA?qH*kD08OgjT*unrfX6f<|`_AIrYDz zoA@81Fe!GFx87m8xw$=coFPZUr5iLn(>QPQB<}Zkdh`6w{gb4Nh_|YDQ zL1b!YhXpq4EtLRnZ#*p;v@=zW_28L%aBZys)HXZ&0Ekih+3vK*uISyxaz$-zWO6dm z)!A;0W`XL}GdLpp*vePvVX12D`1dMWR)6-Pp;o&L1&E@4hZ{ECFSXl|{>XL!W z77?pzSV{`X$B!R*J$H1$b`^AOH@I)UsdroN=;%OXPr-zXGY~rnl8(S`Zwa_|pQaIX zhCGt;Y+O$8N|5flbUtw@Vp%-=iT*zf*YK|vuHAJM7lFkz9rR*T`rIJgUPVHRI>!hebvG zF#tOjv9n{Jo}O;H`ZMKXtEAlB>{a_46yNDN@twqvAK!lbNUf`@3qj@<7K@vk7(_%w z6Vua-?CeTT*M)@-!T)u1*MayOFg%5Z!chzi47$3yvh}`MHh2Wx9HOD((HJh;?FZ4G zot>$bjOsSH7diR_28ub(HR+kh^(Qea59P{Y`QIG&0@at4kwISjm4O5$Fk!@tT|K!3QEt#g_Dz$1FZ=0aAK^)+tiE1O)=BuOM()W6~NZ=&|+AF-}EWn-rH;?5#YFPhQ6}K1!TlzkVs*6lj+q zK(h@V($3DjRaI3G1O%|RNV6dOk2Smh?yZQZsF|Hz%H3z1|uLjpKPcDt;GhHmLCKVCLaD{AHD%QLgmyH!mOOyHaABDgmUB0 zXEgm<$Jn*sA1r{~N*DFx|E|wPDd6}vJ3HHT+HSInd1-0sU|5-_{nsz-v9U4l)|{MVA_mJm^-C;G$ommR{0 zA2f7z3mb_qjP*-N!Chs5(Z>!9f9~nYkBNz?H0$KS#LC(^FiReK$zhWyZD~Hy2?aM4z6>1efmhMmC&`C&43;}{d@tvEYSm@)jlnh=eFu3rw$)Br> zgKe>#)#3k){T86~jci!NHqHj{Tp1d8Gyv5Pq|*N2p{Bj|+iWpnv!f9h@OO z-0(BA&_6IR(CPB(3R7IXHN2-R9JWOZ29YRj$HzRMDzb6UeYNWA%@4? 
z^j*`?(+J!Bwh*M|GUH}UJiK^gU=1(JbEZ(}m=t~IgLetQn1|8n=SY863{CV#!`{YA z%l^kp`AUt9$GEG~qW7&4D#55GSn5}51uwGXoSiEw`{{ChqyObvgr2+5GeB+Nh^Tbm z)X2-r%LZ~tE*>ZX&I$->^xE(F^87=>yLVv&pM))TCMq|Ea*%R!bK!mnGBUF1Xz@$9 zW;Hc6LC}2QHJ9hk1OhmM)-(NZ8Rgy%?b>aRmFl(ziUJ*9SX>OLoe!jx#l>Y9a;(hGL5_d?$H%`50g{WWcGUOiW4|Jv5|7KupYZ zbaW)6rS*=S-#)$0WjSKVwwgguQ1>k*pAtHK7dWV82Qx*xzuWxn0!Cj}O6rYnt>Y(h zUejRRM$gi|{(d4fJR0J7(X;eA|H~?TDcEeISIi@e9vC>M=)>@@&-cl|kwdDdr?+&p z+7I`E{O>N;83Y7$J_>uOp6tyU%rtn!z}Jj{L8uP?o%z5=Azv@Q=|-<|SZplGsceOVsQP43L21jKuoDr_)p%%BSd%nyxOa z+HRbIi3z#BzMfY=K;F`lX-2qa0aCFoG zTv(Tz3m5;>Q5|4`B*3{6f%~9%TD4R=&60z~v$L~=t}Zz+fB6Qq z;K&mTxFSE|B2X9YE{TSr!_WIfMtOW9A|yybRka81DzdV&0tuv}ql3T^33gVu+LmPa z@85oh{o#CNbPNmxXk~q!5o|Y^3L+9xWx%y3F)=X&f$>Gekn!XO-QV$D;6@d;n8srf zg2Dg)(HV&1#l z9=g1|WT2^eF(H92Iy##DSm&b$ArXAGpwyT?)hYtHoLe_Oc0wxPG37H<| zFmI~hjGQSfT-?0}G&FOq-~g8ggZVzMwu>0dq_|H}B?_d>w+|SxmR%q{;Pji2mXbm3 z{ZaoPtNgz=sV5T8fHt;G@ZtaIgll=O?eXd9C!B?3CC58gYwL}1K0Q2w(V!jpH^85s zF`RT5we`raWoBT<3dJILIX(H!iZL92nte(Y-DUgYa5<`4orqx630RU72`0tj3Rc;Y z(W=^T81@@rN@)6ar)vc;=^ayRW0msFfw+bOnaxEbq@l^wQ7`Y`{9E8Pt}QB~)e2mp zM0$HpD&hR5V^i~ddnDH=bwX^xyBm)#70yN6ua zz%khcH|mU42P1z0H);65q_r@GP5A9s{WN5ME~7;P|CEVA zM@rd<`?zVgM8{$~%|tROZ;v%x+iu6V?|?}m=7!nkU&m%B7De)_9mKu^RIyWH*C*e2 zk5bYQ?%qcAwFzp;$e_M1*6wr~l7NSGr~iz~@b6e@tP$eBZ1cb0y8qU9uhgVUWX;{b z|4*yWP8_&~eO9KYS>#au@mcs~e7u~mt1FI9emOfIVWKP8AN6o^ZWBoj75n@ehu!`7 z^IH3m(opIDc|`{NxGJV-4ZNbDo_W8}RW_>-6(9Pd^^W|}F7pjNgG4GFsLnROqSu5ZeCU4s7HE-6{|94^dE5%nTeol2p zphw_XDem>f#*?zZ!6`{+=D#(H%j zA0_su$0)W>o;UHrjG``g8BWqF|N7hqBo_B9?56{W)iMH;!sKhT^P=Wht$AK3A7fQAblNtm&_E zURl#q35XV{^Ba9UIA=K7{SKY!0h+R7?#RwZKU7q(ORB>`N1BVUN(kn!+{JodzXcD5P1 zGC>84f7|iqkjTB9gY#jqL4Rt)`kYatSX&}J!#_4~N52{~EiS9A#3TUY2^JRd%KGwd zPDs9^ouwJV6J=$Q0+$+aZZMos-6OYrdV)q^buT;h z`_%;=C3-pT1NZIbMvu;%=N}F|7kGae)sn(zf(KBO(wq?Ml`TXAG&Gfu=Vfz4>av^; z_Pi^X>j#q~MHLk_a&k)S`j=N7-#!UIrc1*63&}q;7gpwoWy8Y{s08tMeNP^KYreMo zUQ=6shd`hEtmG)`u?(ua5=;Q4i@{=C^2(8!rG4NJLQ=8or%JtAC+RoTQZ)9hM>FZy 
z=Ov@IiK*|xA_fNVM#Lymk!^BXnSS$mJ~(kU2hPIggn(!9@EHybZ^g{4)3Qcdi57bV zDZ_B5(0@Kc`45fwE4A={7q$d!k1fmILrojBraKxSY=Qk;-`>8iDK9TSI6gM8-qD8@ z-kE^`5)iPVf}`yJ+n0!%V{L{$pDWi;Ls; zq>y=q3{Gk-2LK?nzvr{F4QMS;{nW7>FCMqD^6K-Lq)c@@@G6RTffo)ilcLkxtLtkE z0PF#$lGpYD@PD*Wq+lOGoS}-0h8Dw`=`Hn^{E-woy^ObaUAl-*72M_bUW#Is0rAem zvl+L+C;$XStACb@YHO3I^f|V4Ib4!`gtKQmFD5-*x1A%MkCm7I*#k>>%-5_ z5cVn)^xR3`yIksy=>|YvvEN-kPbiZKbil*OiaEYoy%9qz}gooT32S zs;;a1FqKn};meCG?l61?4i4q7`qfJC8Rr!g#B_8>9R}T>2bph;6fv-{$N}`%fYYEx z<^kEO#*$F}X_BWgWERvv0uAuipHaxD{0Qg=yzr*`F@55Yx znc+u8Mddopw~UsCwudXU19wOZ4@?dL3_Ie(2P)6~IUE2(BBP>qjEoQiHxgL-PB@%9 zdzQ@d{7q~uPE&L9pD)j;PIjls;^N|*Jv?Oh=bBwMhUiz;*31F^nEv=RCIDbmR8$T+ zuy__GDlO^)Z=Zp9#>~+XH&zabii!$=Z42vJK|KKPU0hp3wX?Hp0@tGB;Eev3tse&% z#{qzOB+*0M)(6_h$H{;<4|Y?+)|MT>>T!FsjYyEItLwpl@P0Nx8UPYfX)KJVk4C_T z<#hQ%3(tLb>gBm0KR;<=Vj>8hn;i8dB@y&%?2G*Nwzi7>?CtDIK5!sH000DGgS4R` zH5l8UCLg!iXB-?H->v^3f+)<{%WJ%Zo|P4?sHjM{$@>x0=HB!A($dnz^z_17Jm}!$ zB&@WQtIV+e4G2bZtp+olU0pi>l%AN95)Ohg>Cb;4=smwfv5uQh@joLA#Zni|)-Lgg ze?Uz=RAwX$-@;l>QPBq=QhaIqx4j*Kf`S4LX@bB8q7xUV9WOWGIbCrwGeZCnPt*CF zpY=32n=V0osTB6nJQNkDW3e;RWoP1%$nD8r_|pM6oS2yz2R==^x?|V)^bDpT1mE4k zlg+bcbEzThT*G0E$%nV-yKap$Gkwj>KYn4z%m`L{r*c;Ri73aiN?vv>0X_Y zp5E5qkF~b8_HygB&p(g9C{I^t|9QPV1TZ+Dbq!{jXz-N*BY0{%Qm8RK4-zQBGj`3` zj2l3tL;bR@|L;hUl(K|{p|}-HTV`}^ds~>Qjg`cKH4HzAshNfS(aZcN`u*l^CAj6sPQmuNNe+UZU zVqPIbnC8~GS+oBYOwYx%LX0?FJ~Gfgln#l$N7P9Qx%j0&`MT)q)8KNf1xJpYD4YaM zbS6v0AX%)g*BeeHNx_!^%scVK7aUW9r-Y(AK%}E}vQcJsSJZNoe$JJBBH|swO4zeM z{um#o_=(ATQpc&)Xf~Ma=I7)h66KUe$Hf`pm9O?~HR+c&h-aZPRl=Y*x^{nDr32#0 zF7O*Ye`)`{HY|sWqcy84y&^EYCzb0mwVMO3 z8kH*e%IEAjmaX3gPst3A8WrH5zB1r<0pK493oye=IFq8+bEm&YR4tVcTtO4 zO$`|YJ+}tM8AT#HM^1MS>&d-dLf7on+{4zA|DU4i-#kwG9Aii5w$;&D*P%A zoUW{0&HU1X^K*_RJ4(dRb=mXMfnrIwkg08DAb!J>=3tpZ9ji=2?tgFrEQgSxC&`VX zuT~$G^ypOlabTVqKq9>_rPYUgtQ~d84|JaL*3~0dhlsN=_wDaWrYk(GjXX5|!tRqL zbBX|8u|6*30j9piu3yA`93v+C%bZ!>_fhrNyLVu3dV2`P#B%RrTZ?db%bJn#4C+g# zeOw3I21*cpWZNb@38$Vxv7D>Py*DjKY-Xc2A?d=bK?SI7V3XfZB9YqCF&pe{UfxDa 
zEQ(5Q$_8N+2}vk4wH=HR>I)#|4hPH1BTqd;v}k7jicO$UXN9Zq!Yey8IgB)Rg+AuH zVSUR3ozrR5;N?oh_?m>FdF775b*24D@&)EaDZ&I3tu?Qb($6zQE0&Dntu9m(MD!=VYZ2d}XHQe(ugRS-K7Cvb ztf;?6etg_7&M)KfAQWqq>b?sT`Dxv5PSk`TVTOR_T;$;EPUuzNvFdk3+#`R948Ctp+iHSSqa zb!YSM?Wf*iA)`xgbrS>Ah9~37d~G}wlPF?=2zcf5jH4XJwK?cMfNh-P~D@r zbbX;jL?zTmoD8Y#v*)~IywsJ+`#J6ImdyM4L)c&(rePy<)``{myMzq18w&dIv0uiS zB~QRt=Xj~J$*9nCht>Nm)rWpm5}8l^?;ka%VKUE4S1z3!&uESgV0h=3gvBTx?aMkM z3{6RPWUmER%XQ!taNNyq{UJ^tQS3ORa-FE;M`~zLOkd(?pEER_=kpO3cEfo7HH0mY z@F`2%9JE6u8e9oyrfjbDPy^;4^9h=6 z!TI+yIArU|$VbsD&O;;GG>(WX$t|Tq2UcCP{7j0O&hOtN`5)EJNFzaJY!dG8+rI7* zUh#$QAtUv%&g|-Ri5@>x;u+RLIXOyj=2pjj**(CFIiaVpv5I@+O2h1Hge!-E?Z{m| zPnFZ^n4K1Op&~SN+TfJg|Cwv0#Gdp40j!Ccr|~@lJ~aj>HJw>-T9`u`3a;Fb?Kk=e ze{8OLKc_cx9sDst+^qLX<9oRJYJVR2!1LX_$svikj^>K<$s&Xyt>3p;DJy*`({%N{ zZL+Izn)x-R%3*ToVE6~-X`Zza2`OWOghZV){)n;4N@V)red+x(ZKFCi)8eEc{`@HHlBD*Ubas#X>HWi=c?$sOx`7!QAq?+p_t(p~F`|&%j z|2Sr2*C<4pyV6`JA!3dZe88HW1flgOl`Hq7NG*rRzuz?RST)6Px5q{{AvCX>o!#^!*iW#%||cZ&NMFUv!7Q$N4zRK4nDcIKGgdvrp*ajN6BK1 z60K=@oC98}xsNuP$1(bH3hju56iCkRy5`FC$pVifjz~0caUiPL86u0E)bXPJ^n-fw zqdi`I>!zl!CQmL~xXuTELn}`AHETyRIm6uj)qH84W;VxBN<>CKIBGv~u3mEH&(^NZ zDK;HuwEe^nVVzR;F3$?f<6JOgaAuJVqEXS)s6r4Y^Py36_%snO)8F|f^nHH_$K&UF zzp-LVW&-R#mab$;dkOf|?FH)7pEHr1PmoxL)L{m$N4Kt#JxSylR@AY#R=b~RxtrK;gM{0jFkbPhqo|#SM zmY#$(9P2dfiM1;w?@)ae(PPPY1yFqJks@=|wpK=#&J#0CuJT5}i0yOz**sp$8SL!` zu*XaxGnXB(^>2DF%p-?bo*jwgdBz>dEFWl@USg=5iw!md6X+K8v9!PS{Z9=Xkx9vp z`A3Lp>>J%huwW6UI#$C><y#cVg-R?o3^P zmu#T=YaSI=4N_nQyyCji-`erC7lq7B=aRHVQIWLnt$iZ;5}NP7KH{fW9MYyucvreW z6>vBz&aFMvNLZ*V%u2_MwyH|a6;trZU$Z`zOeShBD}oe}#OQ#MnvJ(xr$zr~Nsfrk ziK*6RLCr7!`ZUF)6~*lHZ~V!ucvM%IDIX^D@w^a%TR9di83#)$$NK15`OshrC=41X zfsLGKZ!%_?M8-5a)?!{~)eJKKI2K3sQS?0y-y0u&p6T`NRAX~HCMd3Ejd|b1?Y?V2 zg7ir2$Ma1$%x$|}t2 z*q1is>5)8Cjp0nMm?^K3SSTz_+&rU<8r+h3WaPE~DUwd+N07w$-xbs0d1ZD2M#HbG z$^Dxb#Vw3_e}&Upjq7;?ZB&%XF9Ydaj{MU?elwpqu`H^*@n7QU4`;fxEtES)!En=D z_$1~kSeK@)b=Si1x;~w0!N%k$&&d)~WK=zq6T6QnN}Q5zGh!`THV<(!{B>lOxlMgr 
zNTG0hSM`a*`w56qzrU9)|LyX-AXb?7yqA}GwGs_(!+4h};o4cE`*|gsC$Os`v7CF{ z9J^lI8dd&pk>0-(=|g(|BTY5U(}PGopmRa$R8_7;gw{MeO~h$6YvQLz`ug0ZuxJbS z(I{0M&Pwu`1kdG%%Sjm@@?zar?}0x)^m}*D;S}q;Yg$l$M4ML7r7=~i?d9ErCk;w< zDOfw`CB|rXvBKwwK3~JbB1l~o9=2TDy=HlD8$Q!HoF7G1_0A5q_k{K~m-#&(9hZEI{lv8ToRu)EX;nP(6C%^` zPZnf^thC#QyUgER5^9gXC-1lZmEbbD)k=JzgEGMU)6F|`lXEb}wHj5QA6f2uB5y%K z*gNXWpZP~&JwMXk#oMu(9L-K4co)TJnOE)pArFZlZA%xP2FAQqz@ll@xcy+HaXR&^ zkt7R&`fZXExnNMN5kb2*ie9LbzaT`i}}^k^@8g=Aj&`FIV`6@OsrYvu#8!=yJS zGk7$S(SP4LI0TCUwQ5 z83JLqORINDMz_m`O|8I=d@2rfCAt5){8%aLz)+<$DYA;vdWHU1)trHkR?>~IDFXtA z-q8$2?lXFwDgz-^_SFWZPI!WsfuXZ7e4*pN8*%>+Vfg>VhX8W^U;dn^9iE>%fvo&> zVdXX*L+({nvfGbrkoD(An3%0~GI6sQ2i)RQ0JKoNV^So903oC})(8OjqvF`yuXq7P z(UUXqV@8J1OnNmwf%gxF=Zg>~#dxZ3_zcB^}DKXzcSq86QNys@hHT4VN zKD~=+566}&)jm&PDrygi*~fb95g5zA}I_0I$FSy)&^_e{QM6rp+m+&AUX z7JY8R>f9juK*(n_K#`7kocBGPJtewRRtC9LyX2ZPfyw6SdYih(qq-ra@(sQ6NB7yZ9ZwLC+HQ@Q{LDad-jnjcT9DiNeQrA_1tP!zxL6@;|40y1HIzaivJh zco40(CARZ|hqG~V?yxB)*`a)>UNm{j;wzXUfToK6s3EmK$2S<RL9A&k-%K$3GQ)hBaaIpb2l zM1bdPM$r|M*r^QV0M6U-RW2sSH`L9L=CKqk`@Q{r^|XqVQ=Wo7h zJSYYNOg1*@D79Se5?P!6D0Tp~1N0S=g6Tmjw#B!v`%^QZHo%Lrw0-)X0ZE`7ODc&e zz;dCc2p6t=x#pII?KG2s21_b#sHJ&bxCp*jSTO`zseye~ClYpddv5aT_3O>d#>U3Z z-@i-8wgGb77LG-hkeV7n%B2tI{@PYkLTZ3#3&_6Nfa+ImOa>-zgw2hYBO61SW7ZjA zJyJ*j(Xq2*W@Kc5`~W2?F17CR^|)>I1t8T_fTWE3_81PJ%S_Zd(E{F2rSqbcx3@Q) zurO75%N4t7rikA1DLz4>D{q}hUS`mp4^WWJ%&cb~n1KCLma?T09#X;z-FFi68vubCf<8U-kWx{>CL|=pr=*Oos}p2mVKE0Ft@m#A zI6g6P7)TbG1BO!H&!2Aq>N^*B?EoJL7&%DYFvAI*;DQdoECuNTSuHJMz(Yp?NWRGN zuO}e!1VN_V(Ijx*t+6#iaE^4O`SscE#5XHEK%TaH+bMA}WD(Cf>VeG-6e$LUu zK@r+7ZM)bJ4hVQ}@DV|l#(K7aPwei(B0asY*14j(y8DAuOVRWlJaLl#$O7=Wk)e(G zR&g-ha8bWA_?`m`lm66(5YSxZ-jc)lwx%~1hj1PmNaq2r`5OSgwK$Lo!-6@ZAV&U!_0Y`8Eirzd{hGUr5i`yw6Ag%)kxN_}JL+`1ts_$F#Jx zT|GS=;BjzD_n{RQNN+!-G}3uOhn53qb^xYd8Y?yE8yt+3`-}hqnkrJz?I{_Y=atO* zA{WrrQG;4RS`z-eh;tVoh5EX>w!eRgGBY!orLZ6)bhfpHgzrw zU3xzZ#@<1rSKLo-Sg66Gk{*0<;r*6IlyYET02UwL8%Y}o1BpN#p*r`CAq3?#?(R1z zSO@U@%J48DWh^SAd>r5+v%065+g9tJ?9aD$ZD;vz74f6`Xv%06FVRd^y&GkmE`G@| 
zTLqR0;PVAa*Uh!^p76wC4bU=-YW-Ni&jh2YR&pRr?7*O(BgCTvtVysJ($>{eXf|@J zAjt@b$lICs2c39J;AteyUwiISG5>U`pdXIGv#Nmp{qv@`)^YY3T=AP<+l_rCxW60> zk_ilK0Tf9X4E+x%SxE%!I&PB|PPtgh{MrCO_LQ}?E#A9S=4ocar4e};hQ9=OedY^o zAxofkrQsChmkEp-+zH@3N-1=g>obSN{e2)Pk;Hz6G^*1j*HTZOJOKifT7L(w5795x z9t9ciPasjy(RycVV}k)wVY9~|&s^4!Pmq{cA)g8(?(gko?HwQ20r|-BnTcwA^-=V* zz9hs7_(`yLYU5#|s~*1tiUO=`s`ZmWhK@(GWHg+lx|c?e{rTqjM}n8h@$oO>m=xCl z&73q6_rXs-Pe6?Gy12MlQA&E{3C4JQaBv`EF)sbH>-ztZrko(DnAFvu9R>RrpIN;W z9Ple9?##suAaepY-T%cGO^cnAclaE!l_teLnzuI)M7ujxJ@cz0x36zi?YDQQNz$-g z@nN#m2Q^KHWS86CL0afw`OMLJMqiBv2(vC+_>9fw_nH3 zm1Rb)z7AsfdOxaD!_ZO9l2h>x0~|XBl!bMaQGV1k-=%LE-zpYkcn%C}_q<0YQD|TI zQEalYrBeuEhd4g3+{m|4P*xptXuU4T;;nvR^45M1@&3~Z_P_&SROn(q&#Gc=xqG73 zhA4AqC-k^CUbbeY@5pBQ=Y7ZqFPB!$Cw_rXLJN-|=DX_$`N*K`3|A0eR#(IJ`Bwla_H ziksbfZ@V2qk*Ns^9z4po1_DmzKfaE`e_kF>9>r1cV!NjN=Q;|6>@@e5b;_&lbL^+X zEcq+?qhV$vgGY;t-jyan*yr{UMFCe!`=5jl&xn>0`m!wUcjii``Gpygz;ib;(?%Uf zR{PVR_+s;Oo5V@fez}Yb0TpUZA`?QPTCRe1%q%mi;6BRPHiCxFujYlPhN+ueB$PI-sD%Ylu_rI zKBUmW+EF>L;}ypS5pfs^+M7RPgLvZHSL6kFv_0e3=VqPt-<;h&x^D_t5h3%Aj)SoY zm-LGs`x_Tg$`{Rdlw#>eh)2gq30Y~8@Vk9BxxuTE4YL?HDzCUm*{q`=LxW&~K{WHt zt$W0tXiGYYxq0SE?+9e*lZEZ#b7 ztn-e#I9&v;kyt>?{!q+gQ}kUsSuJ0UDV1i$}=? 
zJvl|DzqjYi2TJ?9KONT8e6;d|6#>HK=H3wZb&-?+ z$SMLxZD+T=%8X(XHCd=`1T}V_fK=`)LGDbNhTfW3%G|_n-}dT(N$qR+`y^+~x?=C| zAinDX-I1%Ssij%SuM_u~7LO8{4?=o+3eS}~3d#on?c8hnTdlmg3ic8MvK7hNp@6Rj>pNkoLEjy6$L1O(R0+n){QJ%;F-$N3MBq?W%t_ZXb^+KzZ4AD3NN zfnP~LaIiUR;N%B{j)2PL`FWGxbWKC=z}}u*BOxC*TZR``t123zz;zGwXfety)4_WDM&I2614L+}zwZ=9(WzMr;Y9rbP~Ig~vFX z&x`q2`jK%`K9vY` z&oq!2?@rM((<5ZHJOf$5Pov?9;}P*g(_>nn|M+E+6NT=)#n#m~=)bOa$KVq742vpa zqc$|e_dMg&_uR^x7Tej;JHepTz73U>SuKW}L_tAei-QZ+gOly@?bL30`OcqXq`+>$ zJt5!e$lL4ky~_OR>baZx72(dGDK7z$pw@aD`orKJAwR8q;D0X}9duHO3_qM&`wAW+~}f#2$XN%CD*777*> zzkO@mxnla*ed#Z-qwx>>2GQsu3zQ2~bLN-hsW$rX|J)W|`TPm`WxZqgP21~{1d!=3 zCtIj1i@j(}TbW74CWXU-|BN`3V8ZMdTSk==D?GsH)2FO0%=D&-Q(+@XU z)5To~BltWbV+vTo9vlpKanLIgHegf3D*t{l^N)7YkuX5`ajRFpZQHMEx$yY@NF8?pa*i0bo?I zgE~-tTdhv{XxkXALzW0H4eRhnXUbPe<`#%*^dS zewTJN7$_=Qn_3{n*8Ak!^(!b#n@J)7rBnXe4`|S~<_sT2MNZac)pir;CNGsQgD5cm z2NyuYx7$D)Pn>q^OL3s_%IXRoBNq-ZO3Y)xvt{b(YMY#WunsE9XZ#`WdW+tK%=git z_LIF@lnrSeILL{A1ncTdfil_V?Y_J4{&n%{k)P!@1uhAJdWv%)Ak}J6Nz1|s%l|bu zF;wYoTu*n<-6KF>)k1rn_l05YCkZI%!0Z*TQb6UqKl}p)m){9tR?Ate*upmMryGy! z9+uqVEo50m{6ZK(ybn-@<@=YU~@_V-T#(Zu?j-EJAe&>%m&c#rD`BIeO zG273n^NUXMmsX(T!3l5H{q0=86 ze&2kkRZ_WiKyQ>L?W@At-u+zTAWA3pT}(_26zpYuU>gV^J$s{Z=_`dN`r{;OyML{t z^H(M&0)m>CT9@UN_0|ncgR}KPuue-AF6Dv;MaRUj8Z~(z+8K?@$lypfdhV)x{rdF? 
zl1R$4Vq{=Y-Kt|6))eUtxBVL|FRqaER5dLeU%k`(^=sxJaqFk=F&e&xpHKmQ0i|M6 zkI0o?j@sMpzXkEJ+}rfI5z^?3MoX*6&fQs=_~4Rgz^Ec1N$0)EF= z%S%fg`)Ixl*xXti*f=<7;G-+uCuyNk=%5ytJBg{9XObn%TU}r@p~b>EM?Xi49Zh8J zi`v5oP@~`YjT@ks5r7i;?kW)C1+nvw+_@O~=H3OVYAN)^m)ea|ISa15>O`zz-e-sW zib-$56E1HhMuD%-2f&yy`6Q-6*9IRC0(GLXTBrE}VoX_|d8C`bY+LXwN)B#rZaWHc zh)z+X6(B4P<5r?FBhK&K(l-(b0E`h(JQtlK7t$VXtAeinhl~gl3#*=5WklxXN#t*n zTlh`Pz)f7dy!IheXIYDPN~;rBMG-JcLQvt)5iX@}=FH}E&zf6o3a(;%_>i1VF-e-a zv!S7`?w!vOHvylqtgI{&lj4@q4_-U~n2h_f!6;Eck}&b--Fx0$U5Q`L4>8sn$<>Y*B|`;iUYVU&1c9JwL*QrKf2IMrr7Zz<#v$V-izr}~G7 zHA=UAZIzPpk{ilG6in(H8$XncZUu?>ma07@MGBVttf%h!kx6k;3g!lYRkl+j#!Xt( zG*l_{M6_TILYx;*6qAlTjF8nC=>W`P4beS*oYd;K5p}2r*y{wFIR8YX!&HEd2S9iM zazo0TlN0xy>Dm|Luiqu5rA2}QK+3^n(Dwle4VR9ezlO9JF^*O|4!Cgo88I-}7HOz5 z#=JMr0Dtd*88X@;c5Wi(~UMFu;MYi5iCwGi&?%aKH%QOpW|XObofGaqamCilB)@ zGzo|E?i8Qkvu7QkVv?ujf&p3p{U9d3+YtS$BwHTTpJD<8ct4kxoE!#9TPZ-34R~#& z7$99kZ2(%S09`clzI^EbyiveJjnpVYh76m$2|z_A8|b+)RP|aJ%mQ8;d*lyD&mEv* z5#TxqPEJk$9O{^uAcyd&sow&K1r$Jb0_X~qbXI)(1}~sY05nrjdk>Tn5i=YyBqRiM zD-eXQIw8QabnBeW>a*Gbq?C}H9138Yeg~(POBQ3_P3G_RlZ5?gJmZv3uU;WPdGZZY zeEjDYA`MNC{&#BvZo|4@0P>mr$(03#x$>*`!vKDHp9@+wG=aA<9@}G4pa>r@lOOiP zJ>UQoX>Neu!>RGI*4Bjps{#E>;NVy^K!Gvn!vL6pkAQ%%1aKn;Zf;c>d3h{6JiJQ# z$;XMQsVuplKmSv&3$V#>d@0by0R~DzwGHc>B|$-NER{ePC|Bl9G)jS2 z_5Q;)54@=Z&jR(j|B&jzfYjC0=-@^9|3*&-q}fPvLFa5im!&XpAt+{!1`8HVCEy6S z>w>N;Ft9v1g?#|<=>`a%tcyz}xC*Rg3?LuJ0G7Rvikw`8p#Mb$IwmF|yz7Iui5`22 z9q80x4sfl1rXwy6!nfweS2#_zxazwJhK7b=z~&Bl(C5v2$0;Kbkdl7s8yM&TAYP6} zk*SqcB$x;skyXkkY;SJ|gQE+>uc)k)Yz@2xKs0-+IH-LELSX&(3l2mFxZv=T)%<*B zc&iMXzh4#su^9@6w>1G~Mm9?<0D>L>S2Hb*40N<$9u)97dZnwU*A8%Lz{*dqT?G}_ zp#KOt=&q5()D9#UfMJRAR-my0m>?1a+9AMs_dbm;|5V%QfQAzbpsNTPbaSyB4+?$X zBqk<`um}i{{rU5!&U2R%Ty2!^!%SDo$23J--g12LEa*8i3EMy;l4zgXL?fD^~ zOG2{m`5unq)jSTf-{}^Lj*ia56s556aI?FcOHi5d80=yU=wbm%D}GvBpKcfMWrc`R(S5} zt7JSz0k~dQ@yn9$TN|?m*OCGpVa59omL004X`V-z5t z0L~-}fFOhKLHUcEscH7i{l%ydz&fP>5uW2yEfdgmcu54j&R8lZVUSM`Xd!?QBS8WG 
zPYb`ZohX1xn*U0V&ibRRX9RvXycHi66_S-@QoY5(klWBpl7`wbJ8P5-vN0ehfdGM2 zWQFgkExeYwpx`t3$kiGH3#$`UpKyFEud5>m$`Ap-@`Hnf{?Exy0sj7Q@hi`LWo5No zkQG=woFW8@N#OGNPKg=RE>{9nw)75Y&H`9Oh$f-{G|?~{%9b3xV1hv6Lr_R)F%(a1 zNZ(;|wo#zQX+G}jS564;h2@2LUX>Wc^8TDA|!tm*4V`IYrA_b})%C%=f8P9p$ z(y#nj0v!Y`1N9M?+JjU1{?NI-Fa1if&Z?-c}F?r~# z-V;zkfClmjr5dv-qpQoyp+G5fz!2fRK>(t@QUaXK!o>xmXH+ox7m=h=bYQX|hp6`O zJ6RV;8rn{$q1<2HIlZo7>CF*W%8T@WO~daaHsX(79k@VxrH3jF>nZq*g|B^EIMC zWsP=Gnf&q8)YQ+k*w7b|EX7>@&z_EAmeoB4ndDFG`$`nb%Kf&3$4Hduzek81tTLl)89$1ZbnX z5C@w9mG}TsamA8fwQgSOlz@^OCwC((b)p^;vSJcLmmGZ}0s@zg$;o=%q`YEUuVQe5 z6R^4)Apt$bHGT=t$NT=1=Y*a7t2z$uUKGDfVmizT1EOUGN(Epr1*d=Y_?Y`wIa)Kw zW&el20~X+aaZCRB;l|455C#Uu{-$}O*1239H3l>RdQLQgT`7K z>=BQO`Y(;||JB5_UW59cUXZbMHxju*SBp!>ZRxEPm-If?eYe0=@|Xe z|Esn$4aai-_V|s65Vj;LQj#dcmLZ9fo|1NyA(2@`MUn^^LWTyJH6WxyNO??|O2(Az zNP`|ELn^b(obRvyx&G(6_Bk)kd2#luc6y$N`~KZ)eb;xb^-+!IubprESvjfilz&JIZ=M-u6==Y(34yiFK5LJn#2-H5`3@xQ=f*w z_Nx1IQ{INnKuQ2PAs1o~B4RNZ9)aoU8`1pip0KmGw*kuu_l51s)vLfIg<>74vbL5T ztvF3Qxh?<%wUv4aZZ^$-@$kr#H@P-@s2yN;F|)9Ac68hc(2awrPX@BI}djPZ$PU?|M7Y$Ju;EEN9pn>2gsZaN6S-ib6}X=&*P#g+;ohGo5rKyazg{7}eqY*~W1gXSO5-DHb-S$8Bi0qdiLyaF8#XjyztJ zD`U3A#>VFS?c`*u^eKQ<=;k$5Rmn(?gpP9Hz*2nJ*D~yZku!*9xxhPPJv~44 z$vd2VXIA1I1wekj>@8CRDMUZx8g!09$*7iAbDvz{mNS4_)sPLps3;bS9oZXa!OGS) z5R{r!TtU#g{AsDru>05TdZ6C5tD2i!tBT4|BcHkP!-YjfgkWN41O1l%&K_{E>2PvqKtbuS zSeTCUZn=v4K0iNE&O#v`h8kuXiK(gzLn=&uYM;M^I>Sh%9h<^K-EkI}FTrt!C|Qk0 zHAOWfY!f6GjC8M82n(k`gJAjZqPVueYX=91P)vt(UR7r7z>$ZF_rnJsPv{yU+njR~ z?ZHrBc>Lv0yxWx30vE?oSZe<2p*@)12$aeH6y4p^lk(^h?`D%c$(FdAH6cvL)L$%Y(AV;wB_w4!e6cfds-riLF^H;Z&TOAiV0(d|&zJRI<(mcB(5T9J1zs@_ZzrbvnGdGvm9d~;J%?t$L>_x!Qi|Ib8m1TvZ2=P8#} z$BIsOG7q#B>Dw#fihn^6n*8<2nd}H-V#i^e>>ffyyX9YSd`~js-dL2`SYZY?sjy)l zrCP~!f2Im{y<>VnF_L|qxZSk-Cz+}NL5ob{5bUILyZYqx^cRFF6W7=h9c9JMYFqJR z?t`q_0^HEP&v_#l74c6`cZlgFI&-v9!y+x0FwRgrlhXA-!o2(Z0LnQz+(eN0j&HAx zxhati^X}kA27@s<@N_WmP8 zYgNf6xoin>8xUK_h0gVv^CY#rh3}lAUoelJn~TfFCn5-RN6XC*7x;=@cn;es11AoP zy~Q#CA&#{eh#`5gQDcFN!cRe{ksX7BIf}0sk 
z{HcH!Dhez`0wck;d5ZS@PnNW#03m+94jDOn=e7kZ9MEei|w<+Qi6i`{CW zsmYApYNX3afcPJXMUXiqlWGp35rJaI262p?hi4B`Zol0g?VLXGLd@q(72MQJ z=JlmJY~P%x^hqawyYS(Z<==*Xja0YNyvzpd4L@c+UQl$Sf-%J+>9H!TIA zCPFTQh#an$&WRT0|7ZF1Zi&Qt1np-?O@LZmPa4WL%7hic9;E5-GdJCd3qfX-sEUdT z3ULmAt@YN{)^eviDvCcH`Fo`*hYWVeb=-2zGQ8m94Kh_cIG9=%9uiV`_dDhj?PllZ zhEcSvm6heeLb7B_68sh97R_`6P{0JC3*Po0PB%c_WT8mKhbUixD=!bKB~<~Jjl^d> zTh^e5Vi8;o0wN4-wOz@29W93~s-$yDFhIz>|F%l*fP1{K8iEA98jLRW%mb-SB@bSj zggiKhF_1M#TzL2}%we!VjvO}bOMZPkL2^EvI{p4RNbeem8Is)*g00B#5Kve6)+27+ zlFoS(fAc08C@Mr)xWpyIY++$hGCT4TC0B50CSq5GgYoe>osEsibXqB$THzn}~o`3@0t88D@c z{ihy&Z@k`b^lftgSRnA3{P1GibheyF;!fN+!L8NY!7et!)Hf>Y%dZ`|3S4nn8gn<&F1w*-tSi=7H^w3;R zkM$G4A!CEVWB(>{yh2c$rIP7(^z2+5mrV; z!lxaXG1jb*co(=ZIx&Vbo>yvx;}QA&QhA2oHAlx>TB^L)m}asb7aMhz<~ow=V*k0V z?S~KGw0EbauOj|GU=9~~{D9ON&mt!0cF_N)tg!a~Qr`TddT zig9|ViVq*A@~-Km%kWUI#I@^N-NbMO)Y$~*I+DLLXVl}XP2ryU3NUtgM1Dg*{{8vE zVPjgTSP4(m*4DP7sfiC#L{G9`!d*neXB7hN4*0VB#vQf@zkpss4NWh8b@#6L(K2r# z9uYM%*!HH>%N?=zUB41?)`Ck_nm@m$B`U^v^$V(SB2PoAJT%T_h7}5aTz1qc5d?3H z=Se>qoShma&jY+dCJG)kA*?2Pm_qRq;jHQ=rMYVtpQva!B3D<;Ve@Cue2E_BkQKM# zMO6H3FBb*v^nIz9@=axDnSvbn5<0P)o?YTz*-uYIg!+Pk%V2N_s5Ov2E-o&*YB;7d zWR4WxDjV7dXJ%kC%K6TF!!Zh$metY{?+VjlDL|Sind-S^lFUkhxEcU~x--kD3zdb( z_pAakD@g2!sgZ8V4>^Djy_gzArbo7Qe-K$moyJ($B34!_lr)gvm5}(uIlgn8!dGdD z8VU;hxGn+U_QE&51Z#(x5>fM5(^!rdpZt{1t+6nx0G{Iz<~W%tpQ&y^lBR1{fRK_LlX5oJw1JpWrJv`d;+1@t}TbN zzl`8Y4rd?bBZv}Q0_ztce8A`J#4|zYf+U=tpGAxB1-xJhG^2K729RJ_a90Xo(gJZD z0%t4?KbSr=PUxLGYliDKB*E6jQCJRA#UE%#L`AIwNO3LUoIs)wU<=$u!YknylsU>v z0vHTpav9bF5o+)mA~q#u&a~X8bQ0Dz3}a0EhBP-D6b)in(Hc~4&}EdI8Jp}H;>8du zwczDU&W<)#!9l@4CWE5n1yJ*sA*qP5jRPYAjHgtiqD?gyp_PaQ$~BFbyQa|gpFxO4 z^3iM!y6!{HMr>l@Qsnc!k>UlLQGN1D?7zNHXa5IgRa`0`5Cy&5RSj@E4 zow%lm_vGU!(rb{=P2>vFWJ3A}nX?0+laX`a=%V`LiNA4!KuzplN0%*5+?RF{S>Vc* zE0%zxz$?O|X9coL-`o2mo*d zakHNxuj4)ZV`5^|%3$(>jT7CADloGR&bDb9#YDlfAp_6{ZmWnDba!_HU!2u!?B(U9 zR_2Q@sel?yl02T`RM;Q_PVm5aNCg8}XCAX2ez5MSxX+(I(-{glg-F$WuzV|EDnMnj z+SqhS{S-yGLmP_44uvhe$uDS;tk`-+oT@R@b8($(J8TCc>kJGG 
z(i^K^zg`Mo`~`Ffrj)H1%cLboghE{$5_!uXirmsHG#!v)!UculY5wQjgoeZ<2eQBCbPT8}bxaXKp zzh0tAT6rL=oIm#9;T%wx)o`Gh)VLJTPGKW12!@R<%w5n7t@-wrN7OeOZ7Lkdy5C%7 zGBOXrNp+y@VVr;R#;WJ_e>%0P5SBU$yWMdzW*MdJ=pwE>>f+GhIo=&k6-Q zE|k=ycGuU}zapwZV(4=yNqiJx%S=h#Llh&YUtL|DJkZD}(1ub+6-S1KDo{1aA<#Gu zBDM0x>|~EPJR)_gq(qHRP;dkESI}Xhm}p2bL`M2YxG3GySMW_}nV6Ww^k!xNpo5?N zP}T%g|8cXeV<^VMP&Q@BBX>woNZkcY3SREPv{RqUeJ!{|)QsW!$DQmk+^yvnd|2RM zR!7)n5qY4&A?Gv3MNCaUEtxTt5-^Z5{HtCnDmT}KuIq83Bo-!LtT2L&{=kfAx#0fY zh~kS;MniiVWzyh8ZDxzR^fRb9ev6FE>+0s_=9Lvu?V07Pkk>T0DJ0^=G*!_Tu?NSt5ib7e3ODoH5RVi&C4^% zhz;4h5|`wF%C>E?7fMbwg)MOPzbWIkLn|yGjNPN~?>{jd>MiG`G;6*>s-UK=^l~D< zcT;2f2DYcg>P>C!b0eJU@d_)ul6=eZ>(kh*D_dvy+?A(pJf1I?$N(1BU;`M{gTwhY zF%5e5_Z=v#2 zpTdlmFOOfSyRYIqE(MC@kXI*b!*GJ%LZXU)c=_}a@p#q9bL!`hM_si%g?xgS@8)e;wRAgYuZ-DH^~)%^+A3D+^nAVH zCY}$0FO{8>qf*!1yCTf7AZY$zqVcjwq&SDhZLaUK+J}72S~Vg2>8Bjduimx2jg4i+ zQHvlw2f>^VKNcXET4Rnd`d8T8M%S$uNFG^V#lseS;DCZ2&`_Jwhb|gR&Ew zZ;iX><>y~Il zPzGb@{Dnldc7@X0asx&7OjORb)t9RlIA}-D{Aprp+I4?ncI7W6B~0Tos@Zl;7j>QP zJ5r*0ZhwN0xnLX6B8-F#Dt2ys?=>2bfynhtB|BD8o$4lS8IqGI}8aCDUZ@O=GxJ$%2r(Xq89b0<_ z3k^kYAWL": true, + "<": true, + ">=": true, + "<=": true, + + // logical set ops + "and": true, + "or": true, + "unless": true, + + // New ops for MetricsQL + "if": true, + "ifnot": true, + "default": true, +} + +var binaryOpPriorities = map[string]int{ + "default": -1, + + "if": 0, + "ifnot": 0, + + // See https://prometheus.io/docs/prometheus/latest/querying/operators/#binary-operator-precedence + "or": 1, + + "and": 2, + "unless": 2, + + "==": 3, + "!=": 3, + "<": 3, + ">": 3, + "<=": 3, + ">=": 3, + + "+": 4, + "-": 4, + + "*": 5, + "/": 5, + "%": 5, + + "^": 6, +} + +func isBinaryOp(op string) bool { + op = strings.ToLower(op) + return binaryOps[op] +} + +func binaryOpPriority(op string) int { + op = strings.ToLower(op) + return binaryOpPriorities[op] +} + +func scanBinaryOpPrefix(s string) int { + n := 0 + for op := range binaryOps { + if len(s) < len(op) { + continue + } + ss := 
strings.ToLower(s[:len(op)]) + if ss == op && len(op) > n { + n = len(op) + } + } + return n +} + +func isRightAssociativeBinaryOp(op string) bool { + // See https://prometheus.io/docs/prometheus/latest/querying/operators/#binary-operator-precedence + return op == "^" +} + +func isBinaryOpGroupModifier(s string) bool { + s = strings.ToLower(s) + switch s { + // See https://prometheus.io/docs/prometheus/latest/querying/operators/#vector-matching + case "on", "ignoring": + return true + default: + return false + } +} + +func isBinaryOpJoinModifier(s string) bool { + s = strings.ToLower(s) + switch s { + case "group_left", "group_right": + return true + default: + return false + } +} + +func isBinaryOpBoolModifier(s string) bool { + s = strings.ToLower(s) + return s == "bool" +} + +// IsBinaryOpCmp returns true if op is comparison operator such as '==', '!=', etc. +func IsBinaryOpCmp(op string) bool { + switch op { + case "==", "!=", ">", "<", ">=", "<=": + return true + default: + return false + } +} + +func isBinaryOpLogicalSet(op string) bool { + op = strings.ToLower(op) + switch op { + case "and", "or", "unless": + return true + default: + return false + } +} + +func binaryOpEval(op string, left, right float64, isBool bool) float64 { + if IsBinaryOpCmp(op) { + evalCmp := func(cf func(left, right float64) bool) float64 { + if isBool { + if cf(left, right) { + return 1 + } + return 0 + } + if cf(left, right) { + return left + } + return nan + } + switch op { + case "==": + left = evalCmp(binaryop.Eq) + case "!=": + left = evalCmp(binaryop.Neq) + case ">": + left = evalCmp(binaryop.Gt) + case "<": + left = evalCmp(binaryop.Lt) + case ">=": + left = evalCmp(binaryop.Gte) + case "<=": + left = evalCmp(binaryop.Lte) + default: + panic(fmt.Errorf("BUG: unexpected comparison binaryOp: %q", op)) + } + } else { + switch op { + case "+": + left = binaryop.Plus(left, right) + case "-": + left = binaryop.Minus(left, right) + case "*": + left = binaryop.Mul(left, right) + case 
"/": + left = binaryop.Div(left, right) + case "%": + left = binaryop.Mod(left, right) + case "^": + left = binaryop.Pow(left, right) + case "and": + // Nothing to do + case "or": + // Nothing to do + case "unless": + left = nan + case "default": + left = binaryop.Default(left, right) + case "if": + left = binaryop.If(left, right) + case "ifnot": + left = binaryop.Ifnot(left, right) + default: + panic(fmt.Errorf("BUG: unexpected non-comparison binaryOp: %q", op)) + } + } + return left +} + +var nan = math.NaN() diff --git a/mateql/binary_op_test.go b/mateql/binary_op_test.go new file mode 100644 index 0000000..8dc60e5 --- /dev/null +++ b/mateql/binary_op_test.go @@ -0,0 +1,125 @@ +package mateql + +import ( + "testing" +) + +func TestIsBinaryOpSuccess(t *testing.T) { + f := func(s string) { + t.Helper() + if !isBinaryOp(s) { + t.Fatalf("expecting valid binaryOp: %q", s) + } + } + f("and") + f("AND") + f("unless") + f("unleSS") + f("==") + f("!=") + f(">=") + f("<=") + f("or") + f("Or") + f("+") + f("-") + f("*") + f("/") + f("%") + f("^") + f(">") + f("<") +} + +func TestIsBinaryOpError(t *testing.T) { + f := func(s string) { + t.Helper() + if isBinaryOp(s) { + t.Fatalf("unexpected valid binaryOp: %q", s) + } + } + f("foobar") + f("=~") + f("!~") + f("=") + f("<==") + f("234") +} + +func TestIsBinaryOpGroupModifierSuccess(t *testing.T) { + f := func(s string) { + t.Helper() + if !isBinaryOpGroupModifier(s) { + t.Fatalf("expecting valid binaryOpGroupModifier: %q", s) + } + } + f("on") + f("ON") + f("oN") + f("ignoring") + f("IGnoring") +} + +func TestIsBinaryOpGroupModifierError(t *testing.T) { + f := func(s string) { + t.Helper() + if isBinaryOpGroupModifier(s) { + t.Fatalf("unexpected valid binaryOpGroupModifier: %q", s) + } + } + f("off") + f("by") + f("without") + f("123") +} + +func TestIsBinaryOpJoinModifierSuccess(t *testing.T) { + f := func(s string) { + t.Helper() + if !isBinaryOpJoinModifier(s) { + t.Fatalf("expecting valid binaryOpJoinModifier: %q", s) + 
} + } + f("group_left") + f("group_right") + f("group_LEft") + f("GRoup_RighT") +} + +func TestIsBinaryOpJoinModifierError(t *testing.T) { + f := func(s string) { + t.Helper() + if isBinaryOpJoinModifier(s) { + t.Fatalf("unexpected valid binaryOpJoinModifier: %q", s) + } + } + f("on") + f("by") + f("without") + f("123") +} + +func TestIsBinaryOpBoolModifierSuccess(t *testing.T) { + f := func(s string) { + t.Helper() + if !isBinaryOpBoolModifier(s) { + t.Fatalf("expecting valid binaryOpBoolModifier: %q", s) + } + } + f("bool") + f("bOOL") + f("BOOL") +} + +func TestIsBinaryOpBoolModifierError(t *testing.T) { + f := func(s string) { + t.Helper() + if isBinaryOpBoolModifier(s) { + t.Fatalf("unexpected valid binaryOpBoolModifier: %q", s) + } + } + f("on") + f("by") + f("without") + f("123") +} diff --git a/mateql/binaryop/funcs.go b/mateql/binaryop/funcs.go new file mode 100644 index 0000000..9a123e2 --- /dev/null +++ b/mateql/binaryop/funcs.go @@ -0,0 +1,104 @@ +package binaryop + +import ( + "math" +) + +var nan = math.NaN() + +// Eq returns true of left == right. +func Eq(left, right float64) bool { + // Special handling for nan == nan. + // See https://github.com/VictoriaMetrics/VictoriaMetrics/issues/150 . + if math.IsNaN(left) { + return math.IsNaN(right) + } + return left == right +} + +// Neq returns true of left != right. +func Neq(left, right float64) bool { + // Special handling for comparison with nan. + // See https://github.com/VictoriaMetrics/VictoriaMetrics/issues/150 . 
+ if math.IsNaN(left) { + return !math.IsNaN(right) + } + if math.IsNaN(right) { + return true + } + return left != right +} + +// Gt returns true of left > right +func Gt(left, right float64) bool { + return left > right +} + +// Lt returns true if left < right +func Lt(left, right float64) bool { + return left < right +} + +// Gte returns true if left >= right +func Gte(left, right float64) bool { + return left >= right +} + +// Lte returns true if left <= right +func Lte(left, right float64) bool { + return left <= right +} + +// Plus returns left + right +func Plus(left, right float64) float64 { + return left + right +} + +// Minus returns left - right +func Minus(left, right float64) float64 { + return left - right +} + +// Mul returns left * right +func Mul(left, right float64) float64 { + return left * right +} + +// Div returns left / right +func Div(left, right float64) float64 { + return left / right +} + +// Mod returns mod(left, right) +func Mod(left, right float64) float64 { + return math.Mod(left, right) +} + +// Pow returns pow(left, right) +func Pow(left, right float64) float64 { + return math.Pow(left, right) +} + +// Default returns left or right if left is NaN. +func Default(left, right float64) float64 { + if math.IsNaN(left) { + return right + } + return left +} + +// If returns left if right is not NaN. Otherwise NaN is returned. +func If(left, right float64) float64 { + if math.IsNaN(right) { + return nan + } + return left +} + +// Ifnot returns left if right is NaN. Otherwise NaN is returned. +func Ifnot(left, right float64) float64 { + if math.IsNaN(right) { + return left + } + return nan +} diff --git a/mateql/doc.go b/mateql/doc.go new file mode 100644 index 0000000..480a48f --- /dev/null +++ b/mateql/doc.go @@ -0,0 +1,7 @@ +package mateql + +// Copyright 2019-2020 VictoriaMetrics, Inc. 
+// [Apache License 2.0](licenses/metricsql.license.txt) + +// Fork from https://github.com/VictoriaMetrics/metricsql +// We modify lexer.go#scanIdent to support the parsing of graphite target. diff --git a/mateql/lexer.go b/mateql/lexer.go new file mode 100644 index 0000000..ded50e1 --- /dev/null +++ b/mateql/lexer.go @@ -0,0 +1,644 @@ +package mateql + +import ( + "fmt" + "strconv" + "strings" +) + +type lexer struct { + // Token contains the currently parsed token. + // An empty token means EOF. + Token string + + prevTokens []string + nextTokens []string + + sOrig string + sTail string + + err error +} + +func (lex *lexer) Context() string { + return fmt.Sprintf("%s%s", lex.Token, lex.sTail) +} + +func (lex *lexer) Init(s string) { + lex.Token = "" + lex.prevTokens = nil + lex.nextTokens = nil + lex.err = nil + + lex.sOrig = s + lex.sTail = s +} + +func (lex *lexer) Next() error { + if lex.err != nil { + return lex.err + } + lex.prevTokens = append(lex.prevTokens, lex.Token) + if len(lex.nextTokens) > 0 { + lex.Token = lex.nextTokens[len(lex.nextTokens)-1] + lex.nextTokens = lex.nextTokens[:len(lex.nextTokens)-1] + return nil + } + token, err := lex.next() + if err != nil { + lex.err = err + return err + } + lex.Token = token + return nil +} + +func (lex *lexer) next() (string, error) { +again: + // Skip whitespace + s := lex.sTail + i := 0 + for i < len(s) && isSpaceChar(s[i]) { + i++ + } + s = s[i:] + lex.sTail = s + + if len(s) == 0 { + return "", nil + } + + var token string + var err error + switch s[0] { + case '#': + // Skip comment till the end of string + s = s[1:] + n := strings.IndexByte(s, '\n') + if n < 0 { + return "", nil + } + lex.sTail = s[n+1:] + goto again + case '{', '}', '[', ']', '(', ')', ',': + token = s[:1] + goto tokenFoundLabel + } + if isIdentPrefix(s) { + token = scanIdent(s) + goto tokenFoundLabel + } + if isStringPrefix(s) { + token, err = scanString(s) + if err != nil { + return "", err + } + goto tokenFoundLabel + } + if n := 
scanBinaryOpPrefix(s); n > 0 { + token = s[:n] + goto tokenFoundLabel + } + if n := scanTagFilterOpPrefix(s); n > 0 { + token = s[:n] + goto tokenFoundLabel + } + if n := scanDuration(s, false); n > 0 { + token = s[:n] + goto tokenFoundLabel + } + if isPositiveNumberPrefix(s) { + token, err = scanPositiveNumber(s) + if err != nil { + return "", err + } + goto tokenFoundLabel + } + return "", fmt.Errorf("cannot recognize %q", s) + +tokenFoundLabel: + lex.sTail = s[len(token):] + return token, nil +} + +func scanString(s string) (string, error) { + if len(s) < 2 { + return "", fmt.Errorf("cannot find end of string in %q", s) + } + + quote := s[0] + i := 1 + for { + n := strings.IndexByte(s[i:], quote) + if n < 0 { + return "", fmt.Errorf("cannot find closing quote %ch for the string %q", quote, s) + } + i += n + bs := 0 + for bs < i && s[i-bs-1] == '\\' { + bs++ + } + if bs%2 == 0 { + token := s[:i+1] + return token, nil + } + i++ + } +} + +func scanPositiveNumber(s string) (string, error) { + // Scan integer part. It may be empty if fractional part exists. + i := 0 + for i < len(s) && isDecimalChar(s[i]) { + i++ + } + + if i == len(s) { + if i == 0 { + return "", fmt.Errorf("number cannot be empty") + } + return s, nil + } + if s[i] != '.' && s[i] != 'e' && s[i] != 'E' { + return s[:i], nil + } + + if s[i] == '.' { + // Scan fractional part. It cannot be empty. + i++ + j := i + for j < len(s) && isDecimalChar(s[j]) { + j++ + } + if j == i { + return "", fmt.Errorf("missing fractional part in %q", s) + } + i = j + if i == len(s) { + return s, nil + } + } + + if s[i] != 'e' && s[i] != 'E' { + return s[:i], nil + } + i++ + + // Scan exponent part. 
+ if i == len(s) { + return "", fmt.Errorf("missing exponent part in %q", s) + } + if s[i] == '-' || s[i] == '+' { + i++ + } + j := i + for j < len(s) && isDecimalChar(s[j]) { + j++ + } + if j == i { + return "", fmt.Errorf("missing exponent part in %q", s) + } + return s[:j], nil +} + +// Fork from https://github.com/VictoriaMetrics/metricsql/blob/5f227b4bf5679c28c59362aad96d196a8e4a986b/lexer.go#L204 +// Modify for support graphite Target. + +func scanIdent(s string) string { + i := 0 + isGraphite := false + guess := -1 +scanIdentLabel: + for i < len(s) { + if s[i] == '.' { + if i < len(s)-1 && s[i+1] == '.' { + break scanIdentLabel + } + isGraphite = true + } + if isIdentChar(s[i]) { + i++ + continue + } + if isExtendGraphiteIdentChar(s[i]) { + if !isGraphite && guess < 0 { + guess = i + } + i++ + continue + } + switch s[i] { + case '\\': + // Do not verify the next char, since it is escaped. + i += 2 + if i > len(s) { + i-- + break scanIdentLabel + } + case '*': + if !isGraphite { + break scanIdentLabel + } + i++ + case '{': + if !isGraphite { + break scanIdentLabel + } + j := scanValueList(s[i:]) + if j < 0 { + break scanIdentLabel + } + i += j + case '[': + if !isGraphite { + break scanIdentLabel + } + j := scanCharRange(s[i:]) + if j < 0 { + break scanIdentLabel + } + i += j + default: + break scanIdentLabel + } + } + if i == 0 { + panic("BUG: scanIdent couldn't find a single ident char; make sure isIdentPrefix called before scanIdent") + } + if !isGraphite && guess >= 0 { + return s[:guess] + } + return s[:i] +} + +func unescapeIdent(s string) string { + n := strings.IndexByte(s, '\\') + if n < 0 { + return s + } + dst := make([]byte, 0, len(s)) + for { + dst = append(dst, s[:n]...) 
+ s = s[n+1:] + if len(s) == 0 { + return string(dst) + } + if s[0] == 'x' && len(s) >= 3 { + h1 := fromHex(s[1]) + h2 := fromHex(s[2]) + if h1 >= 0 && h2 >= 0 { + dst = append(dst, byte((h1<<4)|h2)) + s = s[3:] + } else { + dst = append(dst, s[0]) + s = s[1:] + } + } else { + dst = append(dst, s[0]) + s = s[1:] + } + n = strings.IndexByte(s, '\\') + if n < 0 { + dst = append(dst, s...) + return string(dst) + } + } +} + +func fromHex(ch byte) int { + if ch >= '0' && ch <= '9' { + return int(ch - '0') + } + if ch >= 'a' && ch <= 'f' { + return int((ch - 'a') + 10) + } + if ch >= 'A' && ch <= 'F' { + return int((ch - 'A') + 10) + } + return -1 +} + +func toHex(n byte) byte { + if n < 10 { + return '0' + n + } + return 'a' + (n - 10) +} + +func appendEscapedIdent(dst []byte, s string) []byte { + for i := 0; i < len(s); i++ { + ch := s[i] + if isIdentChar(ch) { + if i == 0 && !isFirstIdentChar(ch) { + // hex-encode the first char + dst = append(dst, '\\', 'x', toHex(ch>>4), toHex(ch&0xf)) + } else { + dst = append(dst, ch) + } + } else if ch >= 0x20 && ch < 0x7f { + // Leave ASCII printable chars as is + dst = append(dst, '\\', ch) + } else { + // hex-encode non-printable chars + dst = append(dst, '\\', 'x', toHex(ch>>4), toHex(ch&0xf)) + } + } + return dst +} + +func (lex *lexer) Prev() { + lex.nextTokens = append(lex.nextTokens, lex.Token) + lex.Token = lex.prevTokens[len(lex.prevTokens)-1] + lex.prevTokens = lex.prevTokens[:len(lex.prevTokens)-1] +} + +func isEOF(s string) bool { + return len(s) == 0 +} + +func scanTagFilterOpPrefix(s string) int { + if len(s) >= 2 { + switch s[:2] { + case "=~", "!~", "!=": + return 2 + } + } + if len(s) >= 1 { + if s[0] == '=' { + return 1 + } + } + return -1 +} + +func isInfOrNaN(s string) bool { + if len(s) != 3 { + return false + } + s = strings.ToLower(s) + return s == "inf" || s == "nan" +} + +func isOffset(s string) bool { + s = strings.ToLower(s) + return s == "offset" +} + +func isStringPrefix(s string) bool { + if len(s) 
// isPositiveNumberPrefix reports whether s starts a non-negative number
// (either a leading digit or `.` followed by a digit).
func isPositiveNumberPrefix(s string) bool {
	if len(s) == 0 {
		return false
	}
	if isDecimalChar(s[0]) {
		return true
	}

	// Check for .234 numbers
	if s[0] != '.' || len(s) < 2 {
		return false
	}
	return isDecimalChar(s[1])
}

// isPositiveDuration reports whether the whole of s is a valid
// non-negative duration such as `5m` or `1.5h`.
func isPositiveDuration(s string) bool {
	n := scanDuration(s, false)
	return n == len(s)
}

// PositiveDurationValue returns the duration in milliseconds for the given s
// and the given step.
//
// It returns an error for negative durations.
func PositiveDurationValue(s string, step int64) (int64, error) {
	d, err := DurationValue(s, step)
	if err != nil {
		return 0, err
	}
	if d < 0 {
		return 0, fmt.Errorf("duration cannot be negative; got %q", s)
	}
	return d, nil
}

// DurationValue returns the duration in milliseconds for the given s
// and the given step.
//
// Supported suffixes: s, m, h, d, w, y, and i (multiples of step).
// The returned duration value can be negative.
func DurationValue(s string, step int64) (int64, error) {
	n := scanDuration(s, true)
	if n != len(s) {
		return 0, fmt.Errorf("cannot parse duration %q", s)
	}

	f, err := strconv.ParseFloat(s[:len(s)-1], 64)
	if err != nil {
		// Wrap with %w so callers can unwrap the strconv error.
		return 0, fmt.Errorf("cannot parse duration %q: %w", s, err)
	}

	// mp converts the suffix unit to seconds.
	var mp float64
	switch s[len(s)-1] {
	case 's':
		mp = 1
	case 'm':
		mp = 60
	case 'h':
		mp = 60 * 60
	case 'd':
		mp = 24 * 60 * 60
	case 'w':
		mp = 7 * 24 * 60 * 60
	case 'y':
		mp = 365 * 24 * 60 * 60
	case 'i':
		mp = float64(step) / 1e3
	default:
		return 0, fmt.Errorf("invalid duration suffix in %q", s)
	}
	return int64(mp * f * 1e3), nil
}

// scanDuration returns the length of the duration literal at the start
// of s (digits, optional fraction, one unit suffix), or -1 if there is
// none. A leading `-` is accepted only when canBeNegative is set.
func scanDuration(s string, canBeNegative bool) int {
	if len(s) == 0 {
		return -1
	}
	i := 0
	if s[0] == '-' && canBeNegative {
		i++
	}
	for i < len(s) && isDecimalChar(s[i]) {
		i++
	}
	if i == 0 || i == len(s) {
		return -1
	}
	if s[i] == '.' {
		j := i
		i++
		for i < len(s) && isDecimalChar(s[i]) {
			i++
		}
		// The fractional part must be non-empty and a suffix must follow.
		if i == j || i == len(s) {
			return -1
		}
	}
	switch s[i] {
	case 's', 'm', 'h', 'd', 'w', 'y', 'i':
		return i + 1
	default:
		return -1
	}
}

// isDecimalChar reports whether ch is an ASCII digit.
func isDecimalChar(ch byte) bool {
	return ch >= '0' && ch <= '9'
}

// isIdentPrefix reports whether s starts an identifier
// (an ident-start char or a backslash escape).
func isIdentPrefix(s string) bool {
	if len(s) == 0 {
		return false
	}
	if s[0] == '\\' {
		// Assume this is an escape char for the next char.
		return true
	}
	return isFirstIdentChar(s[0])
}

// isFirstIdentChar reports whether ch may start an identifier.
func isFirstIdentChar(ch byte) bool {
	if ch >= 'a' && ch <= 'z' || ch >= 'A' && ch <= 'Z' {
		return true
	}
	return ch == '_' || ch == ':'
}

// isIdentChar reports whether ch may appear inside an identifier.
// `.` is included to support dotted metric names and graphite paths.
func isIdentChar(ch byte) bool {
	if isFirstIdentChar(ch) {
		return true
	}
	return isDecimalChar(ch) || ch == '.'
}

// isSpaceChar reports whether ch is ASCII whitespace.
func isSpaceChar(ch byte) bool {
	switch ch {
	case ' ', '\t', '\n', '\v', '\f', '\r':
		return true
	default:
		return false
	}
}

// Function for support graphite target
// https://graphite.readthedocs.io/en/latest/render_api.html#paths-and-wildcards
// https://github.com/golang/go/blob/release-branch.go1.14/src/path/filepath/match.go#L18

// isExtendGraphiteIdentChar reports whether ch is an extra char allowed
// inside graphite targets but not in plain MetricsQL idents.
func isExtendGraphiteIdentChar(ch byte) bool {
	return ch == '-' || ch == '|' || ch == '?'
}

// scanCharRange returns the length of the graphite char range
// (e.g. `[a-z]`, `[^abc]`) at the start of s, or -1 if s does not start
// with a valid char range. Duration-like contents such as `[5m]` are
// rejected so that window suffixes keep working.
func scanCharRange(s string) int {
	if len(s) == 0 || s[0] != '[' {
		return -1
	}
	// Ignore graphite target mix duration. eg. a.b.c[5m]
	if scanDuration(s[1:], true) > -1 {
		return -1
	}
	i := 1
	charCount := 0
	isRange := false
scanCharRangeLabel:
	for i < len(s) {
		switch {
		case isIdentChar(s[i]):
			i++
			charCount++
		case s[i] == '\\':
			// Do not verify the next char, since it is escaped.
			i += 2
			if i > len(s) {
				i--
				break scanCharRangeLabel
			}
			charCount++
		case s[i] == '^':
			// Negation is only valid directly after `[`.
			if i != 1 {
				return -1
			}
			i++
		case s[i] == '-':
			// At most one range dash, not at either end and not doubled.
			if isRange {
				return -1
			}
			if i == 1 || i == len(s)-2 {
				return -1
			}
			if i < len(s)-1 && s[i+1] == '-' {
				return -1
			}
			isRange = true
			i++
		default:
			break scanCharRangeLabel
		}
	}
	if i == 1 || i == len(s) || s[i] != ']' {
		return -1
	}
	// A range like `a-z` must have exactly one char on each side.
	if isRange {
		if charCount != 2 {
			return -1
		}
	}
	return i + 1
}

// scanValueList returns the length of the graphite value list
// (e.g. `{a,b}`) at the start of s, or -1 if s does not start with a
// valid value list. Nested char ranges are allowed inside the list.
func scanValueList(s string) int {
	if len(s) < 2 || s[0] != '{' {
		return -1
	}
	i := 1
scanValueListLabel:
	for i < len(s) {
		switch {
		case isIdentChar(s[i]) || isSpaceChar(s[i]) || s[i] == '-':
			i++
		case s[i] == '\\':
			// Do not verify the next char, since it is escaped.
			i += 2
			if i > len(s) {
				i--
				break scanValueListLabel
			}
		case s[i] == ',':
			// Commas must separate non-empty values and must not be doubled.
			if i == 1 || i == len(s)-2 {
				return -1
			}
			if i < len(s)-1 && s[i+1] == ',' {
				return -1
			}
			i++
		case s[i] == '[':
			j := scanCharRange(s[i:])
			if j < 0 {
				return -1
			}
			i += j
		default:
			break scanValueListLabel
		}
	}
	if i == 1 || i == len(s) || s[i] != '}' {
		return -1
	}
	return i + 1
}
+ t.Helper() + result := appendEscapedIdent(nil, s) + if string(result) != resultExpected { + t.Fatalf("unexpected result for appendEscapedIdent(%q); got %q; want %q", s, result, resultExpected) + } + } + f(`a`, `a`) + f(`a.b:c_23`, `a.b:c_23`) + f(`a b-cd+dd\`, `a\ b\-cd\+dd\\`) + f("a\x1E\x20\xee", `a\x1e\ \xee`) + f("\x2e\x2e", `\x2e.`) +} + +func TestScanIdent(t *testing.T) { + f := func(s, resultExpected string) { + t.Helper() + result := scanIdent(s) + if result != resultExpected { + t.Fatalf("unexpected result for scanIdent(%q): got %q; want %q", s, result, resultExpected) + } + } + f("a", "a") + f("foo.bar:baz_123", "foo.bar:baz_123") + f("a+b", "a") + f("foo()", "foo") + f(`a\-b+c`, `a\-b`) + f(`a\ b\\\ c\`, `a\ b\\\ c\`) +} + +func TestLexerNextPrev(t *testing.T) { + var lex lexer + lex.Init("foo bar baz") + if lex.Token != "" { + t.Fatalf("unexpected token got: %q; want %q", lex.Token, "") + } + if err := lex.Next(); err != nil { + t.Fatalf("unexpeted error: %s", err) + } + if lex.Token != "foo" { + t.Fatalf("unexpected token got: %q; want %q", lex.Token, "foo") + } + + // Rewind before the first item. + lex.Prev() + if lex.Token != "" { + t.Fatalf("unexpected token got: %q; want %q", lex.Token, "") + } + if err := lex.Next(); err != nil { + t.Fatalf("unexpected error: %s", err) + } + if lex.Token != "foo" { + t.Fatalf("unexpected token got: %q; want %q", lex.Token, "foo") + } + if err := lex.Next(); err != nil { + t.Fatalf("unexpected error: %s", err) + } + if lex.Token != "bar" { + t.Fatalf("unexpected token got: %q; want %q", lex.Token, "bar") + } + + // Rewind to the first item. 
+ lex.Prev() + if lex.Token != "foo" { + t.Fatalf("unexpected token got: %q; want %q", lex.Token, "foo") + } + if err := lex.Next(); err != nil { + t.Fatalf("unexpected error: %s", err) + } + if lex.Token != "bar" { + t.Fatalf("unexpected token got: %q; want %q", lex.Token, "bar") + } + if err := lex.Next(); err != nil { + t.Fatalf("unexpected error: %s", err) + } + if lex.Token != "baz" { + t.Fatalf("unexpected token got: %q; want %q", lex.Token, "baz") + } + + // Go beyond the token stream. + if err := lex.Next(); err != nil { + t.Fatalf("unexpected error: %s", err) + } + if lex.Token != "" { + t.Fatalf("unexpected token got: %q; want %q", lex.Token, "") + } + if !isEOF(lex.Token) { + t.Fatalf("expecting eof") + } + lex.Prev() + if lex.Token != "baz" { + t.Fatalf("unexpected token got: %q; want %q", lex.Token, "baz") + } + + // Go multiple times lex.Next() beyond token stream. + if err := lex.Next(); err != nil { + t.Fatalf("unexpected error: %s", err) + } + if lex.Token != "" { + t.Fatalf("unexpected token got: %q; want %q", lex.Token, "") + } + if !isEOF(lex.Token) { + t.Fatalf("expecting eof") + } + if err := lex.Next(); err != nil { + t.Fatalf("unexpected error: %s", err) + } + if lex.Token != "" { + t.Fatalf("unexpected token got: %q; want %q", lex.Token, "") + } + if !isEOF(lex.Token) { + t.Fatalf("expecting eof") + } + lex.Prev() + if lex.Token != "" { + t.Fatalf("unexpected token got: %q; want %q", lex.Token, "") + } + if !isEOF(lex.Token) { + t.Fatalf("expecting eof") + } +} + +func TestLexerSuccess(t *testing.T) { + var s string + var expectedTokens []string + + // An empty string + s = "" + expectedTokens = nil + testLexerSuccess(t, s, expectedTokens) + + // String with whitespace + s = " \n\t\r " + expectedTokens = nil + testLexerSuccess(t, s, expectedTokens) + + // Just metric name + s = "metric" + expectedTokens = []string{"metric"} + testLexerSuccess(t, s, expectedTokens) + + // Metric name with spec chars + s = ":foo.bar_" + expectedTokens = 
[]string{":foo.bar_"} + testLexerSuccess(t, s, expectedTokens) + + // Metric name with window + s = "metric[5m] " + expectedTokens = []string{"metric", "[", "5m", "]"} + testLexerSuccess(t, s, expectedTokens) + + // Metric name with tag filters + s = ` metric:12.34{a="foo", b != "bar", c=~ "x.+y", d !~ "zzz"}` + expectedTokens = []string{`metric:12.34`, `{`, `a`, `=`, `"foo"`, `,`, `b`, `!=`, `"bar"`, `,`, `c`, `=~`, `"x.+y"`, `,`, `d`, `!~`, `"zzz"`, `}`} + testLexerSuccess(t, s, expectedTokens) + + // Metric name with offset + s = ` metric offset 10d ` + expectedTokens = []string{`metric`, `offset`, `10d`} + testLexerSuccess(t, s, expectedTokens) + + // Func call + s = `sum ( metric{x="y" } [5m] offset 10h)` + expectedTokens = []string{`sum`, `(`, `metric`, `{`, `x`, `=`, `"y"`, `}`, `[`, `5m`, `]`, `offset`, `10h`, `)`} + testLexerSuccess(t, s, expectedTokens) + + // Binary op + s = `a+b or c % d and e unless f` + expectedTokens = []string{`a`, `+`, `b`, `or`, `c`, `%`, `d`, `and`, `e`, `unless`, `f`} + testLexerSuccess(t, s, expectedTokens) + + // Numbers + s = `3+1.2-.23+4.5e5-78e-6+1.24e+45-NaN+Inf` + expectedTokens = []string{`3`, `+`, `1.2`, `-`, `.23`, `+`, `4.5e5`, `-`, `78e-6`, `+`, `1.24e+45`, `-`, `NaN`, `+`, `Inf`} + testLexerSuccess(t, s, expectedTokens) + + s = `12.34` + expectedTokens = []string{`12.34`} + testLexerSuccess(t, s, expectedTokens) + + // Strings + s = `""''` + "``" + `"\\" '\\' "\"" '\''"\\\"\\"` + expectedTokens = []string{`""`, `''`, "``", `"\\"`, `'\\'`, `"\""`, `'\''`, `"\\\"\\"`} + testLexerSuccess(t, s, expectedTokens) + + s = " `foo\\\\\\`бар` " + expectedTokens = []string{"`foo\\\\\\`бар`"} + testLexerSuccess(t, s, expectedTokens) + + s = `# comment # sdf + foobar # comment + baz + # yet another comment` + expectedTokens = []string{"foobar", "baz"} + testLexerSuccess(t, s, expectedTokens) +} + +func testLexerSuccess(t *testing.T, s string, expectedTokens []string) { + t.Helper() + + var lex lexer + lex.Init(s) + + var tokens 
[]string + for { + if err := lex.Next(); err != nil { + t.Fatalf("unexpected error: %s", err) + } + if isEOF(lex.Token) { + break + } + tokens = append(tokens, lex.Token) + } + if !reflect.DeepEqual(tokens, expectedTokens) { + t.Fatalf("unexected tokens\ngot\n%q\nwant\n%q", tokens, expectedTokens) + } +} + +func TestLexerError(t *testing.T) { + // Invalid identifier + testLexerError(t, ".foo") + + // Incomplete string + testLexerError(t, `"foobar`) + testLexerError(t, `'`) + testLexerError(t, "`") + + // Unrecognized char + testLexerError(t, "тест") + + // Invalid numbers + testLexerError(t, `.`) + testLexerError(t, `123.`) + testLexerError(t, `12e`) + testLexerError(t, `1.2e`) + testLexerError(t, `1.2E+`) + testLexerError(t, `1.2E-`) +} + +func testLexerError(t *testing.T, s string) { + t.Helper() + + var lex lexer + lex.Init(s) + for { + if err := lex.Next(); err != nil { + // Expected error + break + } + if isEOF(lex.Token) { + t.Fatalf("expecting error during parse") + } + } + + // Try calling Next again. It must return error. 
+ if err := lex.Next(); err == nil { + t.Fatalf("expecting non-nil error") + } +} + +func TestPositiveDurationSuccess(t *testing.T) { + f := func(s string, step, expectedD int64) { + t.Helper() + d, err := PositiveDurationValue(s, step) + if err != nil { + t.Fatalf("unexpected error: %s", err) + } + if d != expectedD { + t.Fatalf("unexpected duration; got %d; want %d", d, expectedD) + } + } + + // Integer durations + f("123s", 42, 123*1000) + f("123m", 42, 123*60*1000) + f("1h", 42, 1*60*60*1000) + f("2d", 42, 2*24*60*60*1000) + f("3w", 42, 3*7*24*60*60*1000) + f("4y", 42, 4*365*24*60*60*1000) + f("1i", 42*1000, 42*1000) + f("3i", 42, 3*42) + + // Float durations + f("0.234s", 42, 234) + f("1.5s", 42, 1.5*1000) + f("1.5m", 42, 1.5*60*1000) + f("1.2h", 42, 1.2*60*60*1000) + f("1.1d", 42, 1.1*24*60*60*1000) + f("1.1w", 42, 1.1*7*24*60*60*1000) + f("1.3y", 42, 1.3*365*24*60*60*1000) + f("0.1i", 12340, 0.1*12340) +} + +func TestPositiveDurationError(t *testing.T) { + f := func(s string) { + t.Helper() + if isPositiveDuration(s) { + t.Fatalf("unexpected valid duration %q", s) + } + d, err := PositiveDurationValue(s, 42) + if err == nil { + t.Fatalf("expecting non-nil error for duration %q", s) + } + if d != 0 { + t.Fatalf("expecting zero duration; got %d", d) + } + } + f("") + f("foo") + f("m") + f("12") + f("1.23") + f("1.23mm") + f("123q") + f("-123s") +} + +func TestDurationSuccess(t *testing.T) { + f := func(s string, step, expectedD int64) { + t.Helper() + d, err := DurationValue(s, step) + if err != nil { + t.Fatalf("unexpected error: %s", err) + } + if d != expectedD { + t.Fatalf("unexpected duration; got %d; want %d", d, expectedD) + } + } + + // Integer durations + f("123s", 42, 123*1000) + f("-123s", 42, -123*1000) + f("123m", 42, 123*60*1000) + f("1h", 42, 1*60*60*1000) + f("2d", 42, 2*24*60*60*1000) + f("3w", 42, 3*7*24*60*60*1000) + f("4y", 42, 4*365*24*60*60*1000) + f("1i", 42*1000, 42*1000) + f("3i", 42, 3*42) + f("-3i", 42, -3*42) + + // Float durations 
+ f("0.234s", 42, 234) + f("-0.234s", 42, -234) + f("1.5s", 42, 1.5*1000) + f("1.5m", 42, 1.5*60*1000) + f("1.2h", 42, 1.2*60*60*1000) + f("1.1d", 42, 1.1*24*60*60*1000) + f("1.1w", 42, 1.1*7*24*60*60*1000) + f("1.3y", 42, 1.3*365*24*60*60*1000) + f("-1.3y", 42, -1.3*365*24*60*60*1000) + f("0.1i", 12340, 0.1*12340) +} + +func TestDurationError(t *testing.T) { + f := func(s string) { + t.Helper() + d, err := DurationValue(s, 42) + if err == nil { + t.Fatalf("expecting non-nil error for duration %q", s) + } + if d != 0 { + t.Fatalf("expecting zero duration; got %d", d) + } + } + f("") + f("foo") + f("m") + f("12") + f("1.23") + f("1.23mm") + f("123q") +} + +func TestScanGraphiteIdent(t *testing.T) { + f := func(s, resultExpected string) { + t.Helper() + result := scanIdent(s) + if result != resultExpected { + t.Fatalf("unexpected result for scanIdent(%q): got %q; want %q", s, result, resultExpected) + } + } + f("g.b", "g.b") + f("g.*", "g.*") + f("g.*b", "g.*b") + f("g.b*", "g.b*") + f("g.b*.c", "g.b*.c") + f("g.a*b?c*x.f", "g.a*b?c*x.f") + f("g.ab[c]", "g.ab[c]") + f("g.ab[b-d]", "g.ab[b-d]") + f("g.ab[^c]", "g.ab[^c]") + f("g.ab[^b-d]", "g.ab[^b-d]") + f(`g.a\*b`, `g.a\*b`) + f("g.a?b", "g.a?b") + f("g.a[^a]b", "g.a[^a]b") + f("g.a???b", "g.a???b") + f("g.a[^a][^a][^a]b", "g.a[^a][^a][^a]b") + f("g.[a-z]*", "g.[a-z]*") + f("g.a?b", "g.a?b") + f("g.a*b", "g.a*b") + f(`g.[\]a]`, `g.[\]a]`) + f(`g.[\-]`, `g.[\-]`) + f(`g.[x\-]`, `g.[x\-]`) + f(`g.[\-x]`, `g.[\-x]`) + f(`g.[\-x]`, `g.[\-x]`) + f(`g-a-b.c-d.e`, `g-a-b.c-d.e`) + f(`g-a`, `g`) + f(`g-a-b`, `g`) +} + +func TestScanCharRange(t *testing.T) { + f := func(s string, resultExpected int) { + t.Helper() + result := scanCharRange(s) + if resultExpected == 0 { + resultExpected = len(s) + } + if result != resultExpected { + t.Fatalf("unexpected result for scanIdent(%q): got %d; want %d", s, result, resultExpected) + } + } + f("[", -1) + f("]", -1) + f("[-", -1) + f("[a-", -1) + f("[a-]", -1) + f("[a--b]", -1) + 
f("[]", -1) + f("[]a]", -1) + f("[-]", -1) + f("[-x]", -1) + f("[a-b-c]", -1) + f("[5m]", -1) + f("[^", -1) + f("[a b]", -1) + f("[bc^]", -1) + f("[af-z]", -1) + f("[a-xz]", -1) + f("[a]", 0) + f("[^a]", 0) + f("[^abc]", 0) + f("[abcd]", 0) + f("[3-9]", 0) + f("[a-z]", 0) + f(`[\a-z]`, 0) +} + +func TestScanValueList(t *testing.T) { + f := func(s string, resultExpected int) { + t.Helper() + result := scanValueList(s) + if resultExpected == 0 { + resultExpected = len(s) + } + if result != resultExpected { + t.Fatalf("unexpected result for scanIdent(%q): got %d; want %d", s, result, resultExpected) + } + } + f("{", -1) + f("}", -1) + f("{,", -1) + f("{a,", -1) + f("{a,}", -1) + f("{a,,b}", -1) + f("{}", -1) + f("{,}", -1) + f("{a}", 0) + f("{a,b}", 0) + f("{a, b}", 0) + f(`{a,\},b}`, 0) + f(`{a-b,c}`, 0) + f("{0[3-9],1[0-9],20}", 0) +} diff --git a/mateql/parser.go b/mateql/parser.go new file mode 100755 index 0000000..95d6a65 --- /dev/null +++ b/mateql/parser.go @@ -0,0 +1,1744 @@ +package mateql + +import ( + "fmt" + "strconv" + "strings" + "sync" +) + +// Parse parses MetricsQL query s. +// +// All the `WITH` expressions are expanded in the returned Expr. +// +// MetricsQL is backwards-compatible with PromQL. +func Parse(s string) (Expr, error) { + var p parser + p.lex.Init(s) + if err := p.lex.Next(); err != nil { + return nil, fmt.Errorf(`cannot find the first token: %s`, err) + } + e, err := p.parseExpr() + if err != nil { + return nil, fmt.Errorf(`%s; unparsed data: %q`, err, p.lex.Context()) + } + if !isEOF(p.lex.Token) { + return nil, fmt.Errorf(`unparsed data left: %q`, p.lex.Context()) + } + was := getDefaultWithArgExprs() + if e, err = expandWithExpr(was, e); err != nil { + return nil, fmt.Errorf(`cannot expand WITH expressions: %s`, err) + } + e = removeParensExpr(e) + e = simplifyConstants(e) + return e, nil +} + +// Expr holds any of *Expr types. +type Expr interface { + // AppendString appends string representation of Expr to dst. 
+ AppendString(dst []byte) []byte +} + +func getDefaultWithArgExprs() []*withArgExpr { + defaultWithArgExprsOnce.Do(func() { + defaultWithArgExprs = prepareWithArgExprs([]string{ + // ru - resource utilization + `ru(freev, maxv) = clamp_min(maxv - clamp_min(freev, 0), 0) / clamp_min(maxv, 0) * 100`, + + // ttf - time to fuckup + `ttf(freev) = smooth_exponential( + clamp_max(clamp_max(-freev, 0) / clamp_max(deriv_fast(freev), 0), 365*24*3600), + clamp_max(step()/300, 1) + )`, + + `median_over_time(m) = quantile_over_time(0.5, m)`, + `range_median(q) = range_quantile(0.5, q)`, + `alias(q, name) = label_set(q, "__name__", name)`, + }) + }) + return defaultWithArgExprs +} + +var ( + defaultWithArgExprs []*withArgExpr + defaultWithArgExprsOnce sync.Once +) + +func prepareWithArgExprs(ss []string) []*withArgExpr { + was := make([]*withArgExpr, len(ss)) + for i, s := range ss { + was[i] = mustParseWithArgExpr(s) + } + if err := checkDuplicateWithArgNames(was); err != nil { + panic(fmt.Errorf("BUG: %s", err)) + } + return was +} + +func checkDuplicateWithArgNames(was []*withArgExpr) error { + m := make(map[string]*withArgExpr, len(was)) + for _, wa := range was { + if waOld := m[wa.Name]; waOld != nil { + return fmt.Errorf("duplicate `with` arg name for: %s; previous one: %s", wa, waOld.AppendString(nil)) + } + m[wa.Name] = wa + } + return nil +} + +func mustParseWithArgExpr(s string) *withArgExpr { + var p parser + p.lex.Init(s) + if err := p.lex.Next(); err != nil { + panic(fmt.Errorf("BUG: cannot find the first token in %q: %s", s, err)) + } + wa, err := p.parseWithArgExpr() + if err != nil { + panic(fmt.Errorf("BUG: cannot parse %q: %s; unparsed data: %q", s, err, p.lex.Context())) + } + return wa +} + +// removeParensExpr removes parensExpr for (Expr) case. 
+func removeParensExpr(e Expr) Expr { + if re, ok := e.(*RollupExpr); ok { + re.Expr = removeParensExpr(re.Expr) + return re + } + if be, ok := e.(*BinaryOpExpr); ok { + be.Left = removeParensExpr(be.Left) + be.Right = removeParensExpr(be.Right) + return be + } + if ae, ok := e.(*AggrFuncExpr); ok { + for i, arg := range ae.Args { + ae.Args[i] = removeParensExpr(arg) + } + return ae + } + if fe, ok := e.(*FuncExpr); ok { + for i, arg := range fe.Args { + fe.Args[i] = removeParensExpr(arg) + } + return fe + } + if pe, ok := e.(*parensExpr); ok { + args := *pe + for i, arg := range args { + args[i] = removeParensExpr(arg) + } + if len(*pe) == 1 { + return args[0] + } + // Treat parensExpr as a function with empty name, i.e. union() + fe := &FuncExpr{ + Name: "", + Args: args, + } + return fe + } + return e +} + +func simplifyConstants(e Expr) Expr { + if re, ok := e.(*RollupExpr); ok { + re.Expr = simplifyConstants(re.Expr) + return re + } + if ae, ok := e.(*AggrFuncExpr); ok { + simplifyConstantsInplace(ae.Args) + return ae + } + if fe, ok := e.(*FuncExpr); ok { + simplifyConstantsInplace(fe.Args) + return fe + } + if pe, ok := e.(*parensExpr); ok { + if len(*pe) == 1 { + return simplifyConstants((*pe)[0]) + } + simplifyConstantsInplace(*pe) + return pe + } + be, ok := e.(*BinaryOpExpr) + if !ok { + return e + } + + be.Left = simplifyConstants(be.Left) + be.Right = simplifyConstants(be.Right) + + lne, ok := be.Left.(*NumberExpr) + if !ok { + return be + } + rne, ok := be.Right.(*NumberExpr) + if !ok { + return be + } + n := binaryOpEval(be.Op, lne.N, rne.N, be.Bool) + ne := &NumberExpr{ + N: n, + } + return ne +} + +func simplifyConstantsInplace(args []Expr) { + for i, arg := range args { + args[i] = simplifyConstants(arg) + } +} + +// parser parses MetricsQL expression. +// +// preconditions for all parser.parse* funcs: +// - p.lex.Token should point to the first token to parse. 
+// +// postconditions for all parser.parse* funcs: +// - p.lex.Token should point to the next token after the parsed token. +type parser struct { + lex lexer +} + +func isWith(s string) bool { + s = strings.ToLower(s) + return s == "with" +} + +// parseWithExpr parses `WITH (withArgExpr...) expr`. +func (p *parser) parseWithExpr() (*withExpr, error) { + var we withExpr + if !isWith(p.lex.Token) { + return nil, fmt.Errorf("withExpr: unexpected token %q; want `WITH`", p.lex.Token) + } + if err := p.lex.Next(); err != nil { + return nil, err + } + if p.lex.Token != "(" { + return nil, fmt.Errorf(`withExpr: unexpected token %q; want "("`, p.lex.Token) + } + for { + if err := p.lex.Next(); err != nil { + return nil, err + } + if p.lex.Token == ")" { + goto end + } + wa, err := p.parseWithArgExpr() + if err != nil { + return nil, err + } + we.Was = append(we.Was, wa) + switch p.lex.Token { + case ",": + continue + case ")": + goto end + default: + return nil, fmt.Errorf(`withExpr: unexpected token %q; want ",", ")"`, p.lex.Token) + } + } + +end: + if err := checkDuplicateWithArgNames(we.Was); err != nil { + return nil, err + } + if err := p.lex.Next(); err != nil { + return nil, err + } + e, err := p.parseExpr() + if err != nil { + return nil, err + } + we.Expr = e + return &we, nil +} + +func (p *parser) parseWithArgExpr() (*withArgExpr, error) { + var wa withArgExpr + if !isIdentPrefix(p.lex.Token) { + return nil, fmt.Errorf(`withArgExpr: unexpected token %q; want "ident"`, p.lex.Token) + } + wa.Name = p.lex.Token + if isAggrFunc(wa.Name) || IsRollupFunc(wa.Name) || IsTransformFunc(wa.Name) || isWith(wa.Name) { + return nil, fmt.Errorf(`withArgExpr: cannot use reserved name %q`, wa.Name) + } + if err := p.lex.Next(); err != nil { + return nil, err + } + if p.lex.Token == "(" { + // Parse func args. 
+ args, err := p.parseIdentList() + if err != nil { + return nil, fmt.Errorf(`withArgExpr: cannot parse args for %q: %s`, wa.Name, err) + } + // Make sure all the args have different names + m := make(map[string]bool, len(args)) + for _, arg := range args { + if m[arg] { + return nil, fmt.Errorf(`withArgExpr: duplicate func arg found in %q: %q`, wa.Name, arg) + } + m[arg] = true + } + wa.Args = args + } + if p.lex.Token != "=" { + return nil, fmt.Errorf(`withArgExpr: unexpected token %q; want "="`, p.lex.Token) + } + if err := p.lex.Next(); err != nil { + return nil, err + } + e, err := p.parseExpr() + if err != nil { + return nil, fmt.Errorf(`withArgExpr: cannot parse %q: %s`, wa.Name, err) + } + wa.Expr = e + return &wa, nil +} + +func (p *parser) parseExpr() (Expr, error) { + e, err := p.parseSingleExpr() + if err != nil { + return nil, err + } + for { + if !isBinaryOp(p.lex.Token) { + return e, nil + } + + var be BinaryOpExpr + be.Op = strings.ToLower(p.lex.Token) + be.Left = e + if err := p.lex.Next(); err != nil { + return nil, err + } + if isBinaryOpBoolModifier(p.lex.Token) { + if !IsBinaryOpCmp(be.Op) { + return nil, fmt.Errorf(`bool modifier cannot be applied to %q`, be.Op) + } + be.Bool = true + if err := p.lex.Next(); err != nil { + return nil, err + } + } + if isBinaryOpGroupModifier(p.lex.Token) { + if err := p.parseModifierExpr(&be.GroupModifier); err != nil { + return nil, err + } + if isBinaryOpJoinModifier(p.lex.Token) { + if isBinaryOpLogicalSet(be.Op) { + return nil, fmt.Errorf(`modifier %q cannot be applied to %q`, p.lex.Token, be.Op) + } + if err := p.parseModifierExpr(&be.JoinModifier); err != nil { + return nil, err + } + } + } + e2, err := p.parseSingleExpr() + if err != nil { + return nil, err + } + be.Right = e2 + e = balanceBinaryOp(&be) + } +} + +func balanceBinaryOp(be *BinaryOpExpr) Expr { + bel, ok := be.Left.(*BinaryOpExpr) + if !ok { + return be + } + lp := binaryOpPriority(bel.Op) + rp := binaryOpPriority(be.Op) + if rp < lp { + 
return be + } + if rp == lp && !isRightAssociativeBinaryOp(be.Op) { + return be + } + be.Left = bel.Right + bel.Right = balanceBinaryOp(be) + return bel +} + +// parseSingleExpr parses non-binaryOp expressions. +func (p *parser) parseSingleExpr() (Expr, error) { + if isWith(p.lex.Token) { + err := p.lex.Next() + nextToken := p.lex.Token + p.lex.Prev() + if err == nil && nextToken == "(" { + return p.parseWithExpr() + } + } + e, err := p.parseSingleExprWithoutRollupSuffix() + if err != nil { + return nil, err + } + if p.lex.Token != "[" && !isOffset(p.lex.Token) { + // There is no rollup expression. + return e, nil + } + return p.parseRollupExpr(e) +} + +func (p *parser) parseSingleExprWithoutRollupSuffix() (Expr, error) { + if isPositiveNumberPrefix(p.lex.Token) || isInfOrNaN(p.lex.Token) { + return p.parsePositiveNumberExpr() + } + if isStringPrefix(p.lex.Token) { + return p.parseStringExpr() + } + if isIdentPrefix(p.lex.Token) { + return p.parseIdentExpr() + } + switch p.lex.Token { + case "(": + return p.parseParensExpr() + case "{": + return p.parseMetricExpr() + case "-": + // Unary minus. 
Substitute -expr with (0 - expr) + if err := p.lex.Next(); err != nil { + return nil, err + } + e, err := p.parseSingleExpr() + if err != nil { + return nil, err + } + be := &BinaryOpExpr{ + Op: "-", + Left: &NumberExpr{ + N: 0, + }, + Right: e, + } + pe := parensExpr{be} + return &pe, nil + case "+": + // Unary plus + if err := p.lex.Next(); err != nil { + return nil, err + } + return p.parseSingleExpr() + default: + return nil, fmt.Errorf(`singleExpr: unexpected token %q; want "(", "{", "-", "+"`, p.lex.Token) + } +} + +func (p *parser) parsePositiveNumberExpr() (*NumberExpr, error) { + if !isPositiveNumberPrefix(p.lex.Token) && !isInfOrNaN(p.lex.Token) { + return nil, fmt.Errorf(`positiveNumberExpr: unexpected token %q; want "number"`, p.lex.Token) + } + + n, err := strconv.ParseFloat(p.lex.Token, 64) + if err != nil { + return nil, fmt.Errorf(`positiveNumberExpr: cannot parse %q: %s`, p.lex.Token, err) + } + if err := p.lex.Next(); err != nil { + return nil, err + } + ne := &NumberExpr{ + N: n, + } + return ne, nil +} + +func (p *parser) parseStringExpr() (*StringExpr, error) { + var se StringExpr + + for { + switch { + case isStringPrefix(p.lex.Token) || isIdentPrefix(p.lex.Token): + se.tokens = append(se.tokens, p.lex.Token) + default: + return nil, fmt.Errorf(`StringExpr: unexpected token %q; want "string"`, p.lex.Token) + } + if err := p.lex.Next(); err != nil { + return nil, err + } + if p.lex.Token != "+" { + return &se, nil + } + + // composite StringExpr like `"s1" + "s2"`, `"s" + m()` or `"s" + m{}` or `"s" + unknownToken`. 
+ if err := p.lex.Next(); err != nil { + return nil, err + } + if isStringPrefix(p.lex.Token) { + // "s1" + "s2" + continue + } + if !isIdentPrefix(p.lex.Token) { + // "s" + unknownToken + p.lex.Prev() + return &se, nil + } + // Look after ident + if err := p.lex.Next(); err != nil { + return nil, err + } + if p.lex.Token == "(" || p.lex.Token == "{" { + // `"s" + m(` or `"s" + m{` + p.lex.Prev() + p.lex.Prev() + return &se, nil + } + // "s" + ident + p.lex.Prev() + } +} + +func (p *parser) parseParensExpr() (*parensExpr, error) { + if p.lex.Token != "(" { + return nil, fmt.Errorf(`parensExpr: unexpected token %q; want "("`, p.lex.Token) + } + var exprs []Expr + for { + if err := p.lex.Next(); err != nil { + return nil, err + } + if p.lex.Token == ")" { + break + } + expr, err := p.parseExpr() + if err != nil { + return nil, err + } + exprs = append(exprs, expr) + if p.lex.Token == "," { + continue + } + if p.lex.Token == ")" { + break + } + return nil, fmt.Errorf(`parensExpr: unexpected token %q; want "," or ")"`, p.lex.Token) + } + if err := p.lex.Next(); err != nil { + return nil, err + } + pe := parensExpr(exprs) + return &pe, nil +} + +func (p *parser) parseAggrFuncExpr() (*AggrFuncExpr, error) { + if !isAggrFunc(p.lex.Token) { + return nil, fmt.Errorf(`AggrFuncExpr: unexpected token %q; want aggregate func`, p.lex.Token) + } + + var ae AggrFuncExpr + ae.Name = strings.ToLower(p.lex.Token) + if err := p.lex.Next(); err != nil { + return nil, err + } + if isIdentPrefix(p.lex.Token) { + goto funcPrefixLabel + } + if p.lex.Token == "(" { + goto funcArgsLabel + } + return nil, fmt.Errorf(`AggrFuncExpr: unexpected token %q; want "("`, p.lex.Token) + +funcPrefixLabel: + { + if !isAggrFuncModifier(p.lex.Token) { + return nil, fmt.Errorf(`AggrFuncExpr: unexpected token %q; want aggregate func modifier`, p.lex.Token) + } + if err := p.parseModifierExpr(&ae.Modifier); err != nil { + return nil, err + } + } + +funcArgsLabel: + { + args, err := p.parseArgListExpr() + if 
err != nil { + return nil, err + } + ae.Args = args + + // Verify whether func suffix exists. + if ae.Modifier.Op == "" && isAggrFuncModifier(p.lex.Token) { + if err := p.parseModifierExpr(&ae.Modifier); err != nil { + return nil, err + } + } + + // Check for optional limit. + if strings.ToLower(p.lex.Token) == "limit" { + if err := p.lex.Next(); err != nil { + return nil, err + } + limit, err := strconv.Atoi(p.lex.Token) + if err != nil { + return nil, fmt.Errorf("cannot parse limit %q: %s", p.lex.Token, err) + } + if err := p.lex.Next(); err != nil { + return nil, err + } + ae.Limit = limit + } + return &ae, nil + } +} + +func expandWithExpr(was []*withArgExpr, e Expr) (Expr, error) { + switch t := e.(type) { + case *BinaryOpExpr: + left, err := expandWithExpr(was, t.Left) + if err != nil { + return nil, err + } + right, err := expandWithExpr(was, t.Right) + if err != nil { + return nil, err + } + groupModifierArgs, err := expandModifierArgs(was, t.GroupModifier.Args) + if err != nil { + return nil, err + } + joinModifierArgs, err := expandModifierArgs(was, t.JoinModifier.Args) + if err != nil { + return nil, err + } + if t.Op == "+" { + lse, lok := left.(*StringExpr) + rse, rok := right.(*StringExpr) + if lok && rok { + se := &StringExpr{ + S: lse.S + rse.S, + } + return se, nil + } + } + be := &BinaryOpExpr{ + Op: t.Op, + Bool: t.Bool, + GroupModifier: t.GroupModifier, + JoinModifier: t.JoinModifier, + Left: left, + Right: right, + } + be.GroupModifier.Args = groupModifierArgs + be.JoinModifier.Args = joinModifierArgs + pe := parensExpr{be} + return &pe, nil + case *FuncExpr: + args, err := expandWithArgs(was, t.Args) + if err != nil { + return nil, err + } + wa := getWithArgExpr(was, t.Name) + if wa == nil { + fe := &FuncExpr{ + Name: t.Name, + Args: args, + } + return fe, nil + } + return expandWithExprExt(was, wa, args) + case *AggrFuncExpr: + args, err := expandWithArgs(was, t.Args) + if err != nil { + return nil, err + } + modifierArgs, err := 
expandModifierArgs(was, t.Modifier.Args) + if err != nil { + return nil, err + } + ae := &AggrFuncExpr{ + Name: t.Name, + Args: args, + Modifier: t.Modifier, + Limit: t.Limit, + } + ae.Modifier.Args = modifierArgs + return ae, nil + case *parensExpr: + exprs, err := expandWithArgs(was, *t) + if err != nil { + return nil, err + } + pe := parensExpr(exprs) + return &pe, nil + case *StringExpr: + if len(t.S) > 0 { + // Already expanded. + return t, nil + } + var b []byte + for _, token := range t.tokens { + if isStringPrefix(token) { + s, err := extractStringValue(token) + if err != nil { + return nil, err + } + b = append(b, s...) + continue + } + wa := getWithArgExpr(was, token) + if wa == nil { + return nil, fmt.Errorf("missing %q value inside StringExpr", token) + } + eNew, err := expandWithExprExt(was, wa, nil) + if err != nil { + return nil, err + } + seSrc, ok := eNew.(*StringExpr) + if !ok { + return nil, fmt.Errorf("%q must be string expression; got %q", token, eNew.AppendString(nil)) + } + if len(seSrc.tokens) > 0 { + panic(fmt.Errorf("BUG: seSrc.tokens must be empty; got %q", seSrc.tokens)) + } + b = append(b, seSrc.S...) + } + se := &StringExpr{ + S: string(b), + } + return se, nil + case *RollupExpr: + eNew, err := expandWithExpr(was, t.Expr) + if err != nil { + return nil, err + } + re := *t + re.Expr = eNew + return &re, nil + case *withExpr: + wasNew := make([]*withArgExpr, 0, len(was)+len(t.Was)) + wasNew = append(wasNew, was...) + wasNew = append(wasNew, t.Was...) + eNew, err := expandWithExpr(wasNew, t.Expr) + if err != nil { + return nil, err + } + return eNew, nil + case *MetricExpr: + if len(t.LabelFilters) > 0 { + // Already expanded. + return t, nil + } + { + var me MetricExpr + // Populate me.LabelFilters + for _, lfe := range t.labelFilters { + if lfe.Value == nil { + // Expand lfe.Label into []LabelFilter. 
+ wa := getWithArgExpr(was, lfe.Label) + if wa == nil { + return nil, fmt.Errorf("missing %q value inside %q", lfe.Label, t.AppendString(nil)) + } + eNew, err := expandWithExprExt(was, wa, nil) + if err != nil { + return nil, err + } + wme, ok := eNew.(*MetricExpr) + if !ok || wme.hasNonEmptyMetricGroup() { + return nil, fmt.Errorf("%q must be filters expression inside %q; got %q", lfe.Label, t.AppendString(nil), eNew.AppendString(nil)) + } + if len(wme.labelFilters) > 0 { + panic(fmt.Errorf("BUG: wme.labelFilters must be empty; got %s", wme.labelFilters)) + } + me.LabelFilters = append(me.LabelFilters, wme.LabelFilters...) + continue + } + + // convert lfe to LabelFilter. + se, err := expandWithExpr(was, lfe.Value) + if err != nil { + return nil, err + } + var lfeNew labelFilterExpr + lfeNew.Label = lfe.Label + lfeNew.Value = se.(*StringExpr) + lfeNew.IsNegative = lfe.IsNegative + lfeNew.IsRegexp = lfe.IsRegexp + lf, err := lfeNew.toLabelFilter() + if err != nil { + return nil, err + } + me.LabelFilters = append(me.LabelFilters, *lf) + } + me.LabelFilters = removeDuplicateLabelFilters(me.LabelFilters) + t = &me + } + if !t.hasNonEmptyMetricGroup() { + return t, nil + } + k := string(appendEscapedIdent(nil, t.LabelFilters[0].Value)) + wa := getWithArgExpr(was, k) + if wa == nil { + return t, nil + } + eNew, err := expandWithExprExt(was, wa, nil) + if err != nil { + return nil, err + } + var wme *MetricExpr + re, _ := eNew.(*RollupExpr) + if re != nil { + wme, _ = re.Expr.(*MetricExpr) + } else { + wme, _ = eNew.(*MetricExpr) + } + if wme == nil { + if !t.isOnlyMetricGroup() { + return nil, fmt.Errorf("cannot expand %q to non-metric expression %q", t.AppendString(nil), eNew.AppendString(nil)) + } + return eNew, nil + } + if len(wme.labelFilters) > 0 { + panic(fmt.Errorf("BUG: wme.labelFilters must be empty; got %s", wme.labelFilters)) + } + + var me MetricExpr + me.LabelFilters = append(me.LabelFilters, wme.LabelFilters...) 
+ me.LabelFilters = append(me.LabelFilters, t.LabelFilters[1:]...) + me.LabelFilters = removeDuplicateLabelFilters(me.LabelFilters) + + if re == nil { + return &me, nil + } + reNew := *re + reNew.Expr = &me + return &reNew, nil + default: + return e, nil + } +} + +func expandWithArgs(was []*withArgExpr, args []Expr) ([]Expr, error) { + dstArgs := make([]Expr, len(args)) + for i, arg := range args { + dstArg, err := expandWithExpr(was, arg) + if err != nil { + return nil, err + } + dstArgs[i] = dstArg + } + return dstArgs, nil +} + +func expandModifierArgs(was []*withArgExpr, args []string) ([]string, error) { + if len(args) == 0 { + return nil, nil + } + dstArgs := make([]string, 0, len(args)) + for _, arg := range args { + wa := getWithArgExpr(was, arg) + if wa == nil { + // Leave the arg as is. + dstArgs = append(dstArgs, arg) + continue + } + if len(wa.Args) > 0 { + // Template funcs cannot be used inside modifier list. Leave the arg as is. + dstArgs = append(dstArgs, arg) + continue + } + me, ok := wa.Expr.(*MetricExpr) + if ok { + if !me.isOnlyMetricGroup() { + return nil, fmt.Errorf("cannot use %q instead of %q in %s", me.AppendString(nil), arg, args) + } + dstArg := me.LabelFilters[0].Value + dstArgs = append(dstArgs, dstArg) + continue + } + pe, ok := wa.Expr.(*parensExpr) + if ok { + for _, pArg := range *pe { + me, ok := pArg.(*MetricExpr) + if !ok || !me.isOnlyMetricGroup() { + return nil, fmt.Errorf("cannot use %q instead of %q in %s", pe.AppendString(nil), arg, args) + } + dstArg := me.LabelFilters[0].Value + dstArgs = append(dstArgs, dstArg) + } + continue + } + return nil, fmt.Errorf("cannot use %q instead of %q in %s", wa.Expr.AppendString(nil), arg, args) + } + + // Remove duplicate args from dstArgs + m := make(map[string]bool, len(dstArgs)) + filteredArgs := dstArgs[:0] + for _, arg := range dstArgs { + if !m[arg] { + filteredArgs = append(filteredArgs, arg) + m[arg] = true + } + } + return filteredArgs, nil +} + +func expandWithExprExt(was 
[]*withArgExpr, wa *withArgExpr, args []Expr) (Expr, error) { + if len(wa.Args) != len(args) { + if args == nil { + // Just return MetricExpr with the wa.Name name. + return newMetricExpr(wa.Name), nil + } + return nil, fmt.Errorf("invalid number of args for %q; got %d; want %d", wa.Name, len(args), len(wa.Args)) + } + wasNew := make([]*withArgExpr, 0, len(was)+len(args)) + for _, waTmp := range was { + if waTmp == wa { + break + } + wasNew = append(wasNew, waTmp) + } + for i, arg := range args { + wasNew = append(wasNew, &withArgExpr{ + Name: wa.Args[i], + Expr: arg, + }) + } + return expandWithExpr(wasNew, wa.Expr) +} + +func newMetricExpr(name string) *MetricExpr { + return &MetricExpr{ + LabelFilters: []LabelFilter{{ + Label: "__name__", + Value: name, + }}, + } +} + +func extractStringValue(token string) (string, error) { + if !isStringPrefix(token) { + return "", fmt.Errorf(`StringExpr must contain only string literals; got %q`, token) + } + + // See https://prometheus.io/docs/prometheus/latest/querying/basics/#string-literals + if token[0] == '\'' { + if len(token) < 2 || token[len(token)-1] != '\'' { + return "", fmt.Errorf(`string literal contains unexpected trailing char; got %q`, token) + } + token = token[1 : len(token)-1] + token = strings.Replace(token, "\\'", "'", -1) + token = strings.Replace(token, `"`, `\"`, -1) + token = `"` + token + `"` + } + s, err := strconv.Unquote(token) + if err != nil { + return "", fmt.Errorf(`cannot parse string literal %q: %s`, token, err) + } + return s, nil +} + +func removeDuplicateLabelFilters(lfs []LabelFilter) []LabelFilter { + lfsm := make(map[string]bool, len(lfs)) + lfsNew := lfs[:0] + var buf []byte + for i := range lfs { + lf := &lfs[i] + buf = lf.AppendString(buf[:0]) + if lfsm[string(buf)] { + continue + } + lfsm[string(buf)] = true + lfsNew = append(lfsNew, *lf) + } + return lfsNew +} + +func (p *parser) parseFuncExpr() (*FuncExpr, error) { + if !isIdentPrefix(p.lex.Token) { + return nil, 
fmt.Errorf(`FuncExpr: unexpected token %q; want "ident"`, p.lex.Token) + } + + var fe FuncExpr + fe.Name = p.lex.Token + if err := p.lex.Next(); err != nil { + return nil, err + } + if p.lex.Token != "(" { + return nil, fmt.Errorf(`FuncExpr; unexpected token %q; want "("`, p.lex.Token) + } + args, err := p.parseArgListExpr() + if err != nil { + return nil, err + } + fe.Args = args + return &fe, nil +} + +func (p *parser) parseModifierExpr(me *ModifierExpr) error { + if !isIdentPrefix(p.lex.Token) { + return fmt.Errorf(`ModifierExpr: unexpected token %q; want "ident"`, p.lex.Token) + } + + me.Op = strings.ToLower(p.lex.Token) + + if err := p.lex.Next(); err != nil { + return err + } + if isBinaryOpJoinModifier(me.Op) && p.lex.Token != "(" { + // join modifier may miss ident list. + return nil + } + args, err := p.parseIdentList() + if err != nil { + return err + } + me.Args = args + return nil +} + +func (p *parser) parseIdentList() ([]string, error) { + if p.lex.Token != "(" { + return nil, fmt.Errorf(`identList: unexpected token %q; want "("`, p.lex.Token) + } + var idents []string + for { + if err := p.lex.Next(); err != nil { + return nil, err + } + if p.lex.Token == ")" { + goto closeParensLabel + } + if !isIdentPrefix(p.lex.Token) { + return nil, fmt.Errorf(`identList: unexpected token %q; want "ident"`, p.lex.Token) + } + idents = append(idents, p.lex.Token) + if err := p.lex.Next(); err != nil { + return nil, err + } + switch p.lex.Token { + case ",": + continue + case ")": + goto closeParensLabel + default: + return nil, fmt.Errorf(`identList: unexpected token %q; want ",", ")"`, p.lex.Token) + } + } + +closeParensLabel: + if err := p.lex.Next(); err != nil { + return nil, err + } + return idents, nil +} + +func (p *parser) parseArgListExpr() ([]Expr, error) { + if p.lex.Token != "(" { + return nil, fmt.Errorf(`argList: unexpected token %q; want "("`, p.lex.Token) + } + var args []Expr + for { + if err := p.lex.Next(); err != nil { + return nil, err + } + 
if p.lex.Token == ")" { + goto closeParensLabel + } + expr, err := p.parseExpr() + if err != nil { + return nil, err + } + args = append(args, expr) + switch p.lex.Token { + case ",": + continue + case ")": + goto closeParensLabel + default: + return nil, fmt.Errorf(`argList: unexpected token %q; want ",", ")"`, p.lex.Token) + } + } + +closeParensLabel: + if err := p.lex.Next(); err != nil { + return nil, err + } + return args, nil +} + +func getWithArgExpr(was []*withArgExpr, name string) *withArgExpr { + // Scan wes backwards, since certain expressions may override + // previously defined expressions + for i := len(was) - 1; i >= 0; i-- { + wa := was[i] + if wa.Name == name { + return wa + } + } + return nil +} + +func (p *parser) parseLabelFilters() ([]*labelFilterExpr, error) { + if p.lex.Token != "{" { + return nil, fmt.Errorf(`labelFilters: unexpected token %q; want "{"`, p.lex.Token) + } + + var lfes []*labelFilterExpr + for { + if err := p.lex.Next(); err != nil { + return nil, err + } + if p.lex.Token == "}" { + goto closeBracesLabel + } + lfe, err := p.parseLabelFilterExpr() + if err != nil { + return nil, err + } + lfes = append(lfes, lfe) + switch p.lex.Token { + case ",": + continue + case "}": + goto closeBracesLabel + default: + return nil, fmt.Errorf(`labelFilters: unexpected token %q; want ",", "}"`, p.lex.Token) + } + } + +closeBracesLabel: + if err := p.lex.Next(); err != nil { + return nil, err + } + return lfes, nil +} + +func (p *parser) parseLabelFilterExpr() (*labelFilterExpr, error) { + if !isIdentPrefix(p.lex.Token) { + return nil, fmt.Errorf(`labelFilterExpr: unexpected token %q; want "ident"`, p.lex.Token) + } + var lfe labelFilterExpr + lfe.Label = p.lex.Token + if err := p.lex.Next(); err != nil { + return nil, err + } + + switch p.lex.Token { + case "=": + // Nothing to do. 
+ case "!=": + lfe.IsNegative = true + case "=~": + lfe.IsRegexp = true + case "!~": + lfe.IsNegative = true + lfe.IsRegexp = true + case ",", "}": + return &lfe, nil + default: + return nil, fmt.Errorf(`labelFilterExpr: unexpected token %q; want "=", "!=", "=~", "!~", ",", "}"`, p.lex.Token) + } + + if err := p.lex.Next(); err != nil { + return nil, err + } + se, err := p.parseStringExpr() + if err != nil { + return nil, err + } + lfe.Value = se + return &lfe, nil +} + +// labelFilterExpr represents `foo "bar"` expression, where is `=`, `!=`, `=~` or `!~`. +// +// This type isn't exported. +type labelFilterExpr struct { + Label string + Value *StringExpr + IsRegexp bool + IsNegative bool +} + +func (lfe *labelFilterExpr) String() string { + return fmt.Sprintf("[label=%q, value=%+v, isRegexp=%v, isNegative=%v]", lfe.Label, lfe.Value, lfe.IsRegexp, lfe.IsNegative) +} + +func (lfe *labelFilterExpr) toLabelFilter() (*LabelFilter, error) { + if lfe.Value == nil || len(lfe.Value.tokens) > 0 { + panic(fmt.Errorf("BUG: lfe.Value must be already expanded; got %v", lfe.Value)) + } + + var lf LabelFilter + lf.Label = unescapeIdent(lfe.Label) + if lf.Label == "__name__" { + lf.Value = unescapeIdent(lfe.Value.S) + } else { + lf.Value = lfe.Value.S + } + lf.IsRegexp = lfe.IsRegexp + lf.IsNegative = lfe.IsNegative + if !lf.IsRegexp { + return &lf, nil + } + + // Verify regexp. 
+ if _, err := CompileRegexpAnchored(lfe.Value.S); err != nil { + return nil, fmt.Errorf("invalid regexp in %s=%q: %s", lf.Label, lf.Value, err) + } + return &lf, nil +} + +func (p *parser) parseWindowAndStep() (string, string, bool, error) { + if p.lex.Token != "[" { + return "", "", false, fmt.Errorf(`windowAndStep: unexpected token %q; want "["`, p.lex.Token) + } + err := p.lex.Next() + if err != nil { + return "", "", false, err + } + var window string + if !strings.HasPrefix(p.lex.Token, ":") { + window, err = p.parsePositiveDuration() + if err != nil { + return "", "", false, err + } + } + var step string + inheritStep := false + if strings.HasPrefix(p.lex.Token, ":") { + // Parse step + p.lex.Token = p.lex.Token[1:] + if p.lex.Token == "" { + if err := p.lex.Next(); err != nil { + return "", "", false, err + } + if p.lex.Token == "]" { + inheritStep = true + } + } + if p.lex.Token != "]" { + step, err = p.parsePositiveDuration() + if err != nil { + return "", "", false, err + } + } + } + if p.lex.Token != "]" { + return "", "", false, fmt.Errorf(`windowAndStep: unexpected token %q; want "]"`, p.lex.Token) + } + if err := p.lex.Next(); err != nil { + return "", "", false, err + } + return window, step, inheritStep, nil +} + +func (p *parser) parseOffset() (string, error) { + if !isOffset(p.lex.Token) { + return "", fmt.Errorf(`offset: unexpected token %q; want "offset"`, p.lex.Token) + } + if err := p.lex.Next(); err != nil { + return "", err + } + d, err := p.parseDuration() + if err != nil { + return "", err + } + return d, nil +} + +func (p *parser) parseDuration() (string, error) { + isNegative := false + if p.lex.Token == "-" { + isNegative = true + if err := p.lex.Next(); err != nil { + return "", err + } + } + if !isPositiveDuration(p.lex.Token) { + return "", fmt.Errorf(`duration: unexpected token %q; want "duration"`, p.lex.Token) + } + d := p.lex.Token + if err := p.lex.Next(); err != nil { + return "", err + } + if isNegative { + d = "-" + d + } + 
return d, nil +} + +func (p *parser) parsePositiveDuration() (string, error) { + d, err := p.parseDuration() + if err != nil { + return "", err + } + if strings.HasPrefix(d, "-") { + return "", fmt.Errorf("positiveDuration: expecting positive duration; got %q", d) + } + return d, nil +} + +// parseIdentExpr parses expressions starting with `ident` token. +func (p *parser) parseIdentExpr() (Expr, error) { + // Look into the next-next token in order to determine how to parse + // the current expression. + if err := p.lex.Next(); err != nil { + return nil, err + } + if isEOF(p.lex.Token) || isOffset(p.lex.Token) { + p.lex.Prev() + return p.parseMetricExpr() + } + if isIdentPrefix(p.lex.Token) { + p.lex.Prev() + if isAggrFunc(p.lex.Token) { + return p.parseAggrFuncExpr() + } + return p.parseMetricExpr() + } + if isBinaryOp(p.lex.Token) { + p.lex.Prev() + return p.parseMetricExpr() + } + switch p.lex.Token { + case "(": + p.lex.Prev() + if isAggrFunc(p.lex.Token) { + return p.parseAggrFuncExpr() + } + return p.parseFuncExpr() + case "{", "[", ")", ",": + p.lex.Prev() + return p.parseMetricExpr() + default: + return nil, fmt.Errorf(`identExpr: unexpected token %q; want "(", "{", "[", ")", ","`, p.lex.Token) + } +} + +func (p *parser) parseMetricExpr() (*MetricExpr, error) { + var me MetricExpr + if isIdentPrefix(p.lex.Token) { + var lfe labelFilterExpr + lfe.Label = "__name__" + lfe.Value = &StringExpr{ + tokens: []string{strconv.Quote(p.lex.Token)}, + } + me.labelFilters = append(me.labelFilters[:0], &lfe) + if err := p.lex.Next(); err != nil { + return nil, err + } + if p.lex.Token != "{" { + return &me, nil + } + } + lfes, err := p.parseLabelFilters() + if err != nil { + return nil, err + } + me.labelFilters = append(me.labelFilters, lfes...) 
+ return &me, nil +} + +func (p *parser) parseRollupExpr(arg Expr) (Expr, error) { + var re RollupExpr + re.Expr = arg + if p.lex.Token == "[" { + window, step, inheritStep, err := p.parseWindowAndStep() + if err != nil { + return nil, err + } + re.Window = window + re.Step = step + re.InheritStep = inheritStep + if !isOffset(p.lex.Token) { + return &re, nil + } + } + offset, err := p.parseOffset() + if err != nil { + return nil, err + } + re.Offset = offset + return &re, nil +} + +// StringExpr represents string expression. +type StringExpr struct { + // S contains unquoted value for string expression. + S string + + // Composite string has non-empty tokens. + // They must be converted into S by expandWithExpr. + tokens []string +} + +// AppendString appends string representation of se to dst and returns the result. +func (se *StringExpr) AppendString(dst []byte) []byte { + return strconv.AppendQuote(dst, se.S) +} + +// NumberExpr represents number expression. +type NumberExpr struct { + // N is the parsed number, i.e. `1.23`, `-234`, etc. + N float64 +} + +// AppendString appends string representation of ne to dst and returns the result. +func (ne *NumberExpr) AppendString(dst []byte) []byte { + return strconv.AppendFloat(dst, ne.N, 'g', -1, 64) +} + +// parensExpr represents `(...)`. +// +// It isn't exported. +type parensExpr []Expr + +// AppendString appends string representation of pe to dst and returns the result. +func (pe parensExpr) AppendString(dst []byte) []byte { + return appendStringArgListExpr(dst, pe) +} + +// BinaryOpExpr represents binary operation. +type BinaryOpExpr struct { + // Op is the operation itself, i.e. `+`, `-`, `*`, etc. + Op string + + // Bool indicates whether `bool` modifier is present. + // For example, `foo >bool bar`. + Bool bool + + // GroupModifier contains modifier such as "on" or "ignoring". + GroupModifier ModifierExpr + + // JoinModifier contains modifier such as "group_left" or "group_right". 
+ JoinModifier ModifierExpr + + // Left contains left arg for the `left op right` expression. + Left Expr + + // Right contains right arg for the `left op right` epxression. + Right Expr +} + +// AppendString appends string representation of be to dst and returns the result. +func (be *BinaryOpExpr) AppendString(dst []byte) []byte { + if _, ok := be.Left.(*BinaryOpExpr); ok { + dst = append(dst, '(') + dst = be.Left.AppendString(dst) + dst = append(dst, ')') + } else { + dst = be.Left.AppendString(dst) + } + dst = append(dst, ' ') + dst = append(dst, be.Op...) + if be.Bool { + dst = append(dst, " bool"...) + } + if be.GroupModifier.Op != "" { + dst = append(dst, ' ') + dst = be.GroupModifier.AppendString(dst) + } + if be.JoinModifier.Op != "" { + dst = append(dst, ' ') + dst = be.JoinModifier.AppendString(dst) + } + dst = append(dst, ' ') + if _, ok := be.Right.(*BinaryOpExpr); ok { + dst = append(dst, '(') + dst = be.Right.AppendString(dst) + dst = append(dst, ')') + } else { + dst = be.Right.AppendString(dst) + } + return dst +} + +// ModifierExpr represents MetricsQL modifier such as ` (...)` +type ModifierExpr struct { + // Op is modifier operation. + Op string + + // Args contains modifier args from parens. + Args []string +} + +// AppendString appends string representation of me to dst and returns the result. +func (me *ModifierExpr) AppendString(dst []byte) []byte { + dst = append(dst, me.Op...) + dst = append(dst, " ("...) + for i, arg := range me.Args { + dst = append(dst, arg...) + if i+1 < len(me.Args) { + dst = append(dst, ", "...) + } + } + dst = append(dst, ')') + return dst +} + +func appendStringArgListExpr(dst []byte, args []Expr) []byte { + dst = append(dst, '(') + for i, arg := range args { + dst = arg.AppendString(dst) + if i+1 < len(args) { + dst = append(dst, ", "...) + } + } + dst = append(dst, ')') + return dst +} + +// FuncExpr represetns MetricsQL function such as `foo(...)` +type FuncExpr struct { + // Name is function name. 
+ Name string + + // Args contains function args. + Args []Expr +} + +// AppendString appends string representation of fe to dst and returns the result. +func (fe *FuncExpr) AppendString(dst []byte) []byte { + dst = append(dst, fe.Name...) + dst = appendStringArgListExpr(dst, fe.Args) + return dst +} + +// AggrFuncExpr represents aggregate function such as `sum(...) by (...)` +type AggrFuncExpr struct { + // Name is the function name. + Name string + + // Args is the function args. + Args []Expr + + // Modifier is optional modifier such as `by (...)` or `without (...)`. + Modifier ModifierExpr + + // Optional limit for the number of output time series. + // This is MetricsQL extension. + // + // Example: `sum(...) by (...) limit 10` would return maximum 10 time series. + Limit int +} + +// AppendString appends string representation of ae to dst and returns the result. +func (ae *AggrFuncExpr) AppendString(dst []byte) []byte { + dst = append(dst, ae.Name...) + dst = appendStringArgListExpr(dst, ae.Args) + if ae.Modifier.Op != "" { + dst = append(dst, ' ') + dst = ae.Modifier.AppendString(dst) + } + if ae.Limit > 0 { + dst = append(dst, " limit "...) + dst = strconv.AppendInt(dst, int64(ae.Limit), 10) + } + return dst +} + +// withExpr represents `with (...)` extension from MetricsQL. +// +// It isn't exported. +type withExpr struct { + Was []*withArgExpr + Expr Expr +} + +// AppendString appends string representation of we to dst and returns the result. +func (we *withExpr) AppendString(dst []byte) []byte { + dst = append(dst, "WITH ("...) + for i, wa := range we.Was { + dst = wa.AppendString(dst) + if i+1 < len(we.Was) { + dst = append(dst, ',') + } + } + dst = append(dst, ") "...) + dst = we.Expr.AppendString(dst) + return dst +} + +// withArgExpr represents a single entry from WITH expression. +// +// It isn't exported. 
+type withArgExpr struct { + Name string + Args []string + Expr Expr +} + +// AppendString appends string representation of wa to dst and returns the result. +func (wa *withArgExpr) AppendString(dst []byte) []byte { + dst = append(dst, wa.Name...) + if len(wa.Args) > 0 { + dst = append(dst, '(') + for i, arg := range wa.Args { + dst = append(dst, arg...) + if i+1 < len(wa.Args) { + dst = append(dst, ',') + } + } + dst = append(dst, ')') + } + dst = append(dst, " = "...) + dst = wa.Expr.AppendString(dst) + return dst +} + +// RollupExpr represents MetricsQL expression, which contains at least `offset` or `[...]` part. +type RollupExpr struct { + // The expression for the rollup. Usually it is MetricExpr, but may be arbitrary expr + // if subquery is used. https://prometheus.io/blog/2019/01/28/subquery-support/ + Expr Expr + + // Window contains optional window value from square brackets + // + // For example, `http_requests_total[5m]` will have Window value `5m`. + Window string + + // Offset contains optional value from `offset` part. + // + // For example, `foobar{baz="aa"} offset 5m` will have Offset value `5m`. + Offset string + + // Step contains optional step value from square brackets. + // + // For example, `foobar[1h:3m]` will have Step value '3m'. + Step string + + // If set to true, then `foo[1h:]` would print the same + // instead of `foo[1h]`. + InheritStep bool +} + +// ForSubquery returns true if re represents subquery. +func (re *RollupExpr) ForSubquery() bool { + return len(re.Step) > 0 || re.InheritStep +} + +// AppendString appends string representation of re to dst and returns the result. 
+func (re *RollupExpr) AppendString(dst []byte) []byte { + needParens := func() bool { + if _, ok := re.Expr.(*RollupExpr); ok { + return true + } + if _, ok := re.Expr.(*BinaryOpExpr); ok { + return true + } + if ae, ok := re.Expr.(*AggrFuncExpr); ok && ae.Modifier.Op != "" { + return true + } + return false + }() + if needParens { + dst = append(dst, '(') + } + dst = re.Expr.AppendString(dst) + if needParens { + dst = append(dst, ')') + } + if len(re.Window) > 0 || re.InheritStep || len(re.Step) > 0 { + dst = append(dst, '[') + if len(re.Window) > 0 { + dst = append(dst, re.Window...) + } + if len(re.Step) > 0 { + dst = append(dst, ':') + dst = append(dst, re.Step...) + } else if re.InheritStep { + dst = append(dst, ':') + } + dst = append(dst, ']') + } + if len(re.Offset) > 0 { + dst = append(dst, " offset "...) + dst = append(dst, re.Offset...) + } + return dst +} + +// LabelFilter represents MetricsQL label filter like `foo="bar"`. +type LabelFilter struct { + // Label contains label name for the filter. + Label string + + // Value contains unquoted value for the filter. + Value string + + // IsNegative reperesents whether the filter is negative, i.e. '!=' or '!~'. + IsNegative bool + + // IsRegexp represents whether the filter is regesp, i.e. `=~` or `!~`. + IsRegexp bool +} + +// AppendString appends string representation of me to dst and returns the result. +func (lf *LabelFilter) AppendString(dst []byte) []byte { + dst = appendEscapedIdent(dst, lf.Label) + var op string + if lf.IsNegative { + if lf.IsRegexp { + op = "!~" + } else { + op = "!=" + } + } else { + if lf.IsRegexp { + op = "=~" + } else { + op = "=" + } + } + dst = append(dst, op...) + dst = strconv.AppendQuote(dst, lf.Value) + return dst +} + +// MetricExpr represents MetricsQL metric with optional filters, i.e. `foo{...}`. +type MetricExpr struct { + // LabelFilters contains a list of label filters from curly braces. + // Metric name if present must be the first. 
+ LabelFilters []LabelFilter + + // labelFilters must be expanded to LabelFilters by expandWithExpr. + labelFilters []*labelFilterExpr +} + +// AppendString appends string representation of me to dst and returns the result. +func (me *MetricExpr) AppendString(dst []byte) []byte { + lfs := me.LabelFilters + if len(lfs) > 0 { + lf := &lfs[0] + if lf.Label == "__name__" && !lf.IsNegative && !lf.IsRegexp { + dst = appendEscapedIdent(dst, lf.Value) + lfs = lfs[1:] + } + } + if len(lfs) > 0 { + dst = append(dst, '{') + for i := range lfs { + dst = lfs[i].AppendString(dst) + if i+1 < len(lfs) { + dst = append(dst, ", "...) + } + } + dst = append(dst, '}') + } else if len(me.LabelFilters) == 0 { + dst = append(dst, "{}"...) + } + return dst +} + +// IsEmpty returns true of me equals to `{}`. +func (me *MetricExpr) IsEmpty() bool { + return len(me.LabelFilters) == 0 +} + +func (me *MetricExpr) isOnlyMetricGroup() bool { + if !me.hasNonEmptyMetricGroup() { + return false + } + return len(me.LabelFilters) == 1 +} + +func (me *MetricExpr) hasNonEmptyMetricGroup() bool { + if len(me.LabelFilters) == 0 { + return false + } + lf := &me.LabelFilters[0] + return lf.Label == "__name__" && !lf.IsNegative && !lf.IsRegexp +} diff --git a/mateql/parser_example_test.go b/mateql/parser_example_test.go new file mode 100644 index 0000000..13f1b02 --- /dev/null +++ b/mateql/parser_example_test.go @@ -0,0 +1,34 @@ +package mateql + +import ( + "fmt" + "log" + +) + +func ExampleParse() { + expr, err := Parse(`sum(rate(foo{bar="baz"}[5m])) by (x,y)`) + if err != nil { + log.Fatalf("parse error: %s", err) + } + fmt.Printf("parsed expr: %s\n", expr.AppendString(nil)) + + ae := expr.(*AggrFuncExpr) + fmt.Printf("aggr func: name=%s, arg=%s, modifier=%s\n", ae.Name, ae.Args[0].AppendString(nil), ae.Modifier.AppendString(nil)) + + fe := ae.Args[0].(*FuncExpr) + fmt.Printf("func: name=%s, arg=%s\n", fe.Name, fe.Args[0].AppendString(nil)) + + re := fe.Args[0].(*RollupExpr) + fmt.Printf("rollup: 
expr=%s, window=%s\n", re.Expr.AppendString(nil), re.Window) + + me := re.Expr.(*MetricExpr) + fmt.Printf("metric: labelFilter1=%s, labelFilter2=%s", me.LabelFilters[0].AppendString(nil), me.LabelFilters[1].AppendString(nil)) + + // Output: + // parsed expr: sum(rate(foo{bar="baz"}[5m])) by (x, y) + // aggr func: name=sum, arg=rate(foo{bar="baz"}[5m]), modifier=by (x, y) + // func: name=rate, arg=foo{bar="baz"}[5m] + // rollup: expr=foo{bar="baz"}, window=5m + // metric: labelFilter1=__name__="foo", labelFilter2=bar="baz" +} diff --git a/mateql/parser_test.go b/mateql/parser_test.go new file mode 100755 index 0000000..6a2ce60 --- /dev/null +++ b/mateql/parser_test.go @@ -0,0 +1,684 @@ +package mateql + +import ( + "testing" +) + +func TestParseSuccess(t *testing.T) { + another := func(s string, sExpected string) { + t.Helper() + + e, err := Parse(s) + if err != nil { + t.Fatalf("unexpected error when parsing %q: %s", s, err) + } + res := e.AppendString(nil) + if string(res) != sExpected { + t.Fatalf("unexpected string constructed;\ngot\n%q\nwant\n%q", res, sExpected) + } + } + same := func(s string) { + t.Helper() + another(s, s) + } + + // metricExpr + same(`{}`) + same(`{}[5m]`) + same(`{}[5m:]`) + same(`{}[:]`) + another(`{}[: ]`, `{}[:]`) + same(`{}[:3s]`) + another(`{}[: 3s ]`, `{}[:3s]`) + same(`{}[5m:3s]`) + another(`{}[ 5m : 3s ]`, `{}[5m:3s]`) + same(`{} offset 5m`) + same(`{} offset -5m`) + same(`{}[5m] offset 10y`) + same(`{}[5.3m:3.4s] offset 10y`) + same(`{}[:3.4s] offset 10y`) + same(`{}[:3.4s] offset -10y`) + same(`{Foo="bAR"}`) + same(`{foo="bar"}`) + same(`{foo="bar"}[5m]`) + same(`{foo="bar"}[5m:]`) + same(`{foo="bar"}[5m:3s]`) + same(`{foo="bar"} offset 10y`) + same(`{foo="bar"} offset -10y`) + same(`{foo="bar"}[5m] offset 10y`) + same(`{foo="bar"}[5m:3s] offset 10y`) + another(`{foo="bar"}[5m] oFFSEt 10y`, `{foo="bar"}[5m] offset 10y`) + same("METRIC") + same("metric") + same("m_e:tri44:_c123") + another("-metric", "0 - metric") + same(`metric 
offset 10h`) + same("metric[5m]") + same("metric[5m:3s]") + same("metric[5m] offset 10h") + same("metric[5m:3s] offset 10h") + same("metric[5i:3i] offset 10i") + same(`metric{foo="bar"}`) + same(`metric{foo="bar"} offset 10h`) + same(`metric{foo!="bar"}[2d]`) + same(`metric{foo="bar"}[2d] offset 10h`) + same(`metric{foo="bar", b="sdfsdf"}[2d:3h] offset 10h`) + another(` metric { foo = "bar" } [ 2d ] offset 10h `, `metric{foo="bar"}[2d] offset 10h`) + // metric name matching keywords + same("rate") + same("RATE") + same("by") + same("BY") + same("bool") + same("BOOL") + same("unless") + same("UNLESS") + same("Ignoring") + same("with") + same("WITH") + same("With") + same("alias") + same(`alias{foo="bar"}`) + same(`aLIas{alias="aa"}`) + another(`al\ias`, `alias`) + // identifiers with with escape chars + same(`foo\ bar`) + same(`foo\-bar\{{baz\+bar="aa"}`) + another(`\x2E\x2ef\oo{b\xEF\ar="aa"}`, `\x2e.foo{b\xefar="aa"}`) + // Duplicate filters + same(`foo{__name__="bar"}`) + same(`foo{a="b", a="c", __name__="aaa", b="d"}`) + // Metric filters ending with comma + another(`m{foo="bar",}`, `m{foo="bar"}`) + // String concat in tag value + another(`m{foo="bar" + "baz"}`, `m{foo="barbaz"}`) + + // Valid regexp + same(`foo{bar=~"x"}`) + same(`foo{bar=~"^x"}`) + same(`foo{bar=~"^x$"}`) + same(`foo{bar=~"^(a[bc]|d)$"}`) + same(`foo{bar!~"x"}`) + same(`foo{bar!~"^x"}`) + same(`foo{bar!~"^x$"}`) + same(`foo{bar!~"^(a[bc]|d)$"}`) + + // stringExpr + same(`""`) + same(`"\n\t\r 12:{}[]()44"`) + another(`''`, `""`) + another("``", `""`) + another(" `foo\"b'ar` ", "\"foo\\\"b'ar\"") + another(` 'foo\'bar"BAZ' `, `"foo'bar\"BAZ"`) + // string concat + another(`"foo"+'bar'`, `"foobar"`) + + // numberExpr + same(`1`) + same(`1.23`) + same(`0.23`) + same(`1.2e+45`) + same(`1.2e-45`) + same(`-1`) + same(`-1.23`) + same(`-0.23`) + same(`-1.2e+45`) + same(`-1.2e-45`) + same(`-1.2e-45`) + another(`12.5E34`, `1.25e+35`) + another(`-.2`, `-0.2`) + another(`-.2E-2`, `-0.002`) + same(`NaN`) + 
another(`nan`, `NaN`) + another(`NAN`, `NaN`) + another(`nAN`, `NaN`) + another(`Inf`, `+Inf`) + another(`INF`, `+Inf`) + another(`inf`, `+Inf`) + another(`+Inf`, `+Inf`) + another(`-Inf`, `-Inf`) + another(`-inF`, `-Inf`) + + // binaryOpExpr + another(`nan == nan`, `NaN`) + another(`nan ==bool nan`, `1`) + another(`nan !=bool nan`, `0`) + another(`nan !=bool 2`, `1`) + another(`2 !=bool nan`, `1`) + another(`nan >bool nan`, `0`) + another(`nan =bool 2`, `1`) + another(`-1 >bool -inf`, `1`) + another(`-1 2`, `NaN`) + another(`1 > bool 2`, `0`) + another(`3 >= 2`, `3`) + another(`3 <= bool 2`, `0`) + another(`1 + -2 - 3`, `-4`) + another(`1 / 0 + 2`, `+Inf`) + another(`2 + -1 / 0`, `-Inf`) + another(`-1 ^ 0.5`, `NaN`) + another(`512.5 - (1 + 3) * (2 ^ 2) ^ 3`, `256.5`) + another(`1 == bool 1 != bool 24 < bool 4 > bool -1`, `1`) + another(`1 == bOOl 1 != BOOL 24 < Bool 4 > booL -1`, `1`) + another(`m1+on(foo)group_left m2`, `m1 + on (foo) group_left () m2`) + another(`M1+ON(FOO)GROUP_left M2`, `M1 + on (FOO) group_left () M2`) + same(`m1 + on (foo) group_right () m2`) + same(`m1 + on (foo, bar) group_right (x, y) m2`) + another(`m1 + on (foo, bar,) group_right (x, y,) m2`, `m1 + on (foo, bar) group_right (x, y) m2`) + same(`m1 == bool on (foo, bar) group_right (x, y) m2`) + another(`5 - 1 + 3 * 2 ^ 2 ^ 3 - 2 OR Metric {Bar= "Baz", aaa!="bb",cc=~"dd" ,zz !~"ff" } `, + `770 or Metric{Bar="Baz", aaa!="bb", cc=~"dd", zz!~"ff"}`) + same(`"foo" + bar()`) + same(`"foo" + bar{x="y"}`) + same(`("foo"[3s] + bar{x="y"})[5m:3s] offset 10s`) + same(`("foo"[3s] + bar{x="y"})[5i:3i] offset 10i`) + same(`bar + "foo" offset 3s`) + same(`bar + "foo" offset 3i`) + another(`1+2 if 2>3`, `NaN`) + another(`1+4 if 2<3`, `5`) + another(`2+6 default 3 if 2>3`, `8`) + another(`2+6 if 2>3 default NaN`, `NaN`) + another(`42 if 3>2 if 2+2<5`, `42`) + another(`42 if 3>2 if 2+2>=5`, `NaN`) + another(`1+2 ifnot 2>3`, `3`) + another(`1+4 ifnot 2<3`, `NaN`) + another(`2+6 default 3 ifnot 2>3`, `8`) + 
another(`2+6 ifnot 2>3 default NaN`, `8`) + another(`42 if 3>2 ifnot 2+2<5`, `NaN`) + another(`42 if 3>2 ifnot 2+2>=5`, `42`) + + // parensExpr + another(`(-foo + ((bar) / (baz))) + ((23))`, `((0 - foo) + (bar / baz)) + 23`) + another(`(FOO + ((Bar) / (baZ))) + ((23))`, `(FOO + (Bar / baZ)) + 23`) + same(`(foo, bar)`) + another(`((foo, bar),(baz))`, `((foo, bar), baz)`) + same(`(foo, (bar, baz), ((x, y), (z, y), xx))`) + another(`1+(foo, bar,)`, `1 + (foo, bar)`) + another(`((foo(bar,baz)), (1+(2)+(3,4)+()))`, `(foo(bar, baz), (3 + (3, 4)) + ())`) + same(`()`) + + // funcExpr + same(`f()`) + another(`f(x,)`, `f(x)`) + another(`-f()-Ff()`, `(0 - f()) - Ff()`) + same(`F()`) + another(`+F()`, `F()`) + another(`++F()`, `F()`) + another(`--F()`, `0 - (0 - F())`) + same(`f(http_server_request)`) + same(`f(http_server_request)[4s:5m] offset 10m`) + same(`f(http_server_request)[4i:5i] offset 10i`) + same(`F(HttpServerRequest)`) + same(`f(job, foo)`) + same(`F(Job, Foo)`) + another(` FOO (bar) + f ( m ( ),ff(1 + ( 2.5)) ,M[5m ] , "ff" )`, `FOO(bar) + f(m(), ff(3.5), M[5m], "ff")`) + // funcName matching keywords + same(`by(2)`) + same(`BY(2)`) + same(`or(2)`) + same(`OR(2)`) + same(`bool(2)`) + same(`BOOL(2)`) + same(`rate(rate(m))`) + same(`rate(rate(m[5m]))`) + same(`rate(rate(m[5m])[1h:])`) + same(`rate(rate(m[5m])[1h:3s])`) + // funcName with escape chars + same(`foo\(ba\-r()`) + + // aggrFuncExpr + same(`sum(http_server_request) by ()`) + same(`sum(http_server_request) by (job)`) + same(`sum(http_server_request) without (job, foo)`) + another(`sum(x,y,) without (a,b,)`, `sum(x, y) without (a, b)`) + another(`sum by () (xx)`, `sum(xx) by ()`) + another(`sum by (s) (xx)[5s]`, `(sum(xx) by (s))[5s]`) + another(`SUM BY (ZZ, aa) (XX)`, `sum(XX) by (ZZ, aa)`) + another(`sum without (a, b) (xx,2+2)`, `sum(xx, 4) without (a, b)`) + another(`Sum WIthout (a, B) (XX,2+2)`, `sum(XX, 4) without (a, B)`) + same(`sum(a) or sum(b)`) + same(`sum(a) by () or sum(b) without (x, y)`) + 
same(`sum(a) + sum(b)`) + same(`sum(x) * (1 + sum(a))`) + same(`avg(x) limit 10`) + same(`avg(x) without (z, b) limit 1`) + another(`avg by(x) (z) limit 20`, `avg(z) by (x) limit 20`) + + // All the above + another(`Sum(Ff(M) * M{X=""}[5m] Offset 7m - 123, 35) BY (X, y) * F2("Test")`, + `sum((Ff(M) * M{X=""}[5m] offset 7m) - 123, 35) by (X, y) * F2("Test")`) + another(`# comment + Sum(Ff(M) * M{X=""}[5m] Offset 7m - 123, 35) BY (X, y) # yet another comment + * F2("Test")`, + `sum((Ff(M) * M{X=""}[5m] offset 7m) - 123, 35) by (X, y) * F2("Test")`) + + // withExpr + another(`with () x`, `x`) + another(`with (x=1,) x`, `1`) + another(`with (x = m offset 5h) x + x`, `m offset 5h + m offset 5h`) + another(`with (x = m offset 5i) x + x`, `m offset 5i + m offset 5i`) + another(`with (foo = bar{x="x"}) 1`, `1`) + another(`with (foo = bar{x="x"}) "x"`, `"x"`) + another(`with (f="x") f`, `"x"`) + another(`with (foo = bar{x="x"}) x{x="y"}`, `x{x="y"}`) + another(`with (foo = bar{x="x"}) 1+1`, `2`) + another(`with (foo = bar{x="x"}) f()`, `f()`) + another(`with (foo = bar{x="x"}) sum(x)`, `sum(x)`) + another(`with (foo = bar{x="x"}) baz{foo="bar"}`, `baz{foo="bar"}`) + another(`with (foo = bar) baz`, `baz`) + another(`with (foo = bar) foo + foo{a="b"}`, `bar + bar{a="b"}`) + another(`with (foo = bar, bar=baz + f()) test`, `test`) + another(`with (ct={job="test"}) a{ct} + ct() + f({ct="x"})`, `(a{job="test"} + {job="test"}) + f({ct="x"})`) + another(`with (ct={job="test", i="bar"}) ct + {ct, x="d"} + foo{ct, ct} + ctx(1)`, + `(({job="test", i="bar"} + {job="test", i="bar", x="d"}) + foo{job="test", i="bar"}) + ctx(1)`) + another(`with (foo = bar) {__name__=~"foo"}`, `{__name__=~"foo"}`) + another(`with (foo = bar) foo{__name__="foo"}`, `bar`) + another(`with (foo = bar) {__name__="foo", x="y"}`, `bar{x="y"}`) + another(`with (foo(bar) = {__name__!="bar"}) foo(x)`, `{__name__!="bar"}`) + another(`with (foo(bar) = bar{__name__="bar"}) foo(x)`, `x`) + another(`with (foo\-bar(baz) 
= baz + baz) foo\-bar((x,y))`, `(x, y) + (x, y)`) + another(`with (foo\-bar(baz) = baz + baz) foo\-bar(x*y)`, `(x * y) + (x * y)`) + another(`with (foo\-bar(baz) = baz + baz) foo\-bar(x\*y)`, `x\*y + x\*y`) + another(`with (foo\-bar(b\ az) = b\ az + b\ az) foo\-bar(x\*y)`, `x\*y + x\*y`) + // override ttf to something new. + another(`with (ttf = a) ttf + b`, `a + b`) + // override ttf to ru + another(`with (ttf = ru(m, n)) ttf`, `(clamp_min(n - clamp_min(m, 0), 0) / clamp_min(n, 0)) * 100`) + + // Verify withExpr recursion and forward reference + another(`with (x = x+y, y = x+x) y ^ 2`, `((x + y) + (x + y)) ^ 2`) + another(`with (f1(x)=f2(x), f2(x)=f1(x)^2) f1(foobar)`, `f2(foobar)`) + another(`with (f1(x)=f2(x), f2(x)=f1(x)^2) f2(foobar)`, `f2(foobar) ^ 2`) + + // Verify withExpr funcs + another(`with (x() = y+1) x`, `y + 1`) + another(`with (x(foo) = foo+1) x(a)`, `a + 1`) + another(`with (x(a, b) = a + b) x(foo, bar)`, `foo + bar`) + another(`with (x(a, b) = a + b) x(foo, x(1, 2))`, `foo + 3`) + another(`with (x(a) = sum(a) by (b)) x(xx) / x(y)`, `sum(xx) by (b) / sum(y) by (b)`) + another(`with (f(a,f,x)=ff(x,f,a)) f(f(x,y,z),1,2)`, `ff(2, 1, ff(z, y, x))`) + another(`with (f(x)=1+f(x)) f(foo{bar="baz"})`, `1 + f(foo{bar="baz"})`) + another(`with (a=foo, y=bar, f(a)= a+a+y) f(x)`, `(x + x) + bar`) + another(`with (f(a, b) = m{a, b}) f({a="x", b="y"}, {c="d"})`, `m{a="x", b="y", c="d"}`) + another(`with (xx={a="x"}, f(a, b) = m{a, b}) f({xx, b="y"}, {c="d"})`, `m{a="x", b="y", c="d"}`) + another(`with (x() = {b="c"}) foo{x}`, `foo{b="c"}`) + another(`with (f(x)=x{foo="bar"} offset 5m) f(m offset 10m)`, `(m{foo="bar"} offset 10m) offset 5m`) + another(`with (f(x)=x{foo="bar",bas="a"}[5m]) f(m[10m] offset 3s)`, `(m{foo="bar", bas="a"}[10m] offset 3s)[5m]`) + another(`with (f(x)=x{foo="bar"}[5m] offset 10m) f(m{x="y"})`, `m{x="y", foo="bar"}[5m] offset 10m`) + another(`with (f(x)=x{foo="bar"}[5m] offset 10m) f({x="y", foo="bar", foo="bar"})`, `{x="y", 
foo="bar"}[5m] offset 10m`) + another(`with (f(m, x)=m{x}[5m] offset 10m) f(foo, {})`, `foo[5m] offset 10m`) + another(`with (f(m, x)=m{x, bar="baz"}[5m] offset 10m) f(foo, {})`, `foo{bar="baz"}[5m] offset 10m`) + another(`with (f(x)=x[5m] offset 3s) f(foo[3m]+bar)`, `(foo[3m] + bar)[5m] offset 3s`) + another(`with (f(x)=x[5m:3s] oFFsEt 1.5m) f(sum(s) by (a,b))`, `(sum(s) by (a, b))[5m:3s] offset 1.5m`) + another(`with (x="a", y=x) y+"bc"`, `"abc"`) + another(`with (x="a", y="b"+x) "we"+y+"z"+f()`, `"webaz" + f()`) + another(`with (f(x) = m{foo=x+"y", bar="y"+x, baz=x} + x) f("qwe")`, `m{foo="qwey", bar="yqwe", baz="qwe"} + "qwe"`) + another(`with (f(a)=a) f`, `f`) + another(`with (f\q(a)=a) f\q`, `fq`) + + // Verify withExpr for aggr func modifiers + another(`with (f(x) = x, y = sum(m) by (f)) y`, `sum(m) by (f)`) + another(`with (f(x) = x, y = sum(m) by (f) limit 20) y`, `sum(m) by (f) limit 20`) + another(`with (f(x) = sum(m) by (x)) f(foo)`, `sum(m) by (foo)`) + another(`with (f(x) = sum(m) by (x) limit 42) f(foo)`, `sum(m) by (foo) limit 42`) + another(`with (f(x) = sum(m) by (x)) f((foo, bar, foo))`, `sum(m) by (foo, bar)`) + another(`with (f(x) = sum(m) without (x,y)) f((a, b))`, `sum(m) without (a, b, y)`) + another(`with (f(x) = sum(m) without (y,x)) f((a, y))`, `sum(m) without (y, a)`) + another(`with (f(x,y) = a + on (x,y) group_left (y,bar) b) f(foo,())`, `a + on (foo) group_left (bar) b`) + another(`with (f(x,y) = a + on (x,y) group_left (y,bar) b) f((foo),())`, `a + on (foo) group_left (bar) b`) + another(`with (f(x,y) = a + on (x,y) group_left (y,bar) b) f((foo,xx),())`, `a + on (foo, xx) group_left (bar) b`) + + // Verify nested with exprs + another(`with (f(x) = (with(x=y) x) + x) f(z)`, `y + z`) + another(`with (x=foo) f(a, with (y=x) y)`, `f(a, foo)`) + another(`with (x=foo) a * x + (with (y=x) y) / y`, `(a * foo) + (foo / y)`) + another(`with (x = with (y = foo) y + x) x/x`, `(foo + x) / (foo + x)`) + another(`with ( + x = {foo="bar"}, + q = 
m{x, y="1"}, + f(x) = + with ( + z(y) = x + y * q + ) + z(foo) / f(x) + ) + f(a)`, `(a + (foo * m{foo="bar", y="1"})) / f(a)`) + + // complex withExpr + another(`WITH ( + treshold = (0.9), + commonFilters = {job="cacher", instance=~"1.2.3.4"}, + hits = rate(cache{type="hit", commonFilters}[5m]), + miss = rate(cache{type="miss", commonFilters}[5m]), + sumByInstance(arg) = sum(arg) by (instance), + hitRatio = sumByInstance(hits) / sumByInstance(hits + miss) + ) + hitRatio < treshold`, + `(sum(rate(cache{type="hit", job="cacher", instance=~"1.2.3.4"}[5m])) by (instance) / sum(rate(cache{type="hit", job="cacher", instance=~"1.2.3.4"}[5m]) + rate(cache{type="miss", job="cacher", instance=~"1.2.3.4"}[5m])) by (instance)) < 0.9`) + another(`WITH ( + x2(x) = x^2, + f(x, y) = x2(x) + x*y + x2(y) + ) + f(a, 3) + `, `((a ^ 2) + (a * 3)) + 9`) + another(`WITH ( + x2(x) = x^2, + f(x, y) = x2(x) + x*y + x2(y) + ) + f(2, 3) + `, `19`) + another(`WITH ( + commonFilters = {instance="foo"}, + timeToFuckup(currv, maxv) = (maxv - currv) / rate(currv) + ) + timeToFuckup(diskUsage{commonFilters}, maxDiskSize{commonFilters})`, + `(maxDiskSize{instance="foo"} - diskUsage{instance="foo"}) / rate(diskUsage{instance="foo"})`) + another(`WITH ( + commonFilters = {job="foo", instance="bar"}, + sumRate(m, cf) = sum(rate(m{cf})) by (job, instance), + hitRate(hits, misses) = sumRate(hits, commonFilters) / (sumRate(hits, commonFilters) + sumRate(misses, commonFilters)) + ) + hitRate(cacheHits, cacheMisses)`, + `sum(rate(cacheHits{job="foo", instance="bar"})) by (job, instance) / (sum(rate(cacheHits{job="foo", instance="bar"})) by (job, instance) + sum(rate(cacheMisses{job="foo", instance="bar"})) by (job, instance))`) + another(`with(y=123,z=5) union(with(y=3,f(x)=x*y) f(2) + f(3), with(x=5,y=2) x*y*z)`, `union(15, 50)`) +} + +func TestParseError(t *testing.T) { + f := func(s string) { + t.Helper() + + e, err := Parse(s) + if err == nil { + t.Fatalf("expecting non-nil error when parsing %q", s) + 
} + if e != nil { + t.Fatalf("expecting nil expr when parsing %q", s) + } + } + + // an empty string + f("") + f(" \t\b\r\n ") + + // invalid metricExpr + f(`{__name__="ff"} offset 55`) + f(`foo[55]`) + f(`m[-5m]`) + f(`{`) + f(`foo{`) + f(`foo{bar`) + f(`foo{bar=`) + f(`foo{bar="baz"`) + f(`foo{bar="baz", `) + f(`foo{123="23"}`) + f(`foo{foo}`) + f(`foo{,}`) + f(`foo{,foo="bar"}`) + f(`foo{foo=}`) + f(`foo{foo="ba}`) + f(`foo{"foo"="bar"}`) + f(`foo{$`) + f(`foo{a $`) + f(`foo{a="b",$`) + f(`foo{a="b"}$`) + f(`[`) + f(`[]`) + f(`f[5m]$`) + f(`[5m]`) + f(`[5m] offset 4h`) + f(`m[5m] offset $`) + f(`m[5m] offset 5h $`) + f(`m[]`) + f(`m[-5m]`) + f(`m[5m:`) + f(`m[5m:-`) + f(`m[5m:-1`) + f(`m[5m:-1]`) + f(`m[5m:-1s]`) + f(`m[-5m:1s]`) + f(`m[-5m:-1s]`) + f(`m[:`) + f(`m[:-`) + f(`m[:1]`) + f(`m[:-1m]`) + f(`m[5]`) + f(`m[[5m]]`) + f(`m[foo]`) + f(`m["ff"]`) + f(`m[10m`) + f(`m[123`) + f(`m["ff`) + f(`m[(f`) + f(`fd}`) + f(`]`) + f(`m $`) + f(`m{,}`) + f(`m{x=y}`) + f(`m{x=y/5}`) + f(`m{x=y+5}`) + + // Invalid regexp + f(`foo{bar=~"x["}`) + f(`foo{bar=~"x("}`) + f(`foo{bar=~"x)"}`) + f(`foo{bar!~"x["}`) + f(`foo{bar!~"x("}`) + f(`foo{bar!~"x)"}`) + + // invalid stringExpr + f(`'`) + f(`"`) + f("`") + f(`"foo`) + f(`'foo`) + f("`foo") + f(`"foo\"bar`) + f(`'foo\'bar`) + f("`foo\\`bar") + f(`"" $`) + f(`"foo" +`) + f(`n{"foo" + m`) + + // invalid numberExpr + f(`12.`) + f(`1.2e`) + f(`23e-`) + f(`23E+`) + f(`.`) + f(`-12.`) + f(`-1.2e`) + f(`-23e-`) + f(`-23E+`) + f(`-.`) + f(`-1$$`) + f(`-$$`) + f(`+$$`) + f(`23 $$`) + + // invalid binaryOpExpr + f(`+`) + f(`1 +`) + f(`1 + 2.`) + f(`3 unless`) + f(`23 + on (foo)`) + f(`m + on (,) m`) + f(`3 * ignoring`) + f(`m * on (`) + f(`m * on (foo`) + f(`m * on (foo,`) + f(`m * on (foo,)`) + f(`m * on (,foo)`) + f(`m * on (,)`) + f(`m == bool (bar) baz`) + f(`m == bool () baz`) + f(`m * by (baz) n`) + f(`m + bool group_left m2`) + f(`m + on () group_left (`) + f(`m + on () group_left (,`) + f(`m + on () group_left (,foo`) + f(`m + 
on () group_left (foo,)`) + f(`m + on () group_left (,foo)`) + f(`m + on () group_left (foo)`) + f(`m + on () group_right (foo) (m`) + f(`m or ignoring () group_left () n`) + f(`1 + bool 2`) + f(`m % bool n`) + f(`m * bool baz`) + f(`M * BOoL BaZ`) + f(`foo unless ignoring (bar) group_left xxx`) + f(`foo or bool bar`) + f(`foo == bool $$`) + f(`"foo" + bar`) + + // invalid parensExpr + f(`(`) + f(`($`) + f(`(+`) + f(`(1`) + f(`(m+`) + f(`1)`) + f(`(,)`) + f(`(1)$`) + + // invalid funcExpr + f(`f $`) + f(`f($)`) + f(`f[`) + f(`f()$`) + f(`f(`) + f(`f(foo`) + f(`f(f,`) + f(`f(,`) + f(`f(,)`) + f(`f(,foo)`) + f(`f(,foo`) + f(`f(foo,$`) + f(`f() by (a)`) + f(`f without (x) (y)`) + f(`f() foo (a)`) + f(`f bar (x) (b)`) + f(`f bar (x)`) + + // invalid aggrFuncExpr + f(`sum(`) + f(`sum $`) + f(`sum [`) + f(`sum($)`) + f(`sum()$`) + f(`sum(foo) ba`) + f(`sum(foo) ba()`) + f(`sum(foo) by`) + f(`sum(foo) without x`) + f(`sum(foo) aaa`) + f(`sum(foo) aaa x`) + f(`sum() by $`) + f(`sum() by (`) + f(`sum() by ($`) + f(`sum() by (a`) + f(`sum() by (a $`) + f(`sum() by (a ]`) + f(`sum() by (a)$`) + f(`sum() by (,`) + f(`sum() by (a,$`) + f(`sum() by (,)`) + f(`sum() by (,a`) + f(`sum() by (,a)`) + f(`sum() on (b)`) + f(`sum() bool`) + f(`sum() group_left`) + f(`sum() group_right(x)`) + f(`sum ba`) + f(`sum ba ()`) + f(`sum by (`) + f(`sum by (a`) + f(`sum by (,`) + f(`sum by (,)`) + f(`sum by (,a`) + f(`sum by (,a)`) + f(`sum by (a)`) + f(`sum by (a) (`) + f(`sum by (a) [`) + f(`sum by (a) {`) + f(`sum by (a) (b`) + f(`sum by (a) (b,`) + f(`sum by (a) (,)`) + f(`avg by (a) (,b)`) + f(`sum by (x) (y) by (z)`) + f(`sum(m) by (1)`) + + // invalid withExpr + f(`with $`) + f(`with a`) + f(`with a=b c`) + f(`with (`) + f(`with (x=b)$`) + f(`with ($`) + f(`with (foo`) + f(`with (foo $`) + f(`with (x y`) + f(`with (x =`) + f(`with (x = $`) + f(`with (x= y`) + f(`with (x= y $`) + f(`with (x= y)`) + f(`with (x=(`) + f(`with (x=[)`) + f(`with (x=() x)`) + f(`with ($$)`) + f(`with (x $$`) + 
f(`with (x = $$)`) + f(`with (x = foo) bar{x}`) + f(`with (x = {foo="bar"}[5m]) bar{x}`) + f(`with (x = {foo="bar"} offset 5m) bar{x}`) + f(`with (x = a, x = b) c`) + f(`with (x(a, a) = b) c`) + f(`with (x=m{f="x"}) foo{x}`) + f(`with (sum = x) y`) + f(`with (rate(a) = b) c`) + f(`with (clamp_min=x) y`) + f(`with (f()`) + f(`with (a=b c=d) e`) + f(`with (f(x)=x^2) m{x}`) + f(`with (f(x)=ff()) m{x}`) + f(`with (f(x`) + f(`with (x=m) a{x} + b`) + f(`with (x=m) b + a{x}`) + f(`with (x=m) f(b, a{x})`) + f(`with (x=m) sum(a{x})`) + f(`with (x=m) (a{x})`) + f(`with (f(a)=a) f(1, 2)`) + f(`with (f(x)=x{foo="bar"}) f(1)`) + f(`with (f(x)=x{foo="bar"}) f(m + n)`) + f(`with (f = with`) + f(`with (,)`) + f(`with (1) 2`) + f(`with (f(1)=2) 3`) + f(`with (f(,)=x) x`) + f(`with (x(a) = {b="c"}) foo{x}`) + f(`with (f(x) = m{foo=xx}) f("qwe")`) + f(`a + with(f(x)=x) f(1,2)`) + f(`with (f(x) = sum(m) by (x)) f({foo="bar"})`) + f(`with (f(x) = sum(m) by (x)) f((xx(), {foo="bar"}))`) + f(`with (f(x) = m + on (x) n) f(xx())`) + f(`with (f(x) = m + on (a) group_right (x) n) f(xx())`) +} diff --git a/mateql/regexp_cache.go b/mateql/regexp_cache.go new file mode 100755 index 0000000..30346cf --- /dev/null +++ b/mateql/regexp_cache.go @@ -0,0 +1,99 @@ +package mateql + +import ( + "regexp" + "sync" + "sync/atomic" +) + +// CompileRegexpAnchored returns compiled regexp `^re$`. +func CompileRegexpAnchored(re string) (*regexp.Regexp, error) { + reAnchored := "^(?:" + re + ")$" + return CompileRegexp(reAnchored) +} + +// CompileRegexp returns compile regexp re. 
+func CompileRegexp(re string) (*regexp.Regexp, error) { + rcv := regexpCacheV.Get(re) + if rcv != nil { + return rcv.r, rcv.err + } + r, err := regexp.Compile(re) + rcv = ®expCacheValue{ + r: r, + err: err, + } + regexpCacheV.Put(re, rcv) + return rcv.r, rcv.err +} + +var regexpCacheV = func() *regexpCache { + rc := ®expCache{ + m: make(map[string]*regexpCacheValue), + } + return rc +}() + +const regexpCacheMaxLen = 10e3 + +type regexpCacheValue struct { + r *regexp.Regexp + err error +} + +type regexpCache struct { + // Move atomic counters to the top of struct for 8-byte alignment on 32-bit arch. + // See https://github.com/VictoriaMetrics/VictoriaMetrics/issues/212 + + requests uint64 + misses uint64 + + m map[string]*regexpCacheValue + mu sync.RWMutex +} + +func (rc *regexpCache) Requests() uint64 { + return atomic.LoadUint64(&rc.requests) +} + +func (rc *regexpCache) Misses() uint64 { + return atomic.LoadUint64(&rc.misses) +} + +func (rc *regexpCache) Len() uint64 { + rc.mu.RLock() + n := len(rc.m) + rc.mu.RUnlock() + return uint64(n) +} + +func (rc *regexpCache) Get(regexp string) *regexpCacheValue { + atomic.AddUint64(&rc.requests, 1) + + rc.mu.RLock() + rcv := rc.m[regexp] + rc.mu.RUnlock() + + if rcv == nil { + atomic.AddUint64(&rc.misses, 1) + } + return rcv +} + +func (rc *regexpCache) Put(regexp string, rcv *regexpCacheValue) { + rc.mu.Lock() + overflow := len(rc.m) - regexpCacheMaxLen + if overflow > 0 { + // Remove 10% of items from the cache. + overflow = int(float64(len(rc.m)) * 0.1) + for k := range rc.m { + delete(rc.m, k) + overflow-- + if overflow <= 0 { + break + } + } + } + rc.m[regexp] = rcv + rc.mu.Unlock() +} diff --git a/mateql/rollup.go b/mateql/rollup.go new file mode 100755 index 0000000..f5823d0 --- /dev/null +++ b/mateql/rollup.go @@ -0,0 +1,75 @@ +package mateql + +import ( + "strings" +) + +var rollupFuncs = map[string]bool{ + // Standard rollup funcs from PromQL. 
+ // See funcs accepting range-vector on https://prometheus.io/docs/prometheus/latest/querying/functions/ . + "changes": true, + "delta": true, + "deriv": true, + "deriv_fast": true, + "holt_winters": true, + "idelta": true, + "increase": true, + "irate": true, + "predict_linear": true, + "rate": true, + "resets": true, + "avg_over_time": true, + "min_over_time": true, + "max_over_time": true, + "sum_over_time": true, + "count_over_time": true, + "quantile_over_time": true, + "stddev_over_time": true, + "stdvar_over_time": true, + "absent_over_time": true, + + // Additional rollup funcs. + "default_rollup": true, + "range_over_time": true, + "sum2_over_time": true, + "geomean_over_time": true, + "first_over_time": true, + "last_over_time": true, + "distinct_over_time": true, + "increases_over_time": true, + "decreases_over_time": true, + "integrate": true, + "ideriv": true, + "lifetime": true, + "lag": true, + "scrape_interval": true, + "tmin_over_time": true, + "tmax_over_time": true, + "share_le_over_time": true, + "share_gt_over_time": true, + "histogram_over_time": true, + "rollup": true, + "rollup_rate": true, + "rollup_deriv": true, + "rollup_delta": true, + "rollup_increase": true, + "rollup_candlestick": true, + "aggr_over_time": true, + "hoeffding_bound_upper": true, + "hoeffding_bound_lower": true, + "ascent_over_time": true, + "descent_over_time": true, + + // `timestamp` func has been moved here because it must work properly with offsets and samples unaligned to the current step. + // See https://github.com/VictoriaMetrics/VictoriaMetrics/issues/415 for details. + "timestamp": true, + + // See https://en.wikipedia.org/wiki/Mode_(statistics) + "mode_over_time": true, +} + +// IsRollupFunc returns whether funcName is known rollup function. 
+func IsRollupFunc(funcName string) bool { + s := strings.ToLower(funcName) + return rollupFuncs[s] +} diff --git a/mateql/transform.go b/mateql/transform.go new file mode 100755 index 0000000..5d2764f --- /dev/null +++ b/mateql/transform.go @@ -0,0 +1,90 @@ +package mateql + +import ( + "strings" +) + +var transformFuncs = map[string]bool{ + // Standard promql funcs + // See funcs accepting instant-vector on https://prometheus.io/docs/prometheus/latest/querying/functions/ . + "abs": true, + "absent": true, + "ceil": true, + "clamp_max": true, + "clamp_min": true, + "day_of_month": true, + "day_of_week": true, + "days_in_month": true, + "exp": true, + "floor": true, + "histogram_quantile": true, + "hour": true, + "label_join": true, + "label_replace": true, + "ln": true, + "log2": true, + "log10": true, + "minute": true, + "month": true, + "round": true, + "scalar": true, + "sort": true, + "sort_desc": true, + "sqrt": true, + "time": true, + // "timestamp" has been moved to rollup funcs. 
See https://github.com/VictoriaMetrics/VictoriaMetrics/issues/415 + "vector": true, + "year": true, + + // New funcs from MetricsQL + "label_set": true, + "label_map": true, + "label_del": true, + "label_keep": true, + "label_copy": true, + "label_move": true, + "label_transform": true, + "label_value": true, + "label_match": true, + "label_mismatch": true, + "union": true, + "": true, // empty func is a synonim to union + "keep_last_value": true, + "keep_next_value": true, + "interpolate": true, + "start": true, + "end": true, + "step": true, + "running_sum": true, + "running_max": true, + "running_min": true, + "running_avg": true, + "range_sum": true, + "range_max": true, + "range_min": true, + "range_avg": true, + "range_first": true, + "range_last": true, + "range_quantile": true, + "smooth_exponential": true, + "remove_resets": true, + "rand": true, + "rand_normal": true, + "rand_exponential": true, + "pi": true, + "sin": true, + "cos": true, + "asin": true, + "acos": true, + "prometheus_buckets": true, + "histogram_share": true, + "sort_by_label": true, + "sort_by_label_desc": true, +} + +// IsTransformFunc returns whether funcName is known transform function. +func IsTransformFunc(funcName string) bool { + s := strings.ToLower(funcName) + return transformFuncs[s] + +} diff --git a/mateql/utils.go b/mateql/utils.go new file mode 100644 index 0000000..db7b22e --- /dev/null +++ b/mateql/utils.go @@ -0,0 +1,12 @@ +package mateql + +// ExpandWithExprs expands WITH expressions inside q and returns the resulting +// PromQL without WITH expressions. 
+func ExpandWithExprs(q string) (string, error) { + e, err := Parse(q) + if err != nil { + return "", err + } + buf := e.AppendString(nil) + return string(buf), nil +} diff --git a/mateql/utils_example_test.go b/mateql/utils_example_test.go new file mode 100644 index 0000000..36fdc6d --- /dev/null +++ b/mateql/utils_example_test.go @@ -0,0 +1,26 @@ +package mateql + +import ( + "fmt" + "log" +) + +func ExampleExpandWithExprs() { + // mql can contain arbitrary MetricsQL extensions - see https://github.com/VictoriaMetrics/VictoriaMetrics/wiki/MetricsQL + mql := `WITH ( + commonFilters = {job="$job", instance="$instance"}, + f(a, b) = 100*(a/b), + ) + f(disk_free_bytes{commonFilters}, disk_total_bytes{commonFilters})` + + // Convert mql to PromQL + pql, err := ExpandWithExprs(mql) + if err != nil { + log.Fatalf("cannot expand with expressions: %s", err) + } + + fmt.Printf("%s\n", pql) + + // Output: + // 100 * (disk_free_bytes{job="$job", instance="$instance"} / disk_total_bytes{job="$job", instance="$instance"}) +} diff --git a/mateql/utils_test.go b/mateql/utils_test.go new file mode 100644 index 0000000..70cc485 --- /dev/null +++ b/mateql/utils_test.go @@ -0,0 +1,43 @@ +package mateql + +import ( + "testing" +) + +func TestExpandWithExprsSuccess(t *testing.T) { + f := func(q, qExpected string) { + t.Helper() + for i := 0; i < 3; i++ { + qExpanded, err := ExpandWithExprs(q) + if err != nil { + t.Fatalf("unexpected error when expanding %q: %s", q, err) + } + if qExpanded != qExpected { + t.Fatalf("unexpected expanded expression for %q;\ngot\n%q\nwant\n%q", q, qExpanded, qExpected) + } + } + } + + f(`1`, `1`) + f(`foobar`, `foobar`) + f(`with (x = 1) x+x`, `2`) + f(`with (f(x) = x*x) 3+f(2)+2`, `9`) +} + +func TestExpandWithExprsError(t *testing.T) { + f := func(q string) { + t.Helper() + for i := 0; i < 3; i++ { + qExpanded, err := ExpandWithExprs(q) + if err == nil { + t.Fatalf("expecting non-nil error when expanding %q", q) + } + if qExpanded != "" { + 
t.Fatalf("unexpected non-empty qExpanded=%q", qExpanded) + } + } + } + + f(``) + f(` with (`) +} diff --git a/prometheus/converter.go b/prometheus/converter.go new file mode 100644 index 0000000..be53d33 --- /dev/null +++ b/prometheus/converter.go @@ -0,0 +1,129 @@ +package prometheus + +import ( + "bytes" + "fmt" + "strings" + "sync" + + "github.com/zhihu/promate/mateql" +) + +var builderPool = &sync.Pool{ + New: func() interface{} { + return bytes.NewBuffer(make([]byte, 1024)) + }, +} + +type LabelFilters []mateql.LabelFilter + +func (l LabelFilters) Build(name string) (selector string) { + builder := builderPool.Get().(*bytes.Buffer) + defer builderPool.Put(builder) + builder.Reset() + + builder.WriteString(`{__name__="`) + builder.WriteString(name) + builder.WriteByte('"') + + for _, filter := range l { + if filter.IsRegexp { + builder.WriteByte(',') + builder.WriteString(filter.Label) + builder.WriteString(`=~"`) + builder.WriteString(filter.Value) + builder.WriteByte('"') + } else if filter.IsNegative { + builder.WriteByte(',') + builder.WriteString(filter.Label) + builder.WriteString(`!="`) + builder.WriteString(filter.Value) + builder.WriteByte('"') + } else { + builder.WriteByte(',') + builder.WriteString(filter.Label) + builder.WriteString(`="`) + builder.WriteString(filter.Value) + builder.WriteByte('"') + } + } + + builder.WriteString(`}`) + return builder.String() +} + +func ConvertGraphiteTarget(query string, terminal bool) (string, LabelFilters) { + nodes := strings.Split(query, ".") + length := len(nodes) + name := strings.ReplaceAll(nodes[0], "-", "_") + + filters := make(LabelFilters, 0, length) + for i := 1; i < length; i++ { + node := nodes[i] + if node == "*" { + continue + } + value, isRegex, err := globToRegexPattern(node) + if err != nil { + return "", nil + } + + filters = append(filters, mateql.LabelFilter{ + Label: labelName(name, i), + Value: value, + IsRegexp: isRegex, + }) + } + if terminal { + filters = append(filters, 
mateql.LabelFilter{ + Label: labelName(name, length), + Value: "", + }) + } + + return name, filters +} + +func ConvertQueryLabel(query string) (prefix, label string, fast bool) { + builder := builderPool.Get().(*bytes.Buffer) + defer builderPool.Put(builder) + builder.Reset() + + nodes := strings.Split(query, ".") + length := len(nodes) + name := strings.ReplaceAll(nodes[0], "-", "_") + + builder.WriteString(name) + for i := 1; i < length-1; i++ { + builder.WriteByte('.') + builder.WriteString(nodes[i]) + } + builder.WriteByte('.') + + return builder.String(), labelName(name, length-1), length == 2 +} + +func ConvertPrometheusMetric(name string, metric map[string]string) string { + // Detect error response https://github.com/VictoriaMetrics/VictoriaMetrics/issues/360 + __name__, ok := metric["__name__"] + if ok && __name__ != name { + return "" + } + + builder := builderPool.Get().(*bytes.Buffer) + defer builderPool.Put(builder) + builder.Reset() + + builder.WriteString(name) + for i := 1; i < len(metric)+1; i++ { + if value, ok := metric[fmt.Sprintf("__%s_g%d__", name, i)]; ok { + builder.WriteByte('.') + builder.WriteString(value) + } + } + return builder.String() +} + +func labelName(name string, i int) string { + return fmt.Sprintf("__%s_g%d__", name, i) +} diff --git a/prometheus/converter_test.go b/prometheus/converter_test.go new file mode 100644 index 0000000..f0de95f --- /dev/null +++ b/prometheus/converter_test.go @@ -0,0 +1,259 @@ +package prometheus + +import ( + "reflect" + "testing" +) + +func TestConvertGraphiteTarget(t *testing.T) { + type args struct { + query string + terminal bool + } + tests := []struct { + name string + args args + want string + want1 LabelFilters + }{ + { + name: "a.b.c", + args: args{ + query: "a.b.c", + terminal: false, + }, + want: "a", + want1: LabelFilters{ + { + Label: "__a_g1__", + Value: "b", + }, + { + Label: "__a_g2__", + Value: "c", + }, + }, + }, + { + name: "a.b.c with terminal", + args: args{ + query: "a.b.c", + 
terminal: true, + }, + want: "a", + want1: LabelFilters{ + { + Label: "__a_g1__", + Value: "b", + }, + { + Label: "__a_g2__", + Value: "c", + }, + { + Label: "__a_g3__", + Value: "", + }, + }, + }, + { + name: "a.*.c", + args: args{ + query: "a.*.c", + terminal: false, + }, + want: "a", + want1: LabelFilters{ + { + Label: "__a_g2__", + Value: "c", + }, + }, + }, + { + name: "a-a.b", + args: args{ + query: "a-a.*.c", + terminal: false, + }, + want: "a_a", + want1: LabelFilters{ + { + Label: "__a_a_g2__", + Value: "c", + }, + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, got1 := ConvertGraphiteTarget(tt.args.query, tt.args.terminal) + if got != tt.want { + t.Errorf("ConvertGraphiteTarget() got = %v, want %v", got, tt.want) + } + if !reflect.DeepEqual(got1, tt.want1) { + t.Errorf("ConvertGraphiteTarget() got1 = %v, want %v", got1, tt.want1) + } + }) + } +} + +func TestConvertPrometheusMetric(t *testing.T) { + type args struct { + name string + metric map[string]string + } + tests := []struct { + name string + args args + want string + }{ + { + name: "a.b.c", + args: args{ + name: "a", + metric: map[string]string{ + "__a_g1__": "b", + "__a_g2__": "c", + }, + }, + want: "a.b.c", + }, + { + name: "unknown name", + args: args{ + name: "a", + metric: map[string]string{ + "__name__": "unknown", + "__a_g1__": "b", + "__a_g2__": "c", + }, + }, + want: "", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if got := ConvertPrometheusMetric(tt.args.name, tt.args.metric); got != tt.want { + t.Errorf("ConvertPrometheusMetric() = %v, want %v", got, tt.want) + } + }) + } +} + +func TestConvertQueryLabel(t *testing.T) { + type args struct { + query string + } + tests := []struct { + name string + args args + wantPrefix string + wantLabel string + wantFast bool + }{ + { + name: "a.b.*", + args: args{ + query: "a.b.*", + }, + wantPrefix: "a.b.", + wantLabel: "__a_g2__", + wantFast: false, + }, + { + name: "a.* fast", + 
args: args{ + query: "a.*", + }, + wantPrefix: "a.", + wantLabel: "__a_g1__", + wantFast: true, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + gotPrefix, gotLabel, gotFast := ConvertQueryLabel(tt.args.query) + if gotPrefix != tt.wantPrefix { + t.Errorf("ConvertQueryLabel() gotPrefix = %v, want %v", gotPrefix, tt.wantPrefix) + } + if gotLabel != tt.wantLabel { + t.Errorf("ConvertQueryLabel() gotLabel = %v, want %v", gotLabel, tt.wantLabel) + } + if gotFast != tt.wantFast { + t.Errorf("ConvertQueryLabel() gotFast = %v, want %v", gotFast, tt.wantFast) + } + }) + } +} + +func TestLabelFilters_Build(t *testing.T) { + type args struct { + name string + } + tests := []struct { + name string + l LabelFilters + args args + wantSelector string + }{ + { + name: "success", + l: LabelFilters{ + { + Label: "g1", + Value: "v1", + }, + { + Label: "g2", + Value: "v2", + IsRegexp: true, + }, + { + Label: "g3", + Value: "v3", + IsNegative: true, + }, + }, + args: args{ + name: "name", + }, + wantSelector: `{__name__="name",g1="v1",g2=~"v2",g3!="v3"}`, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if gotSelector := tt.l.Build(tt.args.name); gotSelector != tt.wantSelector { + t.Errorf("Build() = %v, want %v", gotSelector, tt.wantSelector) + } + }) + } +} + +func Test_labelName(t *testing.T) { + type args struct { + name string + i int + } + tests := []struct { + name string + args args + want string + }{ + { + name: "a", + args: args{ + name: "a", + i: 1, + }, + want: "__a_g1__", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if got := labelName(tt.args.name, tt.args.i); got != tt.want { + t.Errorf("labelName() = %v, want %v", got, tt.want) + } + }) + } +} diff --git a/prometheus/glob.go b/prometheus/glob.go new file mode 100644 index 0000000..cc5a6ee --- /dev/null +++ b/prometheus/glob.go @@ -0,0 +1,138 @@ +package prometheus + +// Copyright (c) 2019 Uber Technologies, Inc. 
// Copyright (c) 2019 Uber Technologies, Inc.
// [Apache License 2.0](licenses/m3.license.txt)

// Fork from https://github.com/m3db/m3/blob/e098969502ff32abe4d6be536cc7c1cf06885a85/src/query/graphite/graphite/glob.go

const (
	// ValidIdentifierRunes lists every rune allowed verbatim in a Graphite
	// path segment.
	ValidIdentifierRunes = "ABCDEFGHIJKLMNOPQRSTUVWXYZ" +
		"abcdefghijklmnopqrstuvwxyz" +
		"0123456789" +
		"$-_'|<>%#/:"
)

// Pattern accumulates the regex translation of a glob expression and can
// undo its most recent write.
type Pattern struct {
	buff         bytes.Buffer
	eval         rune
	lastWriteLen int
}

// String returns the regex accumulated so far.
func (p *Pattern) String() string {
	return p.buff.String()
}

// Evaluate records the rune currently being translated.
func (p *Pattern) Evaluate(r rune) {
	p.eval = r
}

// LastEvaluate reports the rune most recently passed to Evaluate.
// NOTE(review): unused within this file; presumably consumed by callers
// elsewhere in the fork — confirm before removing.
func (p *Pattern) LastEvaluate() rune {
	return p.eval
}

// WriteRune appends r, recording its encoded byte length so UnwriteLast can
// remove it exactly. The original stored a constant 1, which corrupted the
// buffer when a multi-byte rune was later unwritten.
func (p *Pattern) WriteRune(r rune) {
	n, _ := p.buff.WriteRune(r) // bytes.Buffer.WriteRune never returns an error
	p.lastWriteLen = n
}

// WriteString appends str, recording its length for UnwriteLast.
func (p *Pattern) WriteString(str string) {
	p.buff.WriteString(str)
	p.lastWriteLen = len(str)
}

// UnwriteLast removes the most recent WriteRune/WriteString contribution.
func (p *Pattern) UnwriteLast() {
	p.buff.Truncate(p.buff.Len() - p.lastWriteLen)
	p.lastWriteLen = 0
}

// globToRegexPattern converts a Graphite glob into a regex pattern. It
// returns the pattern, whether any glob syntax required a regex match (false
// means a plain equality match suffices), and an error for malformed globs.
func globToRegexPattern(glob string) (string, bool, error) {
	var (
		pattern  Pattern
		escaping = false
		regexed  = false
	)

	groupStartStack := []rune{rune(0)} // rune(0) indicates pattern is not in a group
	for i, r := range glob {
		pattern.Evaluate(r)

		if escaping {
			pattern.WriteRune(r)
			escaping = false
			continue
		}

		switch r {
		case '\\':
			escaping = true
			pattern.WriteRune('\\')
		case '.':
			// Match hierarchy separator
			pattern.WriteString("\\.+")
			regexed = true
		case '?':
			// Match anything except the hierarchy separator
			pattern.WriteString("[^.]")
			regexed = true
		case '*':
			// Match everything up to the next hierarchy separator
			pattern.WriteString("[^.]*")
			regexed = true
		case '{':
			// Begin alternation group. Note this deliberately emits a plain
			// capturing "(" — not the non-capturing "(?:" the fork's original
			// comment claimed.
			pattern.WriteString("(")
			groupStartStack = append(groupStartStack, r)
			regexed = true
		case '}':
			// End alternation group
			priorGroupStart := groupStartStack[len(groupStartStack)-1]
			if priorGroupStart != '{' {
				return "", false, fmt.Errorf("invalid '}' at %d, no prior for '{' in %s", i, glob)
			}

			pattern.WriteRune(')')
			groupStartStack = groupStartStack[:len(groupStartStack)-1]
		case '[':
			// Begin character range
			pattern.WriteRune('[')
			groupStartStack = append(groupStartStack, r)
			regexed = true
		case ']':
			// End character range
			priorGroupStart := groupStartStack[len(groupStartStack)-1]
			if priorGroupStart != '[' {
				return "", false, fmt.Errorf("invalid ']' at %d, no prior for '[' in %s", i, glob)
			}

			pattern.WriteRune(']')
			groupStartStack = groupStartStack[:len(groupStartStack)-1]
		case '<', '>', '\'', '$':
			// Regex metacharacters that are legal identifier runes: escape them.
			pattern.WriteRune('\\')
			pattern.WriteRune(r)
		case '|':
			pattern.WriteRune(r)
			regexed = true
		case ',':
			// Commas are part of the pattern only inside a {...} group.
			if groupStartStack[len(groupStartStack)-1] == '{' {
				pattern.WriteRune('|')
			} else {
				return "", false, fmt.Errorf("invalid ',' outside of matching group at pos %d in %s", i, glob)
			}
		default:
			if !strings.ContainsRune(ValidIdentifierRunes, r) {
				return "", false, fmt.Errorf("invalid character %c at pos %d in %s", r, i, glob)
			}
			pattern.WriteRune(r)
		}
	}

	if escaping {
		// A trailing backslash would otherwise be returned as an invalid
		// regex fragment with no error (the original silently did so).
		return "", false, fmt.Errorf("unterminated escape at end of %s", glob)
	}
	if len(groupStartStack) > 1 {
		return "", false, fmt.Errorf("unbalanced '%c' in %s", groupStartStack[len(groupStartStack)-1], glob)
	}

	return pattern.buff.String(), regexed, nil
}
+// [Apache License 2.0](licenses/m3.license.txt) + +// Fork from https://github.com/m3db/m3/blob/e098969502ff32abe4d6be536cc7c1cf06885a85/src/query/graphite/graphite/glob_test.go + +import ( + "fmt" + "regexp" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestGlobToRegexPattern(t *testing.T) { + tests := []struct { + glob string + isRegex bool + regex string + }{ + { + glob: "barbaz", + isRegex: false, + regex: "barbaz", + }, + { + glob: "barbaz:quxqaz", + isRegex: false, + regex: "barbaz:quxqaz", + }, + { + glob: "foo\\+bar.'baz<1001>'.qux", + isRegex: true, + regex: "foo\\+bar\\.+\\'baz\\<1001\\>\\'\\.+qux", + }, + { + glob: "foo.host.me{1,2,3}.*", + isRegex: true, + regex: "foo\\.+host\\.+me(1|2|3)\\.+[^.]*", + }, + { + glob: "bar.zed.whatever[0-9].*.*.bar", + isRegex: true, + regex: "bar\\.+zed\\.+whatever[0-9]\\.+[^.]*\\.+[^.]*\\.+bar", + }, + { + glob: "foo{0[3-9],1[0-9],20}", + isRegex: true, + regex: "foo(0[3-9]|1[0-9]|20)", + }, + { + glob: "foo{0[3-9],1[0-9],20}:bar", + isRegex: true, + regex: "foo(0[3-9]|1[0-9]|20):bar", + }, + } + + for _, test := range tests { + pattern, isRegex, err := globToRegexPattern(test.glob) + require.NoError(t, err) + assert.Equal(t, test.isRegex, isRegex) + assert.Equal(t, test.regex, pattern, "bad pattern for %s", test.glob) + } +} + +func TestGlobToRegexPatternErrors(t *testing.T) { + tests := []struct { + glob string + err string + }{ + {"foo.host{1,2", "unbalanced '{' in foo.host{1,2"}, + {"foo.host{1,2]", "invalid ']' at 12, no prior for '[' in foo.host{1,2]"}, + {"foo.,", "invalid ',' outside of matching group at pos 4 in foo.,"}, + {"foo.host{a[0-}", "invalid '}' at 13, no prior for '{' in foo.host{a[0-}"}, + } + + for _, test := range tests { + _, _, err := globToRegexPattern(test.glob) + require.Error(t, err) + assert.Equal(t, test.err, err.Error(), "invalid error for %s", test.glob) + } +} + +func TestCompileGlob(t *testing.T) { + tests := []struct { + glob 
string + match bool + toMatch []string + }{ + {"foo.bar.timers.baz??-bar.qux.query.count", true, + []string{ + "foo.bar.timers.baz01-bar.qux.query.count", + "foo.bar.timers.baz24-bar.qux.query.count"}}, + {"foo.bar.timers.baz??-bar.qux.query.count", false, + []string{ + "foo.bar.timers.baz-bar.qux.query.count", + "foo.bar.timers.baz.0-bar.qux.query.count", + "foo.bar.timers.baz021-bar.qux.query.count", + "foo.bar.timers.baz991-bar.qux.query.count"}}, + {"foo.host{1,2}.*", true, + []string{"foo.host1.zed", "foo.host2.whatever"}}, + {"foo.*.zed.*", true, + []string{"foo.bar.zed.eq", "foo.zed.zed.zed"}}, + {"foo.*.zed.*", false, + []string{"bar.bar.zed.zed", "foo.bar.zed", "foo.bar.zed.eq.monk"}}, + {"foo.host{1,2}.zed", true, + []string{"foo.host1.zed", "foo.host2.zed"}}, + {"foo.host{1,2}.zed", false, + []string{"foo.host3.zed", "foo.hostA.zed", "blad.host1.zed", "foo.host1.zed.z"}}, + {"optic{0[3-9],1[0-9],20}", true, + []string{"optic03", "optic10", "optic20"}}, + {"optic{0[3-9],1[0-9],20}", false, + []string{"optic01", "optic21", "optic201", "optic031"}}, + } + + for _, test := range tests { + rePattern, _, err := globToRegexPattern(test.glob) + require.NoError(t, err) + re := regexp.MustCompile(fmt.Sprintf("^%s$", rePattern)) + for _, s := range test.toMatch { + matched := re.MatchString(s) + assert.Equal(t, test.match, matched, "incorrect match between %s and %s", test.glob, s) + } + } +} diff --git a/prometheus/mateql.go b/prometheus/mateql.go new file mode 100644 index 0000000..7f97180 --- /dev/null +++ b/prometheus/mateql.go @@ -0,0 +1,63 @@ +package prometheus + +import ( + "fmt" + "strconv" + "strings" + + "github.com/zhihu/promate/mateql" +) + +func CovertMateQuery(query string, terminal bool) (string, error) { + expr, err := mateql.Parse(query) + if err != nil { + return "", err + } + _, expr = covertExpr("", expr, terminal) + return string(expr.AppendString(nil)), nil +} + +func covertExpr(name string, expr mateql.Expr, terminal bool) (string, 
// CovertMateQuery parses a Graphite-flavoured MetricsQL query and rewrites
// every dotted metric path inside it into a label-based selector.
// NOTE(review): "Covert" looks like a typo for "Convert"; the exported name
// is kept as-is for API compatibility.
func CovertMateQuery(query string, terminal bool) (string, error) {
	expr, err := mateql.Parse(query)
	if err != nil {
		return "", err
	}
	// The leading "" means no metric name has been discovered yet.
	_, expr = covertExpr("", expr, terminal)
	return string(expr.AppendString(nil)), nil
}

// covertExpr walks the expression tree, rewriting dotted __name__ filters via
// ConvertGraphiteTarget and translating shorthand grouping labels (g1, g2, …)
// in aggregation modifiers into the generated __<name>_gN__ keys. It threads
// the last discovered metric name through the recursion and mutates the
// expression nodes in place.
func covertExpr(name string, expr mateql.Expr, terminal bool) (string, mateql.Expr) {
	switch e := expr.(type) {
	case *mateql.MetricExpr:
		var filters []mateql.LabelFilter
		for i, filter := range e.LabelFilters {
			// Only rewrite __name__ filters that still contain a dotted
			// Graphite path.
			if filter.Label == "__name__" && strings.Contains(filter.Value, ".") {
				name, filters = ConvertGraphiteTarget(filter.Value, terminal)
				// NOTE(review): on conversion failure this clobbers the outer
				// `name` with "" before skipping the filter — confirm that is
				// intended when a query mixes valid and invalid targets.
				if name == "" || filters == nil {
					continue
				}
				// Appending while ranging is safe here: range iterates the
				// slice header snapshot taken at loop entry.
				e.LabelFilters = append(e.LabelFilters, filters...)
				filter.Value = name
			}
			// Write back the (possibly rewritten) filter by index.
			e.LabelFilters[i] = filter
		}
		return name, e
	case *mateql.RollupExpr:
		name, e.Expr = covertExpr(name, e.Expr, terminal)
		return name, e
	case *mateql.FuncExpr:
		for i, arg := range e.Args {
			name, e.Args[i] = covertExpr(name, arg, terminal)
		}
		return name, e
	case *mateql.AggrFuncExpr:
		for i, arg := range e.Args {
			name, e.Args[i] = covertExpr(name, arg, terminal)
		}
		// Rewrite "gN" grouping labels (e.g. in `by (g1, g2)`) into the
		// generated per-metric label keys, e.g. __<name>_g1__. Requires a
		// metric name discovered by a child expression above.
		for i, arg := range e.Modifier.Args {
			if len(arg) > 1 && arg[0] == 'g' && len(name) > 0 {
				if gi, err := strconv.Atoi(arg[1:]); err == nil {
					e.Modifier.Args[i] = fmt.Sprintf("__%s_g%d__", name, gi)
				}
			}
		}
		return name, e
	case *mateql.BinaryOpExpr:
		name, e.Left = covertExpr(name, e.Left, terminal)
		name, e.Right = covertExpr(name, e.Right, terminal)
		return name, e
	default:
		// Constants, strings, etc. need no rewriting.
		return name, e
	}
}
// ValuesResponse mirrors a JSON reply whose payload is a flat list of strings
// (as produced by a Prometheus-compatible label-values endpoint).
type ValuesResponse struct {
	Status string   `json:"status"`
	Data   []string `json:"data"`
}

// MatrixResponse mirrors a range-query ("matrix") JSON reply.
type MatrixResponse struct {
	Status string       `json:"status"`
	Data   MatrixResult `json:"data"`
}

// MatrixResult holds the result set and its type tag.
type MatrixResult struct {
	Result     []MatrixData `json:"result"`
	ResultType string       `json:"resultType"`
}

// MatrixData is a single series: its label set plus sampled values.
type MatrixData struct {
	Metric map[string]string `json:"metric"`
	Values []MatrixPair      `json:"values"`
}

// MatrixPair is one sample, sent on the wire as a [timestamp, "value"] tuple.
type MatrixPair struct {
	Timestamp float64
	Value     float64
}

// UnmarshalJSON decodes the two-element array form [ts, "value"], where the
// timestamp is a JSON number and the value is a stringified float.
func (m *MatrixPair) UnmarshalJSON(data []byte) error {
	var tuple []interface{}
	if err := json.Unmarshal(data, &tuple); err != nil {
		return err
	}

	if len(tuple) != 2 {
		return fmt.Errorf("length mismatch, got %v, expected 2", len(tuple))
	}

	ts, ok := tuple[0].(float64)
	if !ok {
		return fmt.Errorf("type mismatch for element[0/1], expected 'float64', got '%T', str=%v", tuple[0], string(data))
	}
	// Timestamp is assigned before the value is validated, matching the
	// original field-by-field decode order.
	m.Timestamp = ts

	raw, ok := tuple[1].(string)
	if !ok {
		return fmt.Errorf("type mismatch for element[1/1], expected 'string', got '%T', str=%v", tuple[1], string(data))
	}

	value, err := strconv.ParseFloat(raw, 64)
	if err != nil {
		return err
	}
	m.Value = value
	return nil
}

// TestMatrixPair_UnmarshalJSON checks decoding of a well-formed sample tuple.
func TestMatrixPair_UnmarshalJSON(t *testing.T) {
	m := &MatrixPair{Timestamp: 1590249600, Value: 1}
	if err := m.UnmarshalJSON([]byte(`[1590249600,"1"]`)); err != nil {
		t.Errorf("UnmarshalJSON() error = %v, wantErr %v", err, false)
	}
}