diff --git a/config/config.go b/config/config.go index 780b49a64e4..57768b50efc 100644 --- a/config/config.go +++ b/config/config.go @@ -479,8 +479,13 @@ type GlobalConfig struct { // Keep no more than this many dropped targets per job. // 0 means no limit. KeepDroppedTargets uint `yaml:"keep_dropped_targets,omitempty"` - // Allow UTF8 Metric and Label Names. + // Allow UTF8 Metric and Label Names. Can be blank in config files but must + // have a value if a ScrapeConfig is created programmatically. MetricNameValidationScheme string `yaml:"metric_name_validation_scheme,omitempty"` + // Metric name escaping mode to request through content negotiation. Can be + // blank in config files but must have a value if a ScrapeConfig is created + // programmatically. + MetricNameEscapingScheme string `yaml:"metric_name_escaping_scheme,omitempty"` } // ScrapeProtocol represents supported protocol for scraping metrics. @@ -719,8 +724,13 @@ type ScrapeConfig struct { // Keep no more than this many dropped targets per job. // 0 means no limit. KeepDroppedTargets uint `yaml:"keep_dropped_targets,omitempty"` - // Allow UTF8 Metric and Label Names. + // Allow UTF8 Metric and Label Names. Can be blank in config files but must + // have a value if a ScrapeConfig is created programmatically. MetricNameValidationScheme string `yaml:"metric_name_validation_scheme,omitempty"` + // Metric name escaping mode to request through content negotiation. Can be + // blank in config files but must have a value if a ScrapeConfig is created + // programmatically. + MetricNameEscapingScheme string `yaml:"metric_name_escaping_scheme,omitempty"` // We cannot do proper Go type embedding below as the parser will then parse // values arbitrarily into the overflow maps of further-down types. 
@@ -841,13 +851,48 @@ func (c *ScrapeConfig) Validate(globalConfig GlobalConfig) error { if model.NameValidationScheme != model.UTF8Validation { return errors.New("model.NameValidationScheme must be set to UTF8") } + switch globalConfig.MetricNameValidationScheme { - case "", LegacyValidationConfig, UTF8ValidationConfig: + case "": + globalConfig.MetricNameValidationScheme = UTF8ValidationConfig + case LegacyValidationConfig, UTF8ValidationConfig: default: - return fmt.Errorf("unknown name validation method specified, must be either 'legacy' or 'utf8', got %s", globalConfig.MetricNameValidationScheme) + return fmt.Errorf("unknown global name validation method specified, must be either 'legacy' or 'utf8', got %s", globalConfig.MetricNameValidationScheme) } - if c.MetricNameValidationScheme == "" { + // Scrapeconfig validation scheme matches global if left blank. + switch c.MetricNameValidationScheme { + case "": c.MetricNameValidationScheme = globalConfig.MetricNameValidationScheme + case LegacyValidationConfig, UTF8ValidationConfig: + default: + return fmt.Errorf("unknown scrape config name validation method specified, must be either 'legacy' or 'utf8', got %s", c.MetricNameValidationScheme) + } + + // Escaping scheme is based on the validation scheme if left blank. 
+ switch globalConfig.MetricNameEscapingScheme { + case "": + if globalConfig.MetricNameValidationScheme == LegacyValidationConfig { + globalConfig.MetricNameEscapingScheme = model.EscapeUnderscores + } else { + globalConfig.MetricNameEscapingScheme = model.AllowUTF8 + } + case model.AllowUTF8, model.EscapeUnderscores, model.EscapeDots, model.EscapeValues: + default: + return fmt.Errorf("unknown global name escaping method specified, must be one of '%s', '%s', '%s', or '%s', got %s", model.AllowUTF8, model.EscapeUnderscores, model.EscapeDots, model.EscapeValues, globalConfig.MetricNameEscapingScheme) + } + + if c.MetricNameEscapingScheme == "" { + c.MetricNameEscapingScheme = globalConfig.MetricNameEscapingScheme + } + + switch c.MetricNameEscapingScheme { + case model.AllowUTF8: + if c.MetricNameValidationScheme != UTF8ValidationConfig { + return errors.New("utf8 metric names requested but validation scheme is not set to UTF8") + } + case model.EscapeUnderscores, model.EscapeDots, model.EscapeValues: + default: + return fmt.Errorf("unknown scrape config name escaping method specified, must be one of '%s', '%s', '%s', or '%s', got %s", model.AllowUTF8, model.EscapeUnderscores, model.EscapeDots, model.EscapeValues, c.MetricNameEscapingScheme) } return nil @@ -858,6 +903,20 @@ func (c *ScrapeConfig) MarshalYAML() (interface{}, error) { return discovery.MarshalYAMLWithInlineConfigs(c) } +// ToValidationScheme returns the validation scheme for the given string config value. +func ToValidationScheme(s string) (validationScheme model.ValidationScheme, err error) { + switch s { + case UTF8ValidationConfig: + validationScheme = model.UTF8Validation + case LegacyValidationConfig: + validationScheme = model.LegacyValidation + default: + return model.UTF8Validation, fmt.Errorf("invalid metric name validation scheme, %s", s) + } + + return validationScheme, nil +} + // StorageConfig configures runtime reloadable configuration options. 
type StorageConfig struct { TSDBConfig *TSDBConfig `yaml:"tsdb,omitempty"` diff --git a/config/config_test.go b/config/config_test.go index be21a62c070..5725ec2f1a1 100644 --- a/config/config_test.go +++ b/config/config_test.go @@ -202,20 +202,22 @@ var expectedConf = &Config{ { JobName: "prometheus", - HonorLabels: true, - HonorTimestamps: true, - ScrapeInterval: model.Duration(15 * time.Second), - ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout, - EnableCompression: true, - BodySizeLimit: globBodySizeLimit, - SampleLimit: globSampleLimit, - TargetLimit: globTargetLimit, - LabelLimit: globLabelLimit, - LabelNameLengthLimit: globLabelNameLengthLimit, - LabelValueLengthLimit: globLabelValueLengthLimit, - ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols, - ScrapeFallbackProtocol: PrometheusText0_0_4, - ScrapeFailureLogFile: "testdata/fail_prom.log", + HonorLabels: true, + HonorTimestamps: true, + ScrapeInterval: model.Duration(15 * time.Second), + ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout, + EnableCompression: true, + BodySizeLimit: globBodySizeLimit, + SampleLimit: globSampleLimit, + TargetLimit: globTargetLimit, + LabelLimit: globLabelLimit, + LabelNameLengthLimit: globLabelNameLengthLimit, + LabelValueLengthLimit: globLabelValueLengthLimit, + ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols, + ScrapeFallbackProtocol: PrometheusText0_0_4, + ScrapeFailureLogFile: "testdata/fail_prom.log", + MetricNameValidationScheme: UTF8ValidationConfig, + MetricNameEscapingScheme: model.AllowUTF8, MetricsPath: DefaultScrapeConfig.MetricsPath, Scheme: DefaultScrapeConfig.Scheme, @@ -317,18 +319,20 @@ var expectedConf = &Config{ { JobName: "service-x", - HonorTimestamps: true, - ScrapeInterval: model.Duration(50 * time.Second), - ScrapeTimeout: model.Duration(5 * time.Second), - EnableCompression: true, - BodySizeLimit: 10 * units.MiB, - SampleLimit: 1000, - TargetLimit: 35, - LabelLimit: 35, - LabelNameLengthLimit: 210, - LabelValueLengthLimit: 210, - 
ScrapeProtocols: []ScrapeProtocol{PrometheusText0_0_4}, - ScrapeFailureLogFile: globScrapeFailureLogFile, + HonorTimestamps: true, + ScrapeInterval: model.Duration(50 * time.Second), + ScrapeTimeout: model.Duration(5 * time.Second), + EnableCompression: true, + BodySizeLimit: 10 * units.MiB, + SampleLimit: 1000, + TargetLimit: 35, + LabelLimit: 35, + LabelNameLengthLimit: 210, + LabelValueLengthLimit: 210, + ScrapeProtocols: []ScrapeProtocol{PrometheusText0_0_4}, + ScrapeFailureLogFile: globScrapeFailureLogFile, + MetricNameValidationScheme: UTF8ValidationConfig, + MetricNameEscapingScheme: model.AllowUTF8, HTTPClientConfig: config.HTTPClientConfig{ BasicAuth: &config.BasicAuth{ @@ -415,18 +419,20 @@ var expectedConf = &Config{ { JobName: "service-y", - HonorTimestamps: true, - ScrapeInterval: model.Duration(15 * time.Second), - ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout, - EnableCompression: true, - BodySizeLimit: globBodySizeLimit, - SampleLimit: globSampleLimit, - TargetLimit: globTargetLimit, - LabelLimit: globLabelLimit, - LabelNameLengthLimit: globLabelNameLengthLimit, - LabelValueLengthLimit: globLabelValueLengthLimit, - ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols, - ScrapeFailureLogFile: globScrapeFailureLogFile, + HonorTimestamps: true, + ScrapeInterval: model.Duration(15 * time.Second), + ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout, + EnableCompression: true, + BodySizeLimit: globBodySizeLimit, + SampleLimit: globSampleLimit, + TargetLimit: globTargetLimit, + LabelLimit: globLabelLimit, + LabelNameLengthLimit: globLabelNameLengthLimit, + LabelValueLengthLimit: globLabelValueLengthLimit, + ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols, + ScrapeFailureLogFile: globScrapeFailureLogFile, + MetricNameValidationScheme: UTF8ValidationConfig, + MetricNameEscapingScheme: model.AllowUTF8, MetricsPath: DefaultScrapeConfig.MetricsPath, Scheme: DefaultScrapeConfig.Scheme, @@ -471,18 +477,20 @@ var expectedConf = &Config{ { JobName: 
"service-z", - HonorTimestamps: true, - ScrapeInterval: model.Duration(15 * time.Second), - ScrapeTimeout: model.Duration(10 * time.Second), - EnableCompression: true, - BodySizeLimit: globBodySizeLimit, - SampleLimit: globSampleLimit, - TargetLimit: globTargetLimit, - LabelLimit: globLabelLimit, - LabelNameLengthLimit: globLabelNameLengthLimit, - LabelValueLengthLimit: globLabelValueLengthLimit, - ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols, - ScrapeFailureLogFile: globScrapeFailureLogFile, + HonorTimestamps: true, + ScrapeInterval: model.Duration(15 * time.Second), + ScrapeTimeout: model.Duration(10 * time.Second), + EnableCompression: true, + BodySizeLimit: globBodySizeLimit, + SampleLimit: globSampleLimit, + TargetLimit: globTargetLimit, + LabelLimit: globLabelLimit, + LabelNameLengthLimit: globLabelNameLengthLimit, + LabelValueLengthLimit: globLabelValueLengthLimit, + ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols, + ScrapeFailureLogFile: globScrapeFailureLogFile, + MetricNameValidationScheme: UTF8ValidationConfig, + MetricNameEscapingScheme: model.AllowUTF8, MetricsPath: "/metrics", Scheme: "http", @@ -505,18 +513,20 @@ var expectedConf = &Config{ { JobName: "service-kubernetes", - HonorTimestamps: true, - ScrapeInterval: model.Duration(15 * time.Second), - ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout, - EnableCompression: true, - BodySizeLimit: globBodySizeLimit, - SampleLimit: globSampleLimit, - TargetLimit: globTargetLimit, - LabelLimit: globLabelLimit, - LabelNameLengthLimit: globLabelNameLengthLimit, - LabelValueLengthLimit: globLabelValueLengthLimit, - ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols, - ScrapeFailureLogFile: globScrapeFailureLogFile, + HonorTimestamps: true, + ScrapeInterval: model.Duration(15 * time.Second), + ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout, + EnableCompression: true, + BodySizeLimit: globBodySizeLimit, + SampleLimit: globSampleLimit, + TargetLimit: globTargetLimit, + LabelLimit: globLabelLimit, 
+ LabelNameLengthLimit: globLabelNameLengthLimit, + LabelValueLengthLimit: globLabelValueLengthLimit, + ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols, + ScrapeFailureLogFile: globScrapeFailureLogFile, + MetricNameValidationScheme: UTF8ValidationConfig, + MetricNameEscapingScheme: model.AllowUTF8, MetricsPath: DefaultScrapeConfig.MetricsPath, Scheme: DefaultScrapeConfig.Scheme, @@ -545,18 +555,20 @@ var expectedConf = &Config{ { JobName: "service-kubernetes-namespaces", - HonorTimestamps: true, - ScrapeInterval: model.Duration(15 * time.Second), - ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout, - EnableCompression: true, - BodySizeLimit: globBodySizeLimit, - SampleLimit: globSampleLimit, - TargetLimit: globTargetLimit, - LabelLimit: globLabelLimit, - LabelNameLengthLimit: globLabelNameLengthLimit, - LabelValueLengthLimit: globLabelValueLengthLimit, - ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols, - ScrapeFailureLogFile: globScrapeFailureLogFile, + HonorTimestamps: true, + ScrapeInterval: model.Duration(15 * time.Second), + ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout, + EnableCompression: true, + BodySizeLimit: globBodySizeLimit, + SampleLimit: globSampleLimit, + TargetLimit: globTargetLimit, + LabelLimit: globLabelLimit, + LabelNameLengthLimit: globLabelNameLengthLimit, + LabelValueLengthLimit: globLabelValueLengthLimit, + ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols, + ScrapeFailureLogFile: globScrapeFailureLogFile, + MetricNameValidationScheme: UTF8ValidationConfig, + MetricNameEscapingScheme: model.AllowUTF8, MetricsPath: DefaultScrapeConfig.MetricsPath, Scheme: DefaultScrapeConfig.Scheme, @@ -585,18 +597,20 @@ var expectedConf = &Config{ { JobName: "service-kuma", - HonorTimestamps: true, - ScrapeInterval: model.Duration(15 * time.Second), - ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout, - EnableCompression: true, - BodySizeLimit: globBodySizeLimit, - SampleLimit: globSampleLimit, - TargetLimit: globTargetLimit, - LabelLimit: 
globLabelLimit, - LabelNameLengthLimit: globLabelNameLengthLimit, - LabelValueLengthLimit: globLabelValueLengthLimit, - ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols, - ScrapeFailureLogFile: globScrapeFailureLogFile, + HonorTimestamps: true, + ScrapeInterval: model.Duration(15 * time.Second), + ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout, + EnableCompression: true, + BodySizeLimit: globBodySizeLimit, + SampleLimit: globSampleLimit, + TargetLimit: globTargetLimit, + LabelLimit: globLabelLimit, + LabelNameLengthLimit: globLabelNameLengthLimit, + LabelValueLengthLimit: globLabelValueLengthLimit, + ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols, + ScrapeFailureLogFile: globScrapeFailureLogFile, + MetricNameValidationScheme: UTF8ValidationConfig, + MetricNameEscapingScheme: model.AllowUTF8, MetricsPath: DefaultScrapeConfig.MetricsPath, Scheme: DefaultScrapeConfig.Scheme, @@ -615,18 +629,20 @@ var expectedConf = &Config{ { JobName: "service-marathon", - HonorTimestamps: true, - ScrapeInterval: model.Duration(15 * time.Second), - ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout, - EnableCompression: true, - BodySizeLimit: globBodySizeLimit, - SampleLimit: globSampleLimit, - TargetLimit: globTargetLimit, - LabelLimit: globLabelLimit, - LabelNameLengthLimit: globLabelNameLengthLimit, - LabelValueLengthLimit: globLabelValueLengthLimit, - ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols, - ScrapeFailureLogFile: globScrapeFailureLogFile, + HonorTimestamps: true, + ScrapeInterval: model.Duration(15 * time.Second), + ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout, + EnableCompression: true, + BodySizeLimit: globBodySizeLimit, + SampleLimit: globSampleLimit, + TargetLimit: globTargetLimit, + LabelLimit: globLabelLimit, + LabelNameLengthLimit: globLabelNameLengthLimit, + LabelValueLengthLimit: globLabelValueLengthLimit, + ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols, + ScrapeFailureLogFile: globScrapeFailureLogFile, + MetricNameValidationScheme: 
UTF8ValidationConfig, + MetricNameEscapingScheme: model.AllowUTF8, MetricsPath: DefaultScrapeConfig.MetricsPath, Scheme: DefaultScrapeConfig.Scheme, @@ -653,18 +669,20 @@ var expectedConf = &Config{ { JobName: "service-nomad", - HonorTimestamps: true, - ScrapeInterval: model.Duration(15 * time.Second), - ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout, - EnableCompression: true, - BodySizeLimit: globBodySizeLimit, - SampleLimit: globSampleLimit, - TargetLimit: globTargetLimit, - LabelLimit: globLabelLimit, - LabelNameLengthLimit: globLabelNameLengthLimit, - LabelValueLengthLimit: globLabelValueLengthLimit, - ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols, - ScrapeFailureLogFile: globScrapeFailureLogFile, + HonorTimestamps: true, + ScrapeInterval: model.Duration(15 * time.Second), + ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout, + EnableCompression: true, + BodySizeLimit: globBodySizeLimit, + SampleLimit: globSampleLimit, + TargetLimit: globTargetLimit, + LabelLimit: globLabelLimit, + LabelNameLengthLimit: globLabelNameLengthLimit, + LabelValueLengthLimit: globLabelValueLengthLimit, + ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols, + ScrapeFailureLogFile: globScrapeFailureLogFile, + MetricNameValidationScheme: UTF8ValidationConfig, + MetricNameEscapingScheme: model.AllowUTF8, MetricsPath: DefaultScrapeConfig.MetricsPath, Scheme: DefaultScrapeConfig.Scheme, @@ -688,18 +706,20 @@ var expectedConf = &Config{ { JobName: "service-ec2", - HonorTimestamps: true, - ScrapeInterval: model.Duration(15 * time.Second), - ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout, - EnableCompression: true, - BodySizeLimit: globBodySizeLimit, - SampleLimit: globSampleLimit, - TargetLimit: globTargetLimit, - LabelLimit: globLabelLimit, - LabelNameLengthLimit: globLabelNameLengthLimit, - LabelValueLengthLimit: globLabelValueLengthLimit, - ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols, - ScrapeFailureLogFile: globScrapeFailureLogFile, + HonorTimestamps: true, + 
ScrapeInterval: model.Duration(15 * time.Second), + ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout, + EnableCompression: true, + BodySizeLimit: globBodySizeLimit, + SampleLimit: globSampleLimit, + TargetLimit: globTargetLimit, + LabelLimit: globLabelLimit, + LabelNameLengthLimit: globLabelNameLengthLimit, + LabelValueLengthLimit: globLabelValueLengthLimit, + ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols, + ScrapeFailureLogFile: globScrapeFailureLogFile, + MetricNameValidationScheme: UTF8ValidationConfig, + MetricNameEscapingScheme: model.AllowUTF8, MetricsPath: DefaultScrapeConfig.MetricsPath, Scheme: DefaultScrapeConfig.Scheme, @@ -730,18 +750,20 @@ var expectedConf = &Config{ { JobName: "service-lightsail", - HonorTimestamps: true, - ScrapeInterval: model.Duration(15 * time.Second), - ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout, - EnableCompression: true, - BodySizeLimit: globBodySizeLimit, - SampleLimit: globSampleLimit, - TargetLimit: globTargetLimit, - LabelLimit: globLabelLimit, - LabelNameLengthLimit: globLabelNameLengthLimit, - LabelValueLengthLimit: globLabelValueLengthLimit, - ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols, - ScrapeFailureLogFile: globScrapeFailureLogFile, + HonorTimestamps: true, + ScrapeInterval: model.Duration(15 * time.Second), + ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout, + EnableCompression: true, + BodySizeLimit: globBodySizeLimit, + SampleLimit: globSampleLimit, + TargetLimit: globTargetLimit, + LabelLimit: globLabelLimit, + LabelNameLengthLimit: globLabelNameLengthLimit, + LabelValueLengthLimit: globLabelValueLengthLimit, + ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols, + ScrapeFailureLogFile: globScrapeFailureLogFile, + MetricNameValidationScheme: UTF8ValidationConfig, + MetricNameEscapingScheme: model.AllowUTF8, MetricsPath: DefaultScrapeConfig.MetricsPath, Scheme: DefaultScrapeConfig.Scheme, @@ -762,18 +784,20 @@ var expectedConf = &Config{ { JobName: "service-azure", - HonorTimestamps: true, - 
ScrapeInterval: model.Duration(15 * time.Second), - ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout, - EnableCompression: true, - BodySizeLimit: globBodySizeLimit, - SampleLimit: globSampleLimit, - TargetLimit: globTargetLimit, - LabelLimit: globLabelLimit, - LabelNameLengthLimit: globLabelNameLengthLimit, - LabelValueLengthLimit: globLabelValueLengthLimit, - ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols, - ScrapeFailureLogFile: globScrapeFailureLogFile, + HonorTimestamps: true, + ScrapeInterval: model.Duration(15 * time.Second), + ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout, + EnableCompression: true, + BodySizeLimit: globBodySizeLimit, + SampleLimit: globSampleLimit, + TargetLimit: globTargetLimit, + LabelLimit: globLabelLimit, + LabelNameLengthLimit: globLabelNameLengthLimit, + LabelValueLengthLimit: globLabelValueLengthLimit, + ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols, + ScrapeFailureLogFile: globScrapeFailureLogFile, + MetricNameValidationScheme: UTF8ValidationConfig, + MetricNameEscapingScheme: model.AllowUTF8, MetricsPath: DefaultScrapeConfig.MetricsPath, Scheme: DefaultScrapeConfig.Scheme, @@ -797,18 +821,20 @@ var expectedConf = &Config{ { JobName: "service-nerve", - HonorTimestamps: true, - ScrapeInterval: model.Duration(15 * time.Second), - ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout, - EnableCompression: true, - BodySizeLimit: globBodySizeLimit, - SampleLimit: globSampleLimit, - TargetLimit: globTargetLimit, - LabelLimit: globLabelLimit, - LabelNameLengthLimit: globLabelNameLengthLimit, - LabelValueLengthLimit: globLabelValueLengthLimit, - ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols, - ScrapeFailureLogFile: globScrapeFailureLogFile, + HonorTimestamps: true, + ScrapeInterval: model.Duration(15 * time.Second), + ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout, + EnableCompression: true, + BodySizeLimit: globBodySizeLimit, + SampleLimit: globSampleLimit, + TargetLimit: globTargetLimit, + LabelLimit: globLabelLimit, + 
LabelNameLengthLimit: globLabelNameLengthLimit, + LabelValueLengthLimit: globLabelValueLengthLimit, + ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols, + ScrapeFailureLogFile: globScrapeFailureLogFile, + MetricNameValidationScheme: UTF8ValidationConfig, + MetricNameEscapingScheme: model.AllowUTF8, MetricsPath: DefaultScrapeConfig.MetricsPath, Scheme: DefaultScrapeConfig.Scheme, @@ -825,18 +851,20 @@ var expectedConf = &Config{ { JobName: "0123service-xxx", - HonorTimestamps: true, - ScrapeInterval: model.Duration(15 * time.Second), - ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout, - EnableCompression: true, - BodySizeLimit: globBodySizeLimit, - SampleLimit: globSampleLimit, - TargetLimit: globTargetLimit, - LabelLimit: globLabelLimit, - LabelNameLengthLimit: globLabelNameLengthLimit, - LabelValueLengthLimit: globLabelValueLengthLimit, - ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols, - ScrapeFailureLogFile: globScrapeFailureLogFile, + HonorTimestamps: true, + ScrapeInterval: model.Duration(15 * time.Second), + ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout, + EnableCompression: true, + BodySizeLimit: globBodySizeLimit, + SampleLimit: globSampleLimit, + TargetLimit: globTargetLimit, + LabelLimit: globLabelLimit, + LabelNameLengthLimit: globLabelNameLengthLimit, + LabelValueLengthLimit: globLabelValueLengthLimit, + ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols, + ScrapeFailureLogFile: globScrapeFailureLogFile, + MetricNameValidationScheme: UTF8ValidationConfig, + MetricNameEscapingScheme: model.AllowUTF8, MetricsPath: DefaultScrapeConfig.MetricsPath, Scheme: DefaultScrapeConfig.Scheme, @@ -856,18 +884,20 @@ var expectedConf = &Config{ { JobName: "badfederation", - HonorTimestamps: false, - ScrapeInterval: model.Duration(15 * time.Second), - ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout, - EnableCompression: true, - BodySizeLimit: globBodySizeLimit, - SampleLimit: globSampleLimit, - TargetLimit: globTargetLimit, - LabelLimit: globLabelLimit, - 
LabelNameLengthLimit: globLabelNameLengthLimit, - LabelValueLengthLimit: globLabelValueLengthLimit, - ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols, - ScrapeFailureLogFile: globScrapeFailureLogFile, + HonorTimestamps: false, + ScrapeInterval: model.Duration(15 * time.Second), + ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout, + EnableCompression: true, + BodySizeLimit: globBodySizeLimit, + SampleLimit: globSampleLimit, + TargetLimit: globTargetLimit, + LabelLimit: globLabelLimit, + LabelNameLengthLimit: globLabelNameLengthLimit, + LabelValueLengthLimit: globLabelValueLengthLimit, + ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols, + ScrapeFailureLogFile: globScrapeFailureLogFile, + MetricNameValidationScheme: UTF8ValidationConfig, + MetricNameEscapingScheme: model.AllowUTF8, MetricsPath: "/federate", Scheme: DefaultScrapeConfig.Scheme, @@ -887,18 +917,20 @@ var expectedConf = &Config{ { JobName: "測試", - HonorTimestamps: true, - ScrapeInterval: model.Duration(15 * time.Second), - ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout, - EnableCompression: true, - BodySizeLimit: globBodySizeLimit, - SampleLimit: globSampleLimit, - TargetLimit: globTargetLimit, - LabelLimit: globLabelLimit, - LabelNameLengthLimit: globLabelNameLengthLimit, - LabelValueLengthLimit: globLabelValueLengthLimit, - ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols, - ScrapeFailureLogFile: globScrapeFailureLogFile, + HonorTimestamps: true, + ScrapeInterval: model.Duration(15 * time.Second), + ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout, + EnableCompression: true, + BodySizeLimit: globBodySizeLimit, + SampleLimit: globSampleLimit, + TargetLimit: globTargetLimit, + LabelLimit: globLabelLimit, + LabelNameLengthLimit: globLabelNameLengthLimit, + LabelValueLengthLimit: globLabelValueLengthLimit, + ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols, + ScrapeFailureLogFile: globScrapeFailureLogFile, + MetricNameValidationScheme: UTF8ValidationConfig, + MetricNameEscapingScheme: 
model.AllowUTF8, MetricsPath: DefaultScrapeConfig.MetricsPath, Scheme: DefaultScrapeConfig.Scheme, @@ -918,18 +950,20 @@ var expectedConf = &Config{ { JobName: "httpsd", - HonorTimestamps: true, - ScrapeInterval: model.Duration(15 * time.Second), - ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout, - EnableCompression: true, - BodySizeLimit: globBodySizeLimit, - SampleLimit: globSampleLimit, - TargetLimit: globTargetLimit, - LabelLimit: globLabelLimit, - LabelNameLengthLimit: globLabelNameLengthLimit, - LabelValueLengthLimit: globLabelValueLengthLimit, - ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols, - ScrapeFailureLogFile: globScrapeFailureLogFile, + HonorTimestamps: true, + ScrapeInterval: model.Duration(15 * time.Second), + ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout, + EnableCompression: true, + BodySizeLimit: globBodySizeLimit, + SampleLimit: globSampleLimit, + TargetLimit: globTargetLimit, + LabelLimit: globLabelLimit, + LabelNameLengthLimit: globLabelNameLengthLimit, + LabelValueLengthLimit: globLabelValueLengthLimit, + ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols, + ScrapeFailureLogFile: globScrapeFailureLogFile, + MetricNameValidationScheme: UTF8ValidationConfig, + MetricNameEscapingScheme: model.AllowUTF8, MetricsPath: DefaultScrapeConfig.MetricsPath, Scheme: DefaultScrapeConfig.Scheme, @@ -946,18 +980,20 @@ var expectedConf = &Config{ { JobName: "service-triton", - HonorTimestamps: true, - ScrapeInterval: model.Duration(15 * time.Second), - ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout, - EnableCompression: true, - BodySizeLimit: globBodySizeLimit, - SampleLimit: globSampleLimit, - TargetLimit: globTargetLimit, - LabelLimit: globLabelLimit, - LabelNameLengthLimit: globLabelNameLengthLimit, - LabelValueLengthLimit: globLabelValueLengthLimit, - ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols, - ScrapeFailureLogFile: globScrapeFailureLogFile, + HonorTimestamps: true, + ScrapeInterval: model.Duration(15 * time.Second), + 
ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout, + EnableCompression: true, + BodySizeLimit: globBodySizeLimit, + SampleLimit: globSampleLimit, + TargetLimit: globTargetLimit, + LabelLimit: globLabelLimit, + LabelNameLengthLimit: globLabelNameLengthLimit, + LabelValueLengthLimit: globLabelValueLengthLimit, + ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols, + ScrapeFailureLogFile: globScrapeFailureLogFile, + MetricNameValidationScheme: UTF8ValidationConfig, + MetricNameEscapingScheme: model.AllowUTF8, MetricsPath: DefaultScrapeConfig.MetricsPath, Scheme: DefaultScrapeConfig.Scheme, @@ -982,18 +1018,20 @@ var expectedConf = &Config{ { JobName: "digitalocean-droplets", - HonorTimestamps: true, - ScrapeInterval: model.Duration(15 * time.Second), - ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout, - EnableCompression: true, - BodySizeLimit: globBodySizeLimit, - SampleLimit: globSampleLimit, - TargetLimit: globTargetLimit, - LabelLimit: globLabelLimit, - LabelNameLengthLimit: globLabelNameLengthLimit, - LabelValueLengthLimit: globLabelValueLengthLimit, - ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols, - ScrapeFailureLogFile: globScrapeFailureLogFile, + HonorTimestamps: true, + ScrapeInterval: model.Duration(15 * time.Second), + ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout, + EnableCompression: true, + BodySizeLimit: globBodySizeLimit, + SampleLimit: globSampleLimit, + TargetLimit: globTargetLimit, + LabelLimit: globLabelLimit, + LabelNameLengthLimit: globLabelNameLengthLimit, + LabelValueLengthLimit: globLabelValueLengthLimit, + ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols, + ScrapeFailureLogFile: globScrapeFailureLogFile, + MetricNameValidationScheme: UTF8ValidationConfig, + MetricNameEscapingScheme: model.AllowUTF8, MetricsPath: DefaultScrapeConfig.MetricsPath, Scheme: DefaultScrapeConfig.Scheme, @@ -1017,18 +1055,20 @@ var expectedConf = &Config{ { JobName: "docker", - HonorTimestamps: true, - ScrapeInterval: model.Duration(15 * time.Second), - 
ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout, - EnableCompression: true, - BodySizeLimit: globBodySizeLimit, - SampleLimit: globSampleLimit, - TargetLimit: globTargetLimit, - LabelLimit: globLabelLimit, - LabelNameLengthLimit: globLabelNameLengthLimit, - LabelValueLengthLimit: globLabelValueLengthLimit, - ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols, - ScrapeFailureLogFile: globScrapeFailureLogFile, + HonorTimestamps: true, + ScrapeInterval: model.Duration(15 * time.Second), + ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout, + EnableCompression: true, + BodySizeLimit: globBodySizeLimit, + SampleLimit: globSampleLimit, + TargetLimit: globTargetLimit, + LabelLimit: globLabelLimit, + LabelNameLengthLimit: globLabelNameLengthLimit, + LabelValueLengthLimit: globLabelValueLengthLimit, + ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols, + ScrapeFailureLogFile: globScrapeFailureLogFile, + MetricNameValidationScheme: UTF8ValidationConfig, + MetricNameEscapingScheme: model.AllowUTF8, MetricsPath: DefaultScrapeConfig.MetricsPath, Scheme: DefaultScrapeConfig.Scheme, @@ -1049,18 +1089,20 @@ var expectedConf = &Config{ { JobName: "dockerswarm", - HonorTimestamps: true, - ScrapeInterval: model.Duration(15 * time.Second), - ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout, - EnableCompression: true, - BodySizeLimit: globBodySizeLimit, - SampleLimit: globSampleLimit, - TargetLimit: globTargetLimit, - LabelLimit: globLabelLimit, - LabelNameLengthLimit: globLabelNameLengthLimit, - LabelValueLengthLimit: globLabelValueLengthLimit, - ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols, - ScrapeFailureLogFile: globScrapeFailureLogFile, + HonorTimestamps: true, + ScrapeInterval: model.Duration(15 * time.Second), + ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout, + EnableCompression: true, + BodySizeLimit: globBodySizeLimit, + SampleLimit: globSampleLimit, + TargetLimit: globTargetLimit, + LabelLimit: globLabelLimit, + LabelNameLengthLimit: globLabelNameLengthLimit, + 
LabelValueLengthLimit: globLabelValueLengthLimit, + ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols, + ScrapeFailureLogFile: globScrapeFailureLogFile, + MetricNameValidationScheme: UTF8ValidationConfig, + MetricNameEscapingScheme: model.AllowUTF8, MetricsPath: DefaultScrapeConfig.MetricsPath, Scheme: DefaultScrapeConfig.Scheme, @@ -1080,18 +1122,20 @@ var expectedConf = &Config{ { JobName: "service-openstack", - HonorTimestamps: true, - ScrapeInterval: model.Duration(15 * time.Second), - ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout, - EnableCompression: true, - BodySizeLimit: globBodySizeLimit, - SampleLimit: globSampleLimit, - TargetLimit: globTargetLimit, - LabelLimit: globLabelLimit, - LabelNameLengthLimit: globLabelNameLengthLimit, - LabelValueLengthLimit: globLabelValueLengthLimit, - ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols, - ScrapeFailureLogFile: globScrapeFailureLogFile, + HonorTimestamps: true, + ScrapeInterval: model.Duration(15 * time.Second), + ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout, + EnableCompression: true, + BodySizeLimit: globBodySizeLimit, + SampleLimit: globSampleLimit, + TargetLimit: globTargetLimit, + LabelLimit: globLabelLimit, + LabelNameLengthLimit: globLabelNameLengthLimit, + LabelValueLengthLimit: globLabelValueLengthLimit, + ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols, + ScrapeFailureLogFile: globScrapeFailureLogFile, + MetricNameValidationScheme: UTF8ValidationConfig, + MetricNameEscapingScheme: model.AllowUTF8, MetricsPath: DefaultScrapeConfig.MetricsPath, Scheme: DefaultScrapeConfig.Scheme, @@ -1115,18 +1159,20 @@ var expectedConf = &Config{ { JobName: "service-puppetdb", - HonorTimestamps: true, - ScrapeInterval: model.Duration(15 * time.Second), - ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout, - EnableCompression: true, - BodySizeLimit: globBodySizeLimit, - SampleLimit: globSampleLimit, - TargetLimit: globTargetLimit, - LabelLimit: globLabelLimit, - LabelNameLengthLimit: 
globLabelNameLengthLimit, - LabelValueLengthLimit: globLabelValueLengthLimit, - ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols, - ScrapeFailureLogFile: globScrapeFailureLogFile, + HonorTimestamps: true, + ScrapeInterval: model.Duration(15 * time.Second), + ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout, + EnableCompression: true, + BodySizeLimit: globBodySizeLimit, + SampleLimit: globSampleLimit, + TargetLimit: globTargetLimit, + LabelLimit: globLabelLimit, + LabelNameLengthLimit: globLabelNameLengthLimit, + LabelValueLengthLimit: globLabelValueLengthLimit, + ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols, + ScrapeFailureLogFile: globScrapeFailureLogFile, + MetricNameValidationScheme: UTF8ValidationConfig, + MetricNameEscapingScheme: model.AllowUTF8, MetricsPath: DefaultScrapeConfig.MetricsPath, Scheme: DefaultScrapeConfig.Scheme, @@ -1152,19 +1198,21 @@ var expectedConf = &Config{ }, }, { - JobName: "hetzner", - HonorTimestamps: true, - ScrapeInterval: model.Duration(15 * time.Second), - ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout, - EnableCompression: true, - BodySizeLimit: globBodySizeLimit, - SampleLimit: globSampleLimit, - TargetLimit: globTargetLimit, - LabelLimit: globLabelLimit, - LabelNameLengthLimit: globLabelNameLengthLimit, - LabelValueLengthLimit: globLabelValueLengthLimit, - ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols, - ScrapeFailureLogFile: globScrapeFailureLogFile, + JobName: "hetzner", + HonorTimestamps: true, + ScrapeInterval: model.Duration(15 * time.Second), + ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout, + EnableCompression: true, + BodySizeLimit: globBodySizeLimit, + SampleLimit: globSampleLimit, + TargetLimit: globTargetLimit, + LabelLimit: globLabelLimit, + LabelNameLengthLimit: globLabelNameLengthLimit, + LabelValueLengthLimit: globLabelValueLengthLimit, + ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols, + ScrapeFailureLogFile: globScrapeFailureLogFile, + MetricNameValidationScheme: 
UTF8ValidationConfig, + MetricNameEscapingScheme: model.AllowUTF8, MetricsPath: DefaultScrapeConfig.MetricsPath, Scheme: DefaultScrapeConfig.Scheme, @@ -1210,18 +1258,20 @@ var expectedConf = &Config{ { JobName: "service-eureka", - HonorTimestamps: true, - ScrapeInterval: model.Duration(15 * time.Second), - ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout, - EnableCompression: true, - BodySizeLimit: globBodySizeLimit, - SampleLimit: globSampleLimit, - TargetLimit: globTargetLimit, - LabelLimit: globLabelLimit, - LabelNameLengthLimit: globLabelNameLengthLimit, - LabelValueLengthLimit: globLabelValueLengthLimit, - ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols, - ScrapeFailureLogFile: globScrapeFailureLogFile, + HonorTimestamps: true, + ScrapeInterval: model.Duration(15 * time.Second), + ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout, + EnableCompression: true, + BodySizeLimit: globBodySizeLimit, + SampleLimit: globSampleLimit, + TargetLimit: globTargetLimit, + LabelLimit: globLabelLimit, + LabelNameLengthLimit: globLabelNameLengthLimit, + LabelValueLengthLimit: globLabelValueLengthLimit, + ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols, + ScrapeFailureLogFile: globScrapeFailureLogFile, + MetricNameValidationScheme: UTF8ValidationConfig, + MetricNameEscapingScheme: model.AllowUTF8, MetricsPath: DefaultScrapeConfig.MetricsPath, Scheme: DefaultScrapeConfig.Scheme, @@ -1238,18 +1288,20 @@ var expectedConf = &Config{ { JobName: "ovhcloud", - HonorTimestamps: true, - ScrapeInterval: model.Duration(15 * time.Second), - ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout, - EnableCompression: true, - BodySizeLimit: globBodySizeLimit, - SampleLimit: globSampleLimit, - TargetLimit: globTargetLimit, - LabelLimit: globLabelLimit, - LabelNameLengthLimit: globLabelNameLengthLimit, - LabelValueLengthLimit: globLabelValueLengthLimit, - ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols, - ScrapeFailureLogFile: globScrapeFailureLogFile, + HonorTimestamps: true, + 
ScrapeInterval: model.Duration(15 * time.Second), + ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout, + EnableCompression: true, + BodySizeLimit: globBodySizeLimit, + SampleLimit: globSampleLimit, + TargetLimit: globTargetLimit, + LabelLimit: globLabelLimit, + LabelNameLengthLimit: globLabelNameLengthLimit, + LabelValueLengthLimit: globLabelValueLengthLimit, + ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols, + ScrapeFailureLogFile: globScrapeFailureLogFile, + MetricNameValidationScheme: UTF8ValidationConfig, + MetricNameEscapingScheme: model.AllowUTF8, HTTPClientConfig: config.DefaultHTTPClientConfig, MetricsPath: DefaultScrapeConfig.MetricsPath, @@ -1277,18 +1329,20 @@ var expectedConf = &Config{ { JobName: "scaleway", - HonorTimestamps: true, - ScrapeInterval: model.Duration(15 * time.Second), - ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout, - EnableCompression: true, - BodySizeLimit: globBodySizeLimit, - SampleLimit: globSampleLimit, - TargetLimit: globTargetLimit, - LabelLimit: globLabelLimit, - LabelNameLengthLimit: globLabelNameLengthLimit, - LabelValueLengthLimit: globLabelValueLengthLimit, - ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols, - ScrapeFailureLogFile: globScrapeFailureLogFile, + HonorTimestamps: true, + ScrapeInterval: model.Duration(15 * time.Second), + ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout, + EnableCompression: true, + BodySizeLimit: globBodySizeLimit, + SampleLimit: globSampleLimit, + TargetLimit: globTargetLimit, + LabelLimit: globLabelLimit, + LabelNameLengthLimit: globLabelNameLengthLimit, + LabelValueLengthLimit: globLabelValueLengthLimit, + ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols, + ScrapeFailureLogFile: globScrapeFailureLogFile, + MetricNameValidationScheme: UTF8ValidationConfig, + MetricNameEscapingScheme: model.AllowUTF8, HTTPClientConfig: config.DefaultHTTPClientConfig, MetricsPath: DefaultScrapeConfig.MetricsPath, @@ -1322,18 +1376,20 @@ var expectedConf = &Config{ { JobName: "linode-instances", 
- HonorTimestamps: true, - ScrapeInterval: model.Duration(15 * time.Second), - ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout, - EnableCompression: true, - BodySizeLimit: globBodySizeLimit, - SampleLimit: globSampleLimit, - TargetLimit: globTargetLimit, - LabelLimit: globLabelLimit, - LabelNameLengthLimit: globLabelNameLengthLimit, - LabelValueLengthLimit: globLabelValueLengthLimit, - ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols, - ScrapeFailureLogFile: globScrapeFailureLogFile, + HonorTimestamps: true, + ScrapeInterval: model.Duration(15 * time.Second), + ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout, + EnableCompression: true, + BodySizeLimit: globBodySizeLimit, + SampleLimit: globSampleLimit, + TargetLimit: globTargetLimit, + LabelLimit: globLabelLimit, + LabelNameLengthLimit: globLabelNameLengthLimit, + LabelValueLengthLimit: globLabelValueLengthLimit, + ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols, + ScrapeFailureLogFile: globScrapeFailureLogFile, + MetricNameValidationScheme: UTF8ValidationConfig, + MetricNameEscapingScheme: model.AllowUTF8, MetricsPath: DefaultScrapeConfig.MetricsPath, Scheme: DefaultScrapeConfig.Scheme, @@ -1358,18 +1414,20 @@ var expectedConf = &Config{ { JobName: "uyuni", - HonorTimestamps: true, - ScrapeInterval: model.Duration(15 * time.Second), - ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout, - EnableCompression: true, - BodySizeLimit: globBodySizeLimit, - SampleLimit: globSampleLimit, - TargetLimit: globTargetLimit, - LabelLimit: globLabelLimit, - LabelNameLengthLimit: globLabelNameLengthLimit, - LabelValueLengthLimit: globLabelValueLengthLimit, - ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols, - ScrapeFailureLogFile: globScrapeFailureLogFile, + HonorTimestamps: true, + ScrapeInterval: model.Duration(15 * time.Second), + ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout, + EnableCompression: true, + BodySizeLimit: globBodySizeLimit, + SampleLimit: globSampleLimit, + TargetLimit: globTargetLimit, + 
LabelLimit: globLabelLimit, + LabelNameLengthLimit: globLabelNameLengthLimit, + LabelValueLengthLimit: globLabelValueLengthLimit, + ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols, + ScrapeFailureLogFile: globScrapeFailureLogFile, + MetricNameValidationScheme: UTF8ValidationConfig, + MetricNameEscapingScheme: model.AllowUTF8, HTTPClientConfig: config.DefaultHTTPClientConfig, MetricsPath: DefaultScrapeConfig.MetricsPath, @@ -1387,19 +1445,21 @@ var expectedConf = &Config{ }, }, { - JobName: "ionos", - HonorTimestamps: true, - ScrapeInterval: model.Duration(15 * time.Second), - ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout, - EnableCompression: true, - BodySizeLimit: globBodySizeLimit, - SampleLimit: globSampleLimit, - TargetLimit: globTargetLimit, - LabelLimit: globLabelLimit, - LabelNameLengthLimit: globLabelNameLengthLimit, - LabelValueLengthLimit: globLabelValueLengthLimit, - ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols, - ScrapeFailureLogFile: globScrapeFailureLogFile, + JobName: "ionos", + HonorTimestamps: true, + ScrapeInterval: model.Duration(15 * time.Second), + ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout, + EnableCompression: true, + BodySizeLimit: globBodySizeLimit, + SampleLimit: globSampleLimit, + TargetLimit: globTargetLimit, + LabelLimit: globLabelLimit, + LabelNameLengthLimit: globLabelNameLengthLimit, + LabelValueLengthLimit: globLabelValueLengthLimit, + ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols, + ScrapeFailureLogFile: globScrapeFailureLogFile, + MetricNameValidationScheme: UTF8ValidationConfig, + MetricNameEscapingScheme: model.AllowUTF8, MetricsPath: DefaultScrapeConfig.MetricsPath, Scheme: DefaultScrapeConfig.Scheme, @@ -1421,18 +1481,20 @@ var expectedConf = &Config{ { JobName: "vultr", - HonorTimestamps: true, - ScrapeInterval: model.Duration(15 * time.Second), - ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout, - EnableCompression: true, - BodySizeLimit: globBodySizeLimit, - SampleLimit: globSampleLimit, - 
TargetLimit: globTargetLimit, - LabelLimit: globLabelLimit, - LabelNameLengthLimit: globLabelNameLengthLimit, - LabelValueLengthLimit: globLabelValueLengthLimit, - ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols, - ScrapeFailureLogFile: globScrapeFailureLogFile, + HonorTimestamps: true, + ScrapeInterval: model.Duration(15 * time.Second), + ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout, + EnableCompression: true, + BodySizeLimit: globBodySizeLimit, + SampleLimit: globSampleLimit, + TargetLimit: globTargetLimit, + LabelLimit: globLabelLimit, + LabelNameLengthLimit: globLabelNameLengthLimit, + LabelValueLengthLimit: globLabelValueLengthLimit, + ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols, + ScrapeFailureLogFile: globScrapeFailureLogFile, + MetricNameValidationScheme: UTF8ValidationConfig, + MetricNameEscapingScheme: model.AllowUTF8, MetricsPath: DefaultScrapeConfig.MetricsPath, Scheme: DefaultScrapeConfig.Scheme, @@ -2174,6 +2236,10 @@ var expectedErrors = []struct { filename: "scrape_config_files_fallback_scrape_protocol2.bad.yml", errMsg: `unmarshal errors`, }, + { + filename: "scrape_config_utf8_conflicting.bad.yml", + errMsg: `utf8 metric names requested but validation scheme is not set to UTF8`, + }, } func TestBadConfigs(t *testing.T) { @@ -2253,11 +2319,13 @@ func TestEmptyGlobalBlock(t *testing.T) { func TestGetScrapeConfigs(t *testing.T) { sc := func(jobName string, scrapeInterval, scrapeTimeout model.Duration) *ScrapeConfig { return &ScrapeConfig{ - JobName: jobName, - HonorTimestamps: true, - ScrapeInterval: scrapeInterval, - ScrapeTimeout: scrapeTimeout, - ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols, + JobName: jobName, + HonorTimestamps: true, + ScrapeInterval: scrapeInterval, + ScrapeTimeout: scrapeTimeout, + ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols, + MetricNameValidationScheme: UTF8ValidationConfig, + MetricNameEscapingScheme: model.AllowUTF8, MetricsPath: "/metrics", Scheme: "http", @@ -2290,12 +2358,12 @@ func 
TestGetScrapeConfigs(t *testing.T) { expectedResult: []*ScrapeConfig{sc("prometheus", model.Duration(60*time.Second), model.Duration(10*time.Second))}, }, { - name: "An global config that only include a scrape config file.", + name: "A global config that only include a scrape config file.", configFile: "testdata/scrape_config_files_only.good.yml", expectedResult: []*ScrapeConfig{sc("prometheus", model.Duration(60*time.Second), model.Duration(10*time.Second))}, }, { - name: "An global config that combine scrape config files and scrape configs.", + name: "A global config that combine scrape config files and scrape configs.", configFile: "testdata/scrape_config_files_combined.good.yml", expectedResult: []*ScrapeConfig{ sc("node", model.Duration(60*time.Second), model.Duration(10*time.Second)), @@ -2304,16 +2372,18 @@ func TestGetScrapeConfigs(t *testing.T) { }, }, { - name: "An global config that includes a scrape config file with globs", + name: "A global config that includes a scrape config file with globs", configFile: "testdata/scrape_config_files_glob.good.yml", expectedResult: []*ScrapeConfig{ { JobName: "prometheus", - HonorTimestamps: true, - ScrapeInterval: model.Duration(60 * time.Second), - ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout, - ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols, + HonorTimestamps: true, + ScrapeInterval: model.Duration(60 * time.Second), + ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout, + ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols, + MetricNameValidationScheme: UTF8ValidationConfig, + MetricNameEscapingScheme: model.AllowUTF8, MetricsPath: DefaultScrapeConfig.MetricsPath, Scheme: DefaultScrapeConfig.Scheme, @@ -2343,10 +2413,12 @@ func TestGetScrapeConfigs(t *testing.T) { { JobName: "node", - HonorTimestamps: true, - ScrapeInterval: model.Duration(15 * time.Second), - ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout, - ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols, + HonorTimestamps: true, + 
ScrapeInterval: model.Duration(15 * time.Second), + ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout, + ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols, + MetricNameValidationScheme: UTF8ValidationConfig, + MetricNameEscapingScheme: model.AllowUTF8, HTTPClientConfig: config.HTTPClientConfig{ TLSConfig: config.TLSConfig{ @@ -2380,17 +2452,17 @@ func TestGetScrapeConfigs(t *testing.T) { }, }, { - name: "An global config that includes twice the same scrape configs.", + name: "A global config that includes twice the same scrape configs.", configFile: "testdata/scrape_config_files_double_import.bad.yml", expectedError: `found multiple scrape configs with job name "prometheus"`, }, { - name: "An global config that includes a scrape config identical to a scrape config in the main file.", + name: "A global config that includes a scrape config identical to a scrape config in the main file.", configFile: "testdata/scrape_config_files_duplicate.bad.yml", expectedError: `found multiple scrape configs with job name "prometheus"`, }, { - name: "An global config that includes a scrape config file with errors.", + name: "A global config that includes a scrape config file with errors.", configFile: "testdata/scrape_config_files_global.bad.yml", expectedError: `scrape timeout greater than scrape interval for scrape config with job name "prometheus"`, }, @@ -2437,7 +2509,7 @@ func TestScrapeConfigNameValidationSettings(t *testing.T) { { name: "blank config implies default", inputFile: "scrape_config_default_validation_mode", - expectScheme: "", + expectScheme: "utf8", }, { name: "global setting implies local settings", @@ -2472,6 +2544,56 @@ func TestScrapeConfigNameValidationSettings(t *testing.T) { } } +func TestScrapeConfigNameEscapingSettings(t *testing.T) { + tests := []struct { + name string + inputFile string + expectValidationScheme string + expectEscapingScheme string + }{ + { + name: "blank config implies default", + inputFile: "scrape_config_default_validation_mode", 
+ expectValidationScheme: "utf8", + expectEscapingScheme: "allow-utf-8", + }, + { + name: "global setting implies local settings", + inputFile: "scrape_config_global_validation_mode", + expectValidationScheme: "legacy", + expectEscapingScheme: "dots", + }, + { + name: "local setting", + inputFile: "scrape_config_local_validation_mode", + expectValidationScheme: "legacy", + expectEscapingScheme: "values", + }, + { + name: "local setting overrides global setting", + inputFile: "scrape_config_local_global_validation_mode", + expectValidationScheme: "utf8", + expectEscapingScheme: "dots", + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + want, err := LoadFile(fmt.Sprintf("testdata/%s.yml", tc.inputFile), false, promslog.NewNopLogger()) + require.NoError(t, err) + + out, err := yaml.Marshal(want) + + require.NoError(t, err) + got := &Config{} + require.NoError(t, yaml.UnmarshalStrict(out, got)) + + require.Equal(t, tc.expectValidationScheme, got.ScrapeConfigs[0].MetricNameValidationScheme) + require.Equal(t, tc.expectEscapingScheme, got.ScrapeConfigs[0].MetricNameEscapingScheme) + }) + } +} + func TestScrapeProtocolHeader(t *testing.T) { tests := []struct { name string diff --git a/config/testdata/scrape_config_global_validation_mode.yml b/config/testdata/scrape_config_global_validation_mode.yml index e9b0618c709..fb4baf7b07c 100644 --- a/config/testdata/scrape_config_global_validation_mode.yml +++ b/config/testdata/scrape_config_global_validation_mode.yml @@ -1,4 +1,5 @@ global: metric_name_validation_scheme: legacy + metric_name_escaping_scheme: dots scrape_configs: - job_name: prometheus diff --git a/config/testdata/scrape_config_local_global_validation_mode.yml b/config/testdata/scrape_config_local_global_validation_mode.yml index 30b54834a56..29cd2b41409 100644 --- a/config/testdata/scrape_config_local_global_validation_mode.yml +++ b/config/testdata/scrape_config_local_global_validation_mode.yml @@ -1,5 +1,7 @@ global: 
metric_name_validation_scheme: legacy + metric_name_escaping_scheme: values scrape_configs: - job_name: prometheus metric_name_validation_scheme: utf8 + metric_name_escaping_scheme: dots diff --git a/config/testdata/scrape_config_local_validation_mode.yml b/config/testdata/scrape_config_local_validation_mode.yml index 90279ff0818..b4d1ff05df9 100644 --- a/config/testdata/scrape_config_local_validation_mode.yml +++ b/config/testdata/scrape_config_local_validation_mode.yml @@ -1,3 +1,4 @@ scrape_configs: - job_name: prometheus metric_name_validation_scheme: legacy + metric_name_escaping_scheme: values diff --git a/config/testdata/scrape_config_utf8_conflicting.bad.yml b/config/testdata/scrape_config_utf8_conflicting.bad.yml new file mode 100644 index 00000000000..3f1b8f87ac3 --- /dev/null +++ b/config/testdata/scrape_config_utf8_conflicting.bad.yml @@ -0,0 +1,5 @@ +global: + metric_name_validation_scheme: legacy + metric_name_escaping_scheme: allow-utf-8 +scrape_configs: + - job_name: prometheus diff --git a/docs/configuration/configuration.md b/docs/configuration/configuration.md index b6cdbe51bd4..8c18625e1ba 100644 --- a/docs/configuration/configuration.md +++ b/docs/configuration/configuration.md @@ -469,6 +469,22 @@ metric_relabel_configs: # underscores. [ metric_name_validation_scheme | default "utf8" ] +# Specifies the character escaping scheme that will be requested when scraping +# for metric and label names that do not conform to the legacy Prometheus +# character set. Available options are: +# * `allow-utf-8`: Full UTF-8 support, no escaping needed. +# * `underscores`: Escape all legacy-invalid characters to underscores. +# * `dots`: Escapes dots to `_dot_`, underscores to `__`, and all other +# legacy-invalid characters to underscores. +# * `values`: Prepend the name with `U__` and replace all invalid +# characters with their unicode value, surrounded by underscores. Single +# underscores are replaced with double underscores. +# e.g. 
"U__my_2e_dotted_2e_name". +# If this value is left blank, Prometheus will default to `allow-utf-8` if the +# validation scheme for the current scrape config is set to utf8, or +# `underscores` if the validation scheme is set to `legacy`. +[ metric_name_validation_scheme | default "utf8" ] + # Limit on total number of positive and negative buckets allowed in a single # native histogram. The resolution of a histogram with more buckets will be # reduced until the number of buckets is within the limit. If the limit cannot diff --git a/scrape/scrape.go b/scrape/scrape.go index 14c442fc258..eb385510c51 100644 --- a/scrape/scrape.go +++ b/scrape/scrape.go @@ -102,6 +102,9 @@ type scrapePool struct { scrapeFailureLogger FailureLogger scrapeFailureLoggerMtx sync.RWMutex + + validationScheme model.ValidationScheme + escapingScheme model.EscapingScheme } type labelLimits struct { @@ -124,7 +127,6 @@ type scrapeLoopOptions struct { timeout time.Duration alwaysScrapeClassicHist bool convertClassicHistToNHCB bool - validationScheme model.ValidationScheme fallbackScrapeProtocol string mrc []*relabel.Config @@ -147,6 +149,16 @@ func newScrapePool(cfg *config.ScrapeConfig, app storage.Appendable, offsetSeed return nil, fmt.Errorf("error creating HTTP client: %w", err) } + validationScheme, err := config.ToValidationScheme(cfg.MetricNameValidationScheme) + if err != nil { + return nil, fmt.Errorf("invalid metric name validation scheme: %w", err) + } + var escapingScheme model.EscapingScheme + escapingScheme, err = model.ToEscapingScheme(cfg.MetricNameEscapingScheme) + if err != nil { + return nil, fmt.Errorf("invalid metric name escaping scheme, %w", err) + } + ctx, cancel := context.WithCancel(context.Background()) sp := &scrapePool{ cancel: cancel, @@ -160,6 +172,8 @@ func newScrapePool(cfg *config.ScrapeConfig, app storage.Appendable, offsetSeed logger: logger, metrics: metrics, httpOpts: options.HTTPClientOptions, + validationScheme: validationScheme, + escapingScheme: 
escapingScheme, } sp.newLoop = func(opts scrapeLoopOptions) loop { // Update the targets retrieval function for metadata to a new scrape cache. @@ -201,7 +215,8 @@ func newScrapePool(cfg *config.ScrapeConfig, app storage.Appendable, offsetSeed options.PassMetadataInContext, metrics, options.skipOffsetting, - opts.validationScheme, + sp.validationScheme, + sp.escapingScheme, opts.fallbackScrapeProtocol, ) } @@ -309,6 +324,17 @@ func (sp *scrapePool) reload(cfg *config.ScrapeConfig) error { sp.config = cfg oldClient := sp.client sp.client = client + validationScheme, err := config.ToValidationScheme(cfg.MetricNameValidationScheme) + if err != nil { + return fmt.Errorf("invalid metric name validation scheme: %w", err) + } + sp.validationScheme = validationScheme + var escapingScheme model.EscapingScheme + escapingScheme, err = model.ToEscapingScheme(cfg.MetricNameEscapingScheme) + if err != nil { + return fmt.Errorf("invalid metric name escaping scheme, %w", err) + } + sp.escapingScheme = escapingScheme sp.metrics.targetScrapePoolTargetLimit.WithLabelValues(sp.config.JobName).Set(float64(sp.config.TargetLimit)) @@ -344,11 +370,6 @@ func (sp *scrapePool) restartLoops(reuseCache bool) { convertClassicHistToNHCB = sp.config.ConvertClassicHistogramsToNHCB ) - validationScheme := model.UTF8Validation - if sp.config.MetricNameValidationScheme == config.LegacyValidationConfig { - validationScheme = model.LegacyValidation - } - sp.targetMtx.Lock() forcedErr := sp.refreshTargetLimitErr() @@ -369,7 +390,7 @@ func (sp *scrapePool) restartLoops(reuseCache bool) { client: sp.client, timeout: targetTimeout, bodySizeLimit: bodySizeLimit, - acceptHeader: acceptHeader(sp.config.ScrapeProtocols, validationScheme), + acceptHeader: acceptHeader(sp.config.ScrapeProtocols, sp.escapingScheme), acceptEncodingHeader: acceptEncodingHeader(enableCompression), metrics: sp.metrics, } @@ -388,7 +409,6 @@ func (sp *scrapePool) restartLoops(reuseCache bool) { cache: cache, interval: targetInterval, 
timeout: targetTimeout, - validationScheme: validationScheme, fallbackScrapeProtocol: fallbackScrapeProtocol, alwaysScrapeClassicHist: alwaysScrapeClassicHist, convertClassicHistToNHCB: convertClassicHistToNHCB, @@ -506,11 +526,6 @@ func (sp *scrapePool) sync(targets []*Target) { convertClassicHistToNHCB = sp.config.ConvertClassicHistogramsToNHCB ) - validationScheme := model.UTF8Validation - if sp.config.MetricNameValidationScheme == config.LegacyValidationConfig { - validationScheme = model.LegacyValidation - } - sp.targetMtx.Lock() for _, t := range targets { hash := t.hash() @@ -526,7 +541,7 @@ func (sp *scrapePool) sync(targets []*Target) { client: sp.client, timeout: timeout, bodySizeLimit: bodySizeLimit, - acceptHeader: acceptHeader(sp.config.ScrapeProtocols, validationScheme), + acceptHeader: acceptHeader(sp.config.ScrapeProtocols, sp.escapingScheme), acceptEncodingHeader: acceptEncodingHeader(enableCompression), metrics: sp.metrics, } @@ -546,7 +561,6 @@ func (sp *scrapePool) sync(targets []*Target) { timeout: timeout, alwaysScrapeClassicHist: alwaysScrapeClassicHist, convertClassicHistToNHCB: convertClassicHistToNHCB, - validationScheme: validationScheme, fallbackScrapeProtocol: fallbackScrapeProtocol, }) if err != nil { @@ -777,13 +791,14 @@ var errBodySizeLimit = errors.New("body size limit exceeded") // acceptHeader transforms preference from the options into specific header values as // https://www.rfc-editor.org/rfc/rfc9110.html#name-accept defines. // No validation is here, we expect scrape protocols to be validated already. 
-func acceptHeader(sps []config.ScrapeProtocol, scheme model.ValidationScheme) string { +func acceptHeader(sps []config.ScrapeProtocol, scheme model.EscapingScheme) string { var vals []string weight := len(config.ScrapeProtocolsHeaders) + 1 for _, sp := range sps { val := config.ScrapeProtocolsHeaders[sp] - if scheme == model.UTF8Validation { - val += ";" + config.UTF8NamesHeader + // Escaping header is only valid for newer versions of the text formats. + if sp == config.PrometheusText1_0_0 || sp == config.OpenMetricsText1_0_0 { + val += ";" + model.EscapingKey + "=" + scheme.String() } val += fmt.Sprintf(";q=0.%d", weight) vals = append(vals, val) @@ -911,6 +926,7 @@ type scrapeLoop struct { alwaysScrapeClassicHist bool convertClassicHistToNHCB bool validationScheme model.ValidationScheme + escapingScheme model.EscapingScheme fallbackScrapeProtocol string // Feature flagged options. @@ -1230,6 +1246,7 @@ func newScrapeLoop(ctx context.Context, metrics *scrapeMetrics, skipOffsetting bool, validationScheme model.ValidationScheme, + escapingScheme model.EscapingScheme, fallbackScrapeProtocol string, ) *scrapeLoop { if l == nil { @@ -1284,6 +1301,7 @@ func newScrapeLoop(ctx context.Context, metrics: metrics, skipOffsetting: skipOffsetting, validationScheme: validationScheme, + escapingScheme: escapingScheme, fallbackScrapeProtocol: fallbackScrapeProtocol, } sl.ctx, sl.cancel = context.WithCancel(ctx) diff --git a/scrape/scrape_test.go b/scrape/scrape_test.go index 692fd75bcf2..6ac21ad46d8 100644 --- a/scrape/scrape_test.go +++ b/scrape/scrape_test.go @@ -82,10 +82,14 @@ func newTestScrapeMetrics(t testing.TB) *scrapeMetrics { func TestNewScrapePool(t *testing.T) { var ( - app = &nopAppendable{} - cfg = &config.ScrapeConfig{} - sp, _ = newScrapePool(cfg, app, 0, nil, nil, &Options{}, newTestScrapeMetrics(t)) + app = &nopAppendable{} + cfg = &config.ScrapeConfig{ + MetricNameValidationScheme: config.UTF8ValidationConfig, + MetricNameEscapingScheme: model.AllowUTF8, + } 
+ sp, err = newScrapePool(cfg, app, 0, nil, nil, &Options{}, newTestScrapeMetrics(t)) ) + require.NoError(t, err) a, ok := sp.appendable.(*nopAppendable) require.True(t, ok, "Failure to append.") @@ -321,8 +325,10 @@ func TestDroppedTargetsList(t *testing.T) { var ( app = &nopAppendable{} cfg = &config.ScrapeConfig{ - JobName: "dropMe", - ScrapeInterval: model.Duration(1), + JobName: "dropMe", + ScrapeInterval: model.Duration(1), + MetricNameValidationScheme: config.UTF8ValidationConfig, + MetricNameEscapingScheme: model.AllowUTF8, RelabelConfigs: []*relabel.Config{ { Action: relabel.Drop, @@ -366,8 +372,10 @@ func TestDiscoveredLabelsUpdate(t *testing.T) { // These are used when syncing so need this to avoid a panic. sp.config = &config.ScrapeConfig{ - ScrapeInterval: model.Duration(1), - ScrapeTimeout: model.Duration(1), + ScrapeInterval: model.Duration(1), + ScrapeTimeout: model.Duration(1), + MetricNameValidationScheme: config.UTF8ValidationConfig, + MetricNameEscapingScheme: model.AllowUTF8, } sp.activeTargets = make(map[uint64]*Target) t1 := &Target{ @@ -496,8 +504,10 @@ func TestScrapePoolReload(t *testing.T) { stopped := map[uint64]bool{} reloadCfg := &config.ScrapeConfig{ - ScrapeInterval: model.Duration(3 * time.Second), - ScrapeTimeout: model.Duration(2 * time.Second), + ScrapeInterval: model.Duration(3 * time.Second), + ScrapeTimeout: model.Duration(2 * time.Second), + MetricNameValidationScheme: config.UTF8ValidationConfig, + MetricNameEscapingScheme: model.AllowUTF8, } // On starting to run, new loops created on reload check whether their preceding // equivalents have been stopped. 
@@ -588,8 +598,10 @@ func TestScrapePoolReload(t *testing.T) { func TestScrapePoolReloadPreserveRelabeledIntervalTimeout(t *testing.T) { reloadCfg := &config.ScrapeConfig{ - ScrapeInterval: model.Duration(3 * time.Second), - ScrapeTimeout: model.Duration(2 * time.Second), + ScrapeInterval: model.Duration(3 * time.Second), + ScrapeTimeout: model.Duration(2 * time.Second), + MetricNameValidationScheme: config.UTF8ValidationConfig, + MetricNameEscapingScheme: model.AllowUTF8, } newLoop := func(opts scrapeLoopOptions) loop { l := &testLoop{interval: opts.interval, timeout: opts.timeout} @@ -687,9 +699,11 @@ func TestScrapePoolTargetLimit(t *testing.T) { reloadWithLimit := func(l uint) { limit = l require.NoError(t, sp.reload(&config.ScrapeConfig{ - ScrapeInterval: model.Duration(3 * time.Second), - ScrapeTimeout: model.Duration(2 * time.Second), - TargetLimit: l, + ScrapeInterval: model.Duration(3 * time.Second), + ScrapeTimeout: model.Duration(2 * time.Second), + MetricNameValidationScheme: config.UTF8ValidationConfig, + MetricNameEscapingScheme: model.AllowUTF8, + TargetLimit: l, })) } @@ -776,7 +790,10 @@ func TestScrapePoolTargetLimit(t *testing.T) { } func TestScrapePoolAppender(t *testing.T) { - cfg := &config.ScrapeConfig{} + cfg := &config.ScrapeConfig{ + MetricNameValidationScheme: config.UTF8ValidationConfig, + MetricNameEscapingScheme: model.AllowUTF8, + } app := &nopAppendable{} sp, _ := newScrapePool(cfg, app, 0, nil, nil, &Options{}, newTestScrapeMetrics(t)) @@ -849,7 +866,12 @@ func TestScrapePoolRaces(t *testing.T) { interval, _ := model.ParseDuration("1s") timeout, _ := model.ParseDuration("500ms") newConfig := func() *config.ScrapeConfig { - return &config.ScrapeConfig{ScrapeInterval: interval, ScrapeTimeout: timeout} + return &config.ScrapeConfig{ + ScrapeInterval: interval, + ScrapeTimeout: timeout, + MetricNameValidationScheme: config.UTF8ValidationConfig, + MetricNameEscapingScheme: model.AllowUTF8, + } } sp, _ := newScrapePool(newConfig(), 
&nopAppendable{}, 0, nil, nil, &Options{}, newTestScrapeMetrics(t)) tgts := []*targetgroup.Group{ @@ -919,8 +941,10 @@ func TestScrapePoolScrapeLoopsStarted(t *testing.T) { } require.NoError(t, sp.reload(&config.ScrapeConfig{ - ScrapeInterval: model.Duration(3 * time.Second), - ScrapeTimeout: model.Duration(2 * time.Second), + ScrapeInterval: model.Duration(3 * time.Second), + ScrapeTimeout: model.Duration(2 * time.Second), + MetricNameValidationScheme: config.UTF8ValidationConfig, + MetricNameEscapingScheme: model.AllowUTF8, })) sp.Sync(tgs) @@ -963,7 +987,8 @@ func newBasicScrapeLoopWithFallback(t testing.TB, ctx context.Context, scraper s false, newTestScrapeMetrics(t), false, - model.LegacyValidation, + model.UTF8Validation, + model.NoEscaping, fallback, ) } @@ -1109,7 +1134,8 @@ func TestScrapeLoopRun(t *testing.T) { false, scrapeMetrics, false, - model.LegacyValidation, + model.UTF8Validation, + model.NoEscaping, "", ) @@ -1256,7 +1282,8 @@ func TestScrapeLoopMetadata(t *testing.T) { false, scrapeMetrics, false, - model.LegacyValidation, + model.UTF8Validation, + model.NoEscaping, "", ) defer cancel() @@ -3041,6 +3068,47 @@ func TestScrapeLoopOutOfBoundsTimeError(t *testing.T) { require.Equal(t, 0, seriesAdded) } +func TestAcceptHeader(t *testing.T) { + tests := []struct { + name string + scrapeProtocols []config.ScrapeProtocol + scheme model.EscapingScheme + expectedHeader string + }{ + { + name: "default scrape protocols with underscore escaping", + scrapeProtocols: config.DefaultScrapeProtocols, + scheme: model.UnderscoreEscaping, + expectedHeader: "application/openmetrics-text;version=1.0.0;escaping=underscores;q=0.6,application/openmetrics-text;version=0.0.1;q=0.5,text/plain;version=1.0.0;escaping=underscores;q=0.4,text/plain;version=0.0.4;q=0.3,*/*;q=0.2", + }, + { + name: "default proto first scrape protocols with dots escaping", + scrapeProtocols: config.DefaultProtoFirstScrapeProtocols, + scheme: model.DotsEscaping, + expectedHeader: 
"application/vnd.google.protobuf;proto=io.prometheus.client.MetricFamily;encoding=delimited;q=0.6,application/openmetrics-text;version=1.0.0;escaping=dots;q=0.5,application/openmetrics-text;version=0.0.1;q=0.4,text/plain;version=1.0.0;escaping=dots;q=0.3,text/plain;version=0.0.4;q=0.2,*/*;q=0.1", + }, + { + name: "default scrape protocols with no escaping", + scrapeProtocols: config.DefaultScrapeProtocols, + scheme: model.NoEscaping, + expectedHeader: "application/openmetrics-text;version=1.0.0;escaping=allow-utf-8;q=0.6,application/openmetrics-text;version=0.0.1;q=0.5,text/plain;version=1.0.0;escaping=allow-utf-8;q=0.4,text/plain;version=0.0.4;q=0.3,*/*;q=0.2", + }, + { + name: "default proto first scrape protocols with no escaping", + scrapeProtocols: config.DefaultProtoFirstScrapeProtocols, + scheme: model.NoEscaping, + expectedHeader: "application/vnd.google.protobuf;proto=io.prometheus.client.MetricFamily;encoding=delimited;q=0.6,application/openmetrics-text;version=1.0.0;escaping=allow-utf-8;q=0.5,application/openmetrics-text;version=0.0.1;q=0.4,text/plain;version=1.0.0;escaping=allow-utf-8;q=0.3,text/plain;version=0.0.4;q=0.2,*/*;q=0.1", + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + header := acceptHeader(tc.scrapeProtocols, tc.scheme) + require.Equal(t, tc.expectedHeader, header) + }) + } +} + func TestTargetScraperScrapeOK(t *testing.T) { const ( configTimeout = 1500 * time.Millisecond @@ -3124,31 +3192,31 @@ func TestTargetScraperScrapeOK(t *testing.T) { for _, tc := range []struct { scrapeProtocols []config.ScrapeProtocol - scheme model.ValidationScheme + scheme model.EscapingScheme protobufParsing bool allowUTF8 bool }{ { scrapeProtocols: config.DefaultScrapeProtocols, - scheme: model.LegacyValidation, + scheme: model.UnderscoreEscaping, protobufParsing: false, allowUTF8: false, }, { scrapeProtocols: config.DefaultProtoFirstScrapeProtocols, - scheme: model.LegacyValidation, + scheme: model.UnderscoreEscaping, 
protobufParsing: true, allowUTF8: false, }, { scrapeProtocols: config.DefaultScrapeProtocols, - scheme: model.UTF8Validation, + scheme: model.NoEscaping, protobufParsing: false, allowUTF8: true, }, { scrapeProtocols: config.DefaultProtoFirstScrapeProtocols, - scheme: model.UTF8Validation, + scheme: model.NoEscaping, protobufParsing: true, allowUTF8: true, }, @@ -3185,7 +3253,7 @@ func TestTargetScrapeScrapeCancel(t *testing.T) { scrapeConfig: &config.ScrapeConfig{}, }, client: http.DefaultClient, - acceptHeader: acceptHeader(config.DefaultGlobalConfig.ScrapeProtocols, model.LegacyValidation), + acceptHeader: acceptHeader(config.DefaultGlobalConfig.ScrapeProtocols, model.UnderscoreEscaping), } ctx, cancel := context.WithCancel(context.Background()) @@ -3241,7 +3309,7 @@ func TestTargetScrapeScrapeNotFound(t *testing.T) { scrapeConfig: &config.ScrapeConfig{}, }, client: http.DefaultClient, - acceptHeader: acceptHeader(config.DefaultGlobalConfig.ScrapeProtocols, model.LegacyValidation), + acceptHeader: acceptHeader(config.DefaultGlobalConfig.ScrapeProtocols, model.UnderscoreEscaping), } resp, err := ts.scrape(context.Background()) @@ -3286,7 +3354,7 @@ func TestTargetScraperBodySizeLimit(t *testing.T) { }, client: http.DefaultClient, bodySizeLimit: bodySizeLimit, - acceptHeader: acceptHeader(config.DefaultGlobalConfig.ScrapeProtocols, model.LegacyValidation), + acceptHeader: acceptHeader(config.DefaultGlobalConfig.ScrapeProtocols, model.UnderscoreEscaping), metrics: newTestScrapeMetrics(t), } var buf bytes.Buffer @@ -3546,15 +3614,20 @@ func TestReuseScrapeCache(t *testing.T) { var ( app = &nopAppendable{} cfg = &config.ScrapeConfig{ - JobName: "Prometheus", - ScrapeTimeout: model.Duration(5 * time.Second), - ScrapeInterval: model.Duration(5 * time.Second), - MetricsPath: "/metrics", + JobName: "Prometheus", + ScrapeTimeout: model.Duration(5 * time.Second), + ScrapeInterval: model.Duration(5 * time.Second), + MetricsPath: "/metrics", + MetricNameValidationScheme: 
config.UTF8ValidationConfig, + MetricNameEscapingScheme: model.AllowUTF8, } sp, _ = newScrapePool(cfg, app, 0, nil, nil, &Options{}, newTestScrapeMetrics(t)) t1 = &Target{ - labels: labels.FromStrings("labelNew", "nameNew", "labelNew1", "nameNew1", "labelNew2", "nameNew2"), - scrapeConfig: &config.ScrapeConfig{}, + labels: labels.FromStrings("labelNew", "nameNew", "labelNew1", "nameNew1", "labelNew2", "nameNew2"), + scrapeConfig: &config.ScrapeConfig{ + MetricNameValidationScheme: config.UTF8ValidationConfig, + MetricNameEscapingScheme: model.AllowUTF8, + }, } proxyURL, _ = url.Parse("http://localhost:2128") ) @@ -3568,40 +3641,48 @@ func TestReuseScrapeCache(t *testing.T) { { keep: true, newConfig: &config.ScrapeConfig{ - JobName: "Prometheus", - ScrapeInterval: model.Duration(5 * time.Second), - ScrapeTimeout: model.Duration(5 * time.Second), - MetricsPath: "/metrics", + JobName: "Prometheus", + ScrapeInterval: model.Duration(5 * time.Second), + ScrapeTimeout: model.Duration(5 * time.Second), + MetricsPath: "/metrics", + MetricNameValidationScheme: config.UTF8ValidationConfig, + MetricNameEscapingScheme: model.AllowUTF8, }, }, { keep: false, newConfig: &config.ScrapeConfig{ - JobName: "Prometheus", - ScrapeInterval: model.Duration(5 * time.Second), - ScrapeTimeout: model.Duration(15 * time.Second), - MetricsPath: "/metrics2", + JobName: "Prometheus", + ScrapeInterval: model.Duration(5 * time.Second), + ScrapeTimeout: model.Duration(15 * time.Second), + MetricsPath: "/metrics2", + MetricNameValidationScheme: config.UTF8ValidationConfig, + MetricNameEscapingScheme: model.AllowUTF8, }, }, { keep: true, newConfig: &config.ScrapeConfig{ - JobName: "Prometheus", - SampleLimit: 400, - ScrapeInterval: model.Duration(5 * time.Second), - ScrapeTimeout: model.Duration(15 * time.Second), - MetricsPath: "/metrics2", + JobName: "Prometheus", + SampleLimit: 400, + ScrapeInterval: model.Duration(5 * time.Second), + ScrapeTimeout: model.Duration(15 * time.Second), + MetricsPath: 
"/metrics2", + MetricNameValidationScheme: config.UTF8ValidationConfig, + MetricNameEscapingScheme: model.AllowUTF8, }, }, { keep: false, newConfig: &config.ScrapeConfig{ - JobName: "Prometheus", - HonorTimestamps: true, - SampleLimit: 400, - ScrapeInterval: model.Duration(5 * time.Second), - ScrapeTimeout: model.Duration(15 * time.Second), - MetricsPath: "/metrics2", + JobName: "Prometheus", + HonorTimestamps: true, + SampleLimit: 400, + ScrapeInterval: model.Duration(5 * time.Second), + ScrapeTimeout: model.Duration(15 * time.Second), + MetricsPath: "/metrics2", + MetricNameValidationScheme: config.UTF8ValidationConfig, + MetricNameEscapingScheme: model.AllowUTF8, }, }, { @@ -3613,64 +3694,76 @@ func TestReuseScrapeCache(t *testing.T) { HTTPClientConfig: config_util.HTTPClientConfig{ ProxyConfig: config_util.ProxyConfig{ProxyURL: config_util.URL{URL: proxyURL}}, }, - ScrapeInterval: model.Duration(5 * time.Second), - ScrapeTimeout: model.Duration(15 * time.Second), - MetricsPath: "/metrics2", + ScrapeInterval: model.Duration(5 * time.Second), + ScrapeTimeout: model.Duration(15 * time.Second), + MetricsPath: "/metrics2", + MetricNameValidationScheme: config.UTF8ValidationConfig, + MetricNameEscapingScheme: model.AllowUTF8, }, }, { keep: false, newConfig: &config.ScrapeConfig{ - JobName: "Prometheus", - HonorTimestamps: true, - HonorLabels: true, - SampleLimit: 400, - ScrapeInterval: model.Duration(5 * time.Second), - ScrapeTimeout: model.Duration(15 * time.Second), - MetricsPath: "/metrics2", + JobName: "Prometheus", + HonorTimestamps: true, + HonorLabels: true, + SampleLimit: 400, + ScrapeInterval: model.Duration(5 * time.Second), + ScrapeTimeout: model.Duration(15 * time.Second), + MetricsPath: "/metrics2", + MetricNameValidationScheme: config.UTF8ValidationConfig, + MetricNameEscapingScheme: model.AllowUTF8, }, }, { keep: false, newConfig: &config.ScrapeConfig{ - JobName: "Prometheus", - ScrapeInterval: model.Duration(5 * time.Second), - ScrapeTimeout: 
model.Duration(15 * time.Second), - MetricsPath: "/metrics", - LabelLimit: 1, + JobName: "Prometheus", + ScrapeInterval: model.Duration(5 * time.Second), + ScrapeTimeout: model.Duration(15 * time.Second), + MetricsPath: "/metrics", + LabelLimit: 1, + MetricNameValidationScheme: config.UTF8ValidationConfig, + MetricNameEscapingScheme: model.AllowUTF8, }, }, { keep: false, newConfig: &config.ScrapeConfig{ - JobName: "Prometheus", - ScrapeInterval: model.Duration(5 * time.Second), - ScrapeTimeout: model.Duration(15 * time.Second), - MetricsPath: "/metrics", - LabelLimit: 15, + JobName: "Prometheus", + ScrapeInterval: model.Duration(5 * time.Second), + ScrapeTimeout: model.Duration(15 * time.Second), + MetricsPath: "/metrics", + LabelLimit: 15, + MetricNameValidationScheme: config.UTF8ValidationConfig, + MetricNameEscapingScheme: model.AllowUTF8, }, }, { keep: false, newConfig: &config.ScrapeConfig{ - JobName: "Prometheus", - ScrapeInterval: model.Duration(5 * time.Second), - ScrapeTimeout: model.Duration(15 * time.Second), - MetricsPath: "/metrics", - LabelLimit: 15, - LabelNameLengthLimit: 5, + JobName: "Prometheus", + ScrapeInterval: model.Duration(5 * time.Second), + ScrapeTimeout: model.Duration(15 * time.Second), + MetricsPath: "/metrics", + LabelLimit: 15, + LabelNameLengthLimit: 5, + MetricNameValidationScheme: config.UTF8ValidationConfig, + MetricNameEscapingScheme: model.AllowUTF8, }, }, { keep: false, newConfig: &config.ScrapeConfig{ - JobName: "Prometheus", - ScrapeInterval: model.Duration(5 * time.Second), - ScrapeTimeout: model.Duration(15 * time.Second), - MetricsPath: "/metrics", - LabelLimit: 15, - LabelNameLengthLimit: 5, - LabelValueLengthLimit: 7, + JobName: "Prometheus", + ScrapeInterval: model.Duration(5 * time.Second), + ScrapeTimeout: model.Duration(15 * time.Second), + MetricsPath: "/metrics", + LabelLimit: 15, + LabelNameLengthLimit: 5, + LabelValueLengthLimit: 7, + MetricNameValidationScheme: config.UTF8ValidationConfig, + 
MetricNameEscapingScheme: model.AllowUTF8, }, }, } @@ -3730,10 +3823,12 @@ func TestReuseCacheRace(t *testing.T) { var ( app = &nopAppendable{} cfg = &config.ScrapeConfig{ - JobName: "Prometheus", - ScrapeTimeout: model.Duration(5 * time.Second), - ScrapeInterval: model.Duration(5 * time.Second), - MetricsPath: "/metrics", + JobName: "Prometheus", + ScrapeTimeout: model.Duration(5 * time.Second), + ScrapeInterval: model.Duration(5 * time.Second), + MetricsPath: "/metrics", + MetricNameValidationScheme: config.UTF8ValidationConfig, + MetricNameEscapingScheme: model.AllowUTF8, } buffers = pool.New(1e3, 100e6, 3, func(sz int) interface{} { return make([]byte, 0, sz) }) sp, _ = newScrapePool(cfg, app, 0, nil, buffers, &Options{}, newTestScrapeMetrics(t)) @@ -3751,11 +3846,13 @@ func TestReuseCacheRace(t *testing.T) { break } sp.reload(&config.ScrapeConfig{ - JobName: "Prometheus", - ScrapeTimeout: model.Duration(1 * time.Millisecond), - ScrapeInterval: model.Duration(1 * time.Millisecond), - MetricsPath: "/metrics", - SampleLimit: i, + JobName: "Prometheus", + ScrapeTimeout: model.Duration(1 * time.Millisecond), + ScrapeInterval: model.Duration(1 * time.Millisecond), + MetricsPath: "/metrics", + SampleLimit: i, + MetricNameValidationScheme: config.UTF8ValidationConfig, + MetricNameEscapingScheme: model.AllowUTF8, }) } } @@ -3827,11 +3924,13 @@ func TestScrapeReportLimit(t *testing.T) { defer s.Close() cfg := &config.ScrapeConfig{ - JobName: "test", - SampleLimit: 5, - Scheme: "http", - ScrapeInterval: model.Duration(100 * time.Millisecond), - ScrapeTimeout: model.Duration(100 * time.Millisecond), + JobName: "test", + SampleLimit: 5, + Scheme: "http", + ScrapeInterval: model.Duration(100 * time.Millisecond), + ScrapeTimeout: model.Duration(100 * time.Millisecond), + MetricNameValidationScheme: config.UTF8ValidationConfig, + MetricNameEscapingScheme: model.AllowUTF8, } ts, scrapedTwice := newScrapableServer("metric_a 44\nmetric_b 44\nmetric_c 44\nmetric_d 44\n") @@ 
-3887,6 +3986,7 @@ func TestScrapeUTF8(t *testing.T) { ScrapeInterval: model.Duration(100 * time.Millisecond), ScrapeTimeout: model.Duration(100 * time.Millisecond), MetricNameValidationScheme: config.UTF8ValidationConfig, + MetricNameEscapingScheme: model.AllowUTF8, } ts, scrapedTwice := newScrapableServer("{\"with.dots\"} 42\n") defer ts.Close() @@ -4019,8 +4119,10 @@ func TestTargetScrapeIntervalAndTimeoutRelabel(t *testing.T) { interval, _ := model.ParseDuration("2s") timeout, _ := model.ParseDuration("500ms") config := &config.ScrapeConfig{ - ScrapeInterval: interval, - ScrapeTimeout: timeout, + ScrapeInterval: interval, + ScrapeTimeout: timeout, + MetricNameValidationScheme: config.UTF8ValidationConfig, + MetricNameEscapingScheme: model.AllowUTF8, RelabelConfigs: []*relabel.Config{ { SourceLabels: model.LabelNames{model.ScrapeIntervalLabel}, @@ -4077,10 +4179,12 @@ func TestLeQuantileReLabel(t *testing.T) { Action: relabel.Replace, }, }, - SampleLimit: 100, - Scheme: "http", - ScrapeInterval: model.Duration(100 * time.Millisecond), - ScrapeTimeout: model.Duration(100 * time.Millisecond), + SampleLimit: 100, + Scheme: "http", + ScrapeInterval: model.Duration(100 * time.Millisecond), + ScrapeTimeout: model.Duration(100 * time.Millisecond), + MetricNameValidationScheme: config.UTF8ValidationConfig, + MetricNameEscapingScheme: model.AllowUTF8, } metricsText := ` @@ -4586,6 +4690,8 @@ metric: < ScrapeTimeout: model.Duration(25 * time.Millisecond), AlwaysScrapeClassicHistograms: tc.alwaysScrapeClassicHistograms, ConvertClassicHistogramsToNHCB: tc.convertClassicHistToNHCB, + MetricNameValidationScheme: config.UTF8ValidationConfig, + MetricNameEscapingScheme: model.AllowUTF8, } scrapeCount := 0 @@ -4762,12 +4868,14 @@ func TestScrapeLoopCompression(t *testing.T) { defer ts.Close() config := &config.ScrapeConfig{ - JobName: "test", - SampleLimit: 100, - Scheme: "http", - ScrapeInterval: model.Duration(100 * time.Millisecond), - ScrapeTimeout: model.Duration(100 * 
time.Millisecond), - EnableCompression: tc.enableCompression, + JobName: "test", + SampleLimit: 100, + Scheme: "http", + ScrapeInterval: model.Duration(100 * time.Millisecond), + ScrapeTimeout: model.Duration(100 * time.Millisecond), + EnableCompression: tc.enableCompression, + MetricNameValidationScheme: config.UTF8ValidationConfig, + MetricNameEscapingScheme: model.AllowUTF8, } sp, err := newScrapePool(config, simpleStorage, 0, nil, nil, &Options{}, newTestScrapeMetrics(t)) @@ -4922,7 +5030,9 @@ func BenchmarkTargetScraperGzip(b *testing.B) { model.AddressLabel, serverURL.Host, ), scrapeConfig: &config.ScrapeConfig{ - Params: url.Values{"count": []string{strconv.Itoa(scenario.metricsCount)}}, + MetricNameValidationScheme: config.UTF8ValidationConfig, + MetricNameEscapingScheme: model.AllowUTF8, + Params: url.Values{"count": []string{strconv.Itoa(scenario.metricsCount)}}, }, }, client: client, @@ -5170,12 +5280,14 @@ func TestTargetScrapeConfigWithLabels(t *testing.T) { { name: "Everything in scrape config", cfg: &config.ScrapeConfig{ - ScrapeInterval: model.Duration(2 * time.Second), - ScrapeTimeout: model.Duration(configTimeout), - Params: url.Values{"param": []string{expectedParam}}, - JobName: jobName, - Scheme: httpScheme, - MetricsPath: expectedPath, + ScrapeInterval: model.Duration(2 * time.Second), + ScrapeTimeout: model.Duration(configTimeout), + Params: url.Values{"param": []string{expectedParam}}, + JobName: jobName, + Scheme: httpScheme, + MetricsPath: expectedPath, + MetricNameValidationScheme: config.UTF8ValidationConfig, + MetricNameEscapingScheme: model.AllowUTF8, }, targets: []*targetgroup.Group{ { @@ -5188,12 +5300,14 @@ func TestTargetScrapeConfigWithLabels(t *testing.T) { { name: "Overridden in target", cfg: &config.ScrapeConfig{ - ScrapeInterval: model.Duration(2 * time.Second), - ScrapeTimeout: model.Duration(secondTimeout), - JobName: jobName, - Scheme: httpScheme, - MetricsPath: secondPath, - Params: url.Values{"param": 
[]string{secondParam}}, + ScrapeInterval: model.Duration(2 * time.Second), + ScrapeTimeout: model.Duration(secondTimeout), + JobName: jobName, + Scheme: httpScheme, + MetricsPath: secondPath, + MetricNameValidationScheme: config.UTF8ValidationConfig, + MetricNameEscapingScheme: model.AllowUTF8, + Params: url.Values{"param": []string{secondParam}}, }, targets: []*targetgroup.Group{ { @@ -5211,12 +5325,14 @@ func TestTargetScrapeConfigWithLabels(t *testing.T) { { name: "Overridden in relabel_config", cfg: &config.ScrapeConfig{ - ScrapeInterval: model.Duration(2 * time.Second), - ScrapeTimeout: model.Duration(secondTimeout), - JobName: jobName, - Scheme: httpScheme, - MetricsPath: secondPath, - Params: url.Values{"param": []string{secondParam}}, + ScrapeInterval: model.Duration(2 * time.Second), + ScrapeTimeout: model.Duration(secondTimeout), + JobName: jobName, + Scheme: httpScheme, + MetricsPath: secondPath, + MetricNameValidationScheme: config.UTF8ValidationConfig, + MetricNameEscapingScheme: model.AllowUTF8, + Params: url.Values{"param": []string{secondParam}}, RelabelConfigs: []*relabel.Config{ { Action: relabel.DefaultRelabelConfig.Action, @@ -5290,12 +5406,14 @@ func TestScrapePoolScrapeAfterReload(t *testing.T) { t.Cleanup(h.Close) cfg := &config.ScrapeConfig{ - BodySizeLimit: 1, - JobName: "test", - Scheme: "http", - ScrapeInterval: model.Duration(100 * time.Millisecond), - ScrapeTimeout: model.Duration(100 * time.Millisecond), - EnableCompression: false, + BodySizeLimit: 1, + JobName: "test", + Scheme: "http", + ScrapeInterval: model.Duration(100 * time.Millisecond), + ScrapeTimeout: model.Duration(100 * time.Millisecond), + MetricNameValidationScheme: config.UTF8ValidationConfig, + MetricNameEscapingScheme: model.AllowUTF8, + EnableCompression: false, ServiceDiscoveryConfigs: discovery.Configs{ &discovery.StaticConfig{ {