Mirror of https://github.com/VictoriaMetrics/VictoriaMetrics.git (synced 2024-11-21 14:44:00 +00:00)
lib/promscrape: extract common auth code to lib/promauth
parent 1ce6c311dd
commit 7fbfef2aee
8 changed files with 329 additions and 350 deletions
lib/promauth/config.go (new file, 135 lines)
@@ -0,0 +1,135 @@
package promauth

import (
    "crypto/tls"
    "crypto/x509"
    "encoding/base64"
    "fmt"
    "io/ioutil"
)

// TLSConfig represents TLS config.
//
// See https://prometheus.io/docs/prometheus/latest/configuration/configuration/#tls_config
type TLSConfig struct {
    CAFile             string `yaml:"ca_file"`
    CertFile           string `yaml:"cert_file"`
    KeyFile            string `yaml:"key_file"`
    ServerName         string `yaml:"server_name"`
    InsecureSkipVerify bool   `yaml:"insecure_skip_verify"`
}

// BasicAuthConfig represents basic auth config.
type BasicAuthConfig struct {
    Username     string `yaml:"username"`
    Password     string `yaml:"password"`
    PasswordFile string `yaml:"password_file"`
}

// Config is auth config.
type Config struct {
    // Optional `Authorization` header.
    //
    // It may contain `Basic ....` or `Bearer ....` string.
    Authorization string

    // Optional TLS config
    TLSRootCA             *x509.CertPool
    TLSCertificate        *tls.Certificate
    TLSServerName         string
    TLSInsecureSkipVerify bool
}

// NewTLSConfig returns new TLS config for the given ac.
func (ac *Config) NewTLSConfig() *tls.Config {
    tlsCfg := &tls.Config{
        RootCAs:            ac.TLSRootCA,
        ClientSessionCache: tls.NewLRUClientSessionCache(0),
    }
    if ac.TLSCertificate != nil {
        tlsCfg.Certificates = []tls.Certificate{*ac.TLSCertificate}
    }
    tlsCfg.ServerName = ac.TLSServerName
    tlsCfg.InsecureSkipVerify = ac.TLSInsecureSkipVerify
    return tlsCfg
}

// NewConfig creates auth config from the given args.
func NewConfig(baseDir string, basicAuth *BasicAuthConfig, bearerToken, bearerTokenFile string, tlsConfig *TLSConfig) (*Config, error) {
    var authorization string
    if basicAuth != nil {
        if basicAuth.Username == "" {
            return nil, fmt.Errorf("missing `username` in `basic_auth` section")
        }
        username := basicAuth.Username
        password := basicAuth.Password
        if basicAuth.PasswordFile != "" {
            if basicAuth.Password != "" {
                return nil, fmt.Errorf("both `password`=%q and `password_file`=%q are set in `basic_auth` section", basicAuth.Password, basicAuth.PasswordFile)
            }
            path := getFilepath(baseDir, basicAuth.PasswordFile)
            pass, err := readPasswordFromFile(path)
            if err != nil {
                return nil, fmt.Errorf("cannot read password from `password_file`=%q set in `basic_auth` section: %s", basicAuth.PasswordFile, err)
            }
            password = pass
        }
        // See https://en.wikipedia.org/wiki/Basic_access_authentication
        token := username + ":" + password
        token64 := base64.StdEncoding.EncodeToString([]byte(token))
        authorization = "Basic " + token64
    }
    if bearerTokenFile != "" {
        if bearerToken != "" {
            return nil, fmt.Errorf("both `bearer_token`=%q and `bearer_token_file`=%q are set", bearerToken, bearerTokenFile)
        }
        path := getFilepath(baseDir, bearerTokenFile)
        token, err := readPasswordFromFile(path)
        if err != nil {
            return nil, fmt.Errorf("cannot read bearer token from `bearer_token_file`=%q: %s", bearerTokenFile, err)
        }
        bearerToken = token
    }
    if bearerToken != "" {
        if authorization != "" {
            return nil, fmt.Errorf("cannot use both `basic_auth` and `bearer_token`")
        }
        authorization = "Bearer " + bearerToken
    }
    var tlsRootCA *x509.CertPool
    var tlsCertificate *tls.Certificate
    tlsServerName := ""
    tlsInsecureSkipVerify := false
    if tlsConfig != nil {
        tlsServerName = tlsConfig.ServerName
        tlsInsecureSkipVerify = tlsConfig.InsecureSkipVerify
        if tlsConfig.CertFile != "" || tlsConfig.KeyFile != "" {
            certPath := getFilepath(baseDir, tlsConfig.CertFile)
            keyPath := getFilepath(baseDir, tlsConfig.KeyFile)
            cert, err := tls.LoadX509KeyPair(certPath, keyPath)
            if err != nil {
                return nil, fmt.Errorf("cannot load TLS certificate from `cert_file`=%q, `key_file`=%q: %s", tlsConfig.CertFile, tlsConfig.KeyFile, err)
            }
            tlsCertificate = &cert
        }
        if tlsConfig.CAFile != "" {
            path := getFilepath(baseDir, tlsConfig.CAFile)
            data, err := ioutil.ReadFile(path)
            if err != nil {
                return nil, fmt.Errorf("cannot read `ca_file` %q: %s", tlsConfig.CAFile, err)
            }
            tlsRootCA = x509.NewCertPool()
            if !tlsRootCA.AppendCertsFromPEM(data) {
                return nil, fmt.Errorf("cannot parse data from `ca_file` %q", tlsConfig.CAFile)
            }
        }
    }
    ac := &Config{
        Authorization:         authorization,
        TLSRootCA:             tlsRootCA,
        TLSCertificate:        tlsCertificate,
        TLSServerName:         tlsServerName,
        TLSInsecureSkipVerify: tlsInsecureSkipVerify,
    }
    return ac, nil
}
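The new package is self-contained, so a caller only has to build a promauth.Config once and reuse it for every request. The following is a minimal usage sketch, not part of this commit; the base directory, credentials and target URL are made up for illustration:

package main

import (
    "fmt"
    "net/http"

    "github.com/VictoriaMetrics/VictoriaMetrics/lib/promauth"
)

func main() {
    // Hypothetical scrape_config-level settings; basic_auth and bearer_token are mutually exclusive,
    // so only basic_auth is set here.
    ba := &promauth.BasicAuthConfig{
        Username: "scraper",
        Password: "secret",
    }
    tlsCfg := &promauth.TLSConfig{
        ServerName:         "target.example.com",
        InsecureSkipVerify: true,
    }
    // baseDir is only used for resolving relative file paths (password_file, cert_file, ca_file).
    ac, err := promauth.NewConfig("/etc/scraper", ba, "", "", tlsCfg)
    if err != nil {
        fmt.Println("invalid auth config:", err)
        return
    }

    // The resulting Config carries a ready-to-use `Authorization` header value
    // and can produce a *tls.Config for an HTTP client.
    client := &http.Client{
        Transport: &http.Transport{TLSClientConfig: ac.NewTLSConfig()},
    }
    req, err := http.NewRequest("GET", "https://target.example.com/metrics", nil)
    if err != nil {
        fmt.Println(err)
        return
    }
    if ac.Authorization != "" {
        req.Header.Set("Authorization", ac.Authorization)
    }
    resp, err := client.Do(req)
    if err != nil {
        fmt.Println("scrape failed:", err)
        return
    }
    defer resp.Body.Close()
    fmt.Println("status:", resp.StatusCode)
}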
lib/promauth/util.go (new file, 24 lines)
@@ -0,0 +1,24 @@
package promauth

import (
    "io/ioutil"
    "path/filepath"
    "strings"
    "unicode"
)

func getFilepath(baseDir, path string) string {
    if filepath.IsAbs(path) {
        return path
    }
    return filepath.Join(baseDir, path)
}

func readPasswordFromFile(path string) (string, error) {
    data, err := ioutil.ReadFile(path)
    if err != nil {
        return "", err
    }
    pass := strings.TrimRightFunc(string(data), unicode.IsSpace)
    return pass, nil
}
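These helpers encode two conventions the rest of the code relies on: relative paths from the config are resolved against the config file's directory, and secrets read from files have trailing whitespace (such as the final newline) trimmed. A hypothetical in-package test, not part of this commit, sketches both behaviors:

package promauth

import (
    "io/ioutil"
    "os"
    "path/filepath"
    "testing"
)

func TestFileHelpers(t *testing.T) {
    dir, err := ioutil.TempDir("", "promauth-test")
    if err != nil {
        t.Fatalf("cannot create temp dir: %s", err)
    }
    defer os.RemoveAll(dir)

    // `password_file` contents usually end with a newline; it must not leak into the header.
    path := filepath.Join(dir, "password")
    if err := ioutil.WriteFile(path, []byte("secret-pass\n"), 0600); err != nil {
        t.Fatalf("cannot write file: %s", err)
    }
    pass, err := readPasswordFromFile(path)
    if err != nil {
        t.Fatalf("unexpected error: %s", err)
    }
    if pass != "secret-pass" {
        t.Fatalf("unexpected password: %q", pass)
    }

    // Relative paths are resolved against baseDir; absolute paths are kept as-is.
    if got := getFilepath(dir, "password"); got != path {
        t.Fatalf("unexpected filepath: %q", got)
    }
    if got := getFilepath(dir, path); got != path {
        t.Fatalf("unexpected filepath for absolute path: %q", got)
    }
}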
@@ -36,7 +36,7 @@ func newClient(sw *ScrapeWork) *client {
     isTLS := string(u.Scheme()) == "https"
     var tlsCfg *tls.Config
     if isTLS {
-        tlsCfg = getTLSConfig(sw)
+        tlsCfg = sw.AuthConfig.NewTLSConfig()
     }
     if !strings.Contains(host, ":") {
         if !isTLS {

@@ -64,7 +64,7 @@ func newClient(sw *ScrapeWork) *client {
         scrapeURL:  sw.ScrapeURL,
         host:       host,
         requestURI: requestURI,
-        authHeader: sw.Authorization,
+        authHeader: sw.AuthConfig.Authorization,
     }
 }

@@ -120,16 +120,3 @@ var (
     scrapesGunzipped    = metrics.NewCounter(`vm_promscrape_scrapes_gunziped_total`)
     scrapesGunzipFailed = metrics.NewCounter(`vm_promscrape_scrapes_gunzip_failed_total`)
 )
-
-func getTLSConfig(sw *ScrapeWork) *tls.Config {
-    tlsCfg := &tls.Config{
-        RootCAs:            sw.TLSRootCA,
-        ClientSessionCache: tls.NewLRUClientSessionCache(0),
-    }
-    if sw.TLSCertificate != nil {
-        tlsCfg.Certificates = []tls.Certificate{*sw.TLSCertificate}
-    }
-    tlsCfg.ServerName = sw.TLSServerName
-    tlsCfg.InsecureSkipVerify = sw.TLSInsecureSkipVerify
-    return tlsCfg
-}
@@ -1,18 +1,15 @@
 package promscrape

 import (
-    "crypto/tls"
-    "crypto/x509"
-    "encoding/base64"
     "fmt"
     "io/ioutil"
     "net/url"
     "path/filepath"
     "strings"
     "time"
-    "unicode"

     "github.com/VictoriaMetrics/VictoriaMetrics/lib/logger"
+    "github.com/VictoriaMetrics/VictoriaMetrics/lib/promauth"
     "github.com/VictoriaMetrics/VictoriaMetrics/lib/prompbmarshal"
     "github.com/VictoriaMetrics/VictoriaMetrics/lib/promrelabel"
     "gopkg.in/yaml.v2"

@@ -48,10 +45,10 @@ type ScrapeConfig struct {
     HonorTimestamps bool                        `yaml:"honor_timestamps"`
     Scheme          string                      `yaml:"scheme"`
     Params          map[string][]string         `yaml:"params"`
-    BasicAuth       *BasicAuthConfig            `yaml:"basic_auth"`
+    BasicAuth       *promauth.BasicAuthConfig   `yaml:"basic_auth"`
     BearerToken     string                      `yaml:"bearer_token"`
     BearerTokenFile string                      `yaml:"bearer_token_file"`
-    TLSConfig       *TLSConfig                  `yaml:"tls_config"`
+    TLSConfig       *promauth.TLSConfig         `yaml:"tls_config"`
     StaticConfigs   []StaticConfig              `yaml:"static_configs"`
     FileSDConfigs   []FileSDConfig              `yaml:"file_sd_configs"`
     RelabelConfigs  []promrelabel.RelabelConfig `yaml:"relabel_configs"`
@@ -70,24 +67,6 @@ type FileSDConfig struct {
     // `refresh_interval` is ignored. See `-prometheus.fileSDCheckInterval`
 }

-// TLSConfig represents TLS config.
-//
-// See https://prometheus.io/docs/prometheus/latest/configuration/configuration/#tls_config
-type TLSConfig struct {
-    CAFile             string `yaml:"ca_file"`
-    CertFile           string `yaml:"cert_file"`
-    KeyFile            string `yaml:"key_file"`
-    ServerName         string `yaml:"server_name"`
-    InsecureSkipVerify bool   `yaml:"insecure_skip_verify"`
-}
-
-// BasicAuthConfig represents basic auth config.
-type BasicAuthConfig struct {
-    Username     string `yaml:"username"`
-    Password     string `yaml:"password"`
-    PasswordFile string `yaml:"password_file"`
-}
-
 // StaticConfig represents essential parts for `static_config` section of Prometheus config.
 //
 // See https://prometheus.io/docs/prometheus/latest/configuration/configuration/#static_config

@@ -150,36 +129,7 @@ func (cfg *Config) fileSDConfigsCount() int {
 }

 // getFileSDScrapeWork returns `file_sd_configs` ScrapeWork from cfg.
-func (cfg *Config) getFileSDScrapeWork(prev []ScrapeWork) ([]ScrapeWork, error) {
-    var sws []ScrapeWork
-    for i := range cfg.ScrapeConfigs {
-        var err error
-        sws, err = cfg.ScrapeConfigs[i].appendFileSDScrapeWork(sws, prev, cfg.baseDir)
-        if err != nil {
-            return nil, fmt.Errorf("error when parsing `scrape_config` #%d: %s", i+1, err)
-        }
-    }
-    return sws, nil
-}
-
-// getStaticScrapeWork returns `static_configs` ScrapeWork from from cfg.
-func (cfg *Config) getStaticScrapeWork() ([]ScrapeWork, error) {
-    var sws []ScrapeWork
-    for i := range cfg.ScrapeConfigs {
-        var err error
-        sws, err = cfg.ScrapeConfigs[i].appendStaticScrapeWork(sws)
-        if err != nil {
-            return nil, fmt.Errorf("error when parsing `scrape_config` #%d: %s", i+1, err)
-        }
-    }
-    return sws, nil
-}
-
-func (sc *ScrapeConfig) appendFileSDScrapeWork(dst, prev []ScrapeWork, baseDir string) ([]ScrapeWork, error) {
-    if len(sc.FileSDConfigs) == 0 {
-        // Fast path - no `file_sd_configs`
-        return dst, nil
-    }
+func (cfg *Config) getFileSDScrapeWork(prev []ScrapeWork) []ScrapeWork {
     // Create a map for the previous scrape work.
     swPrev := make(map[string][]ScrapeWork)
     for i := range prev {
@@ -191,25 +141,24 @@ func (sc *ScrapeConfig) appendFileSDScrapeWork(dst, prev []ScrapeWork, baseDir s
             swPrev[label.Value] = append(swPrev[label.Value], *sw)
         }
     }
-    for i := range sc.FileSDConfigs {
-        var err error
-        dst, err = sc.FileSDConfigs[i].appendScrapeWork(dst, swPrev, baseDir, sc.swc)
-        if err != nil {
-            return nil, fmt.Errorf("error when parsing `file_sd_config` #%d: %s", i+1, err)
+    var dst []ScrapeWork
+    for _, sc := range cfg.ScrapeConfigs {
+        for _, sdc := range sc.FileSDConfigs {
+            dst = sdc.appendScrapeWork(dst, swPrev, cfg.baseDir, sc.swc)
         }
     }
-    return dst, nil
+    return dst
 }

-func (sc *ScrapeConfig) appendStaticScrapeWork(dst []ScrapeWork) ([]ScrapeWork, error) {
-    for i := range sc.StaticConfigs {
-        var err error
-        dst, err = sc.StaticConfigs[i].appendScrapeWork(dst, sc.swc)
-        if err != nil {
-            return nil, fmt.Errorf("error when parsing `static_config` #%d: %s", i+1, err)
+// getStaticScrapeWork returns `static_configs` ScrapeWork from from cfg.
+func (cfg *Config) getStaticScrapeWork() []ScrapeWork {
+    var dst []ScrapeWork
+    for _, sc := range cfg.ScrapeConfigs {
+        for _, stc := range sc.StaticConfigs {
+            dst = stc.appendScrapeWork(dst, sc.swc, nil)
         }
     }
-    return dst, nil
+    return dst
 }

 func getScrapeWorkConfig(sc *ScrapeConfig, baseDir string, globalCfg *GlobalConfig) (*scrapeWorkConfig, error) {
@@ -245,79 +194,10 @@ func getScrapeWorkConfig(sc *ScrapeConfig, baseDir string, globalCfg *GlobalConf
         return nil, fmt.Errorf("unexpected `scheme` for `job_name` %q: %q; supported values: http or https", jobName, scheme)
     }
     params := sc.Params
-    var authorization string
-    if sc.BasicAuth != nil {
-        if sc.BasicAuth.Username == "" {
-            return nil, fmt.Errorf("missing `username` in `basic_auth` section for `job_name` %q", jobName)
-        }
-        username := sc.BasicAuth.Username
-        password := sc.BasicAuth.Password
-        if sc.BasicAuth.PasswordFile != "" {
-            if sc.BasicAuth.Password != "" {
-                return nil, fmt.Errorf("both `password`=%q and `password_file`=%q are set in `basic_auth` section for `job_name` %q",
-                    sc.BasicAuth.Password, sc.BasicAuth.PasswordFile, jobName)
-            }
-            path := getFilepath(baseDir, sc.BasicAuth.PasswordFile)
-            pass, err := readPasswordFromFile(path)
-            if err != nil {
-                return nil, fmt.Errorf("cannot read password from `password_file`=%q set in `basic_auth` section for `job_name` %q: %s",
-                    sc.BasicAuth.PasswordFile, jobName, err)
-            }
-            password = pass
-        }
-        // See https://en.wikipedia.org/wiki/Basic_access_authentication
-        token := username + ":" + password
-        token64 := base64.StdEncoding.EncodeToString([]byte(token))
-        authorization = "Basic " + token64
+    ac, err := promauth.NewConfig(baseDir, sc.BasicAuth, sc.BearerToken, sc.BearerTokenFile, sc.TLSConfig)
+    if err != nil {
+        return nil, fmt.Errorf("cannot parse auth config for `job_name` %q: %s", jobName, err)
     }
-    bearerToken := sc.BearerToken
-    if sc.BearerTokenFile != "" {
-        if sc.BearerToken != "" {
-            return nil, fmt.Errorf("both `bearer_token`=%q and `bearer_token_file`=%q are set for `job_name` %q", sc.BearerToken, sc.BearerTokenFile, jobName)
-        }
-        path := getFilepath(baseDir, sc.BearerTokenFile)
-        token, err := readPasswordFromFile(path)
-        if err != nil {
-            return nil, fmt.Errorf("cannot read bearer token from `bearer_token_file`=%q for `job_name` %q: %s", sc.BearerTokenFile, jobName, err)
-        }
-        bearerToken = token
-    }
-    if bearerToken != "" {
-        if authorization != "" {
-            return nil, fmt.Errorf("cannot use both `basic_auth` and `bearer_token` for `job_name` %q", jobName)
-        }
-        authorization = "Bearer " + bearerToken
-    }
-    var tlsRootCA *x509.CertPool
-    var tlsCertificate *tls.Certificate
-    tlsServerName := ""
-    tlsInsecureSkipVerify := false
-    if sc.TLSConfig != nil {
-        tlsServerName = sc.TLSConfig.ServerName
-        tlsInsecureSkipVerify = sc.TLSConfig.InsecureSkipVerify
-        if sc.TLSConfig.CertFile != "" || sc.TLSConfig.KeyFile != "" {
-            certPath := getFilepath(baseDir, sc.TLSConfig.CertFile)
-            keyPath := getFilepath(baseDir, sc.TLSConfig.KeyFile)
-            cert, err := tls.LoadX509KeyPair(certPath, keyPath)
-            if err != nil {
-                return nil, fmt.Errorf("cannot load TLS certificate for `job_name` %q from `cert_file`=%q, `key_file`=%q: %s",
-                    jobName, sc.TLSConfig.CertFile, sc.TLSConfig.KeyFile, err)
-            }
-            tlsCertificate = &cert
-        }
-        if sc.TLSConfig.CAFile != "" {
-            path := getFilepath(baseDir, sc.TLSConfig.CAFile)
-            data, err := ioutil.ReadFile(path)
-            if err != nil {
-                return nil, fmt.Errorf("cannot read `ca_file` %q for `job_name` %q: %s", sc.TLSConfig.CAFile, jobName, err)
-            }
-            tlsRootCA = x509.NewCertPool()
-            if !tlsRootCA.AppendCertsFromPEM(data) {
-                return nil, fmt.Errorf("cannot parse data from `ca_file` %q for `job_name` %q", sc.TLSConfig.CAFile, jobName)
-            }
-        }
-    }
-    var err error
     var relabelConfigs []promrelabel.ParsedRelabelConfig
     relabelConfigs, err = promrelabel.ParseRelabelConfigs(relabelConfigs[:0], sc.RelabelConfigs)
     if err != nil {
@@ -330,49 +210,40 @@ func getScrapeWorkConfig(sc *ScrapeConfig, baseDir string, globalCfg *GlobalConf
     }
     scrapeLimit := sc.ScrapeLimit
     swc := &scrapeWorkConfig{
-        scrapeInterval:        scrapeInterval,
-        scrapeTimeout:         scrapeTimeout,
-        jobName:               jobName,
-        metricsPath:           metricsPath,
-        scheme:                scheme,
-        params:                params,
-        authorization:         authorization,
-        honorLabels:           honorLabels,
-        honorTimestamps:       honorTimestamps,
-        externalLabels:        globalCfg.ExternalLabels,
-        tlsRootCA:             tlsRootCA,
-        tlsCertificate:        tlsCertificate,
-        tlsServerName:         tlsServerName,
-        tlsInsecureSkipVerify: tlsInsecureSkipVerify,
-        relabelConfigs:        relabelConfigs,
-        metricRelabelConfigs:  metricRelabelConfigs,
-        scrapeLimit:           scrapeLimit,
+        scrapeInterval:       scrapeInterval,
+        scrapeTimeout:        scrapeTimeout,
+        jobName:              jobName,
+        metricsPath:          metricsPath,
+        scheme:               scheme,
+        params:               params,
+        authConfig:           ac,
+        honorLabels:          honorLabels,
+        honorTimestamps:      honorTimestamps,
+        externalLabels:       globalCfg.ExternalLabels,
+        relabelConfigs:       relabelConfigs,
+        metricRelabelConfigs: metricRelabelConfigs,
+        scrapeLimit:          scrapeLimit,
     }
     return swc, nil
 }

 type scrapeWorkConfig struct {
-    scrapeInterval        time.Duration
-    scrapeTimeout         time.Duration
-    jobName               string
-    metricsPath           string
-    scheme                string
-    params                map[string][]string
-    authorization         string
-    honorLabels           bool
-    honorTimestamps       bool
-    externalLabels        map[string]string
-    tlsRootCA             *x509.CertPool
-    tlsCertificate        *tls.Certificate
-    tlsServerName         string
-    tlsInsecureSkipVerify bool
-    relabelConfigs        []promrelabel.ParsedRelabelConfig
-    metricRelabelConfigs  []promrelabel.ParsedRelabelConfig
-    scrapeLimit           int
-    metaLabels            map[string]string
+    scrapeInterval       time.Duration
+    scrapeTimeout        time.Duration
+    jobName              string
+    metricsPath          string
+    scheme               string
+    params               map[string][]string
+    authConfig           *promauth.Config
+    honorLabels          bool
+    honorTimestamps      bool
+    externalLabels       map[string]string
+    relabelConfigs       []promrelabel.ParsedRelabelConfig
+    metricRelabelConfigs []promrelabel.ParsedRelabelConfig
+    scrapeLimit          int
 }

-func (sdc *FileSDConfig) appendScrapeWork(dst []ScrapeWork, swPrev map[string][]ScrapeWork, baseDir string, swc *scrapeWorkConfig) ([]ScrapeWork, error) {
+func (sdc *FileSDConfig) appendScrapeWork(dst []ScrapeWork, swPrev map[string][]ScrapeWork, baseDir string, swc *scrapeWorkConfig) []ScrapeWork {
     for _, file := range sdc.Files {
         pathPattern := getFilepath(baseDir, file)
         paths := []string{pathPattern}
@@ -380,7 +251,9 @@ func (sdc *FileSDConfig) appendScrapeWork(dst []ScrapeWork, swPrev map[string][]
             var err error
             paths, err = filepath.Glob(pathPattern)
             if err != nil {
-                return nil, fmt.Errorf("invalid pattern %q in `files` section: %s", file, err)
+                // Do not return this error, since other files may contain valid scrape configs.
+                logger.Errorf("invalid pattern %q in `files` section: %s; skipping it", file, err)
+                continue
             }
         }
         for _, path := range paths {

@@ -396,7 +269,6 @@ func (sdc *FileSDConfig) appendScrapeWork(dst []ScrapeWork, swPrev map[string][]
                 }
                 continue
             }
-            swcCopy := *swc
             pathShort := path
             if strings.HasPrefix(pathShort, baseDir) {
                 pathShort = path[len(baseDir):]
@@ -404,88 +276,89 @@ func (sdc *FileSDConfig) appendScrapeWork(dst []ScrapeWork, swPrev map[string][]
                     pathShort = pathShort[1:]
                 }
             }
-            swcCopy.metaLabels = map[string]string{
+            metaLabels := map[string]string{
                 "__meta_filepath": pathShort,
             }
             for i := range stcs {
-                dst, err = stcs[i].appendScrapeWork(dst, &swcCopy)
-                if err != nil {
-                    // Do not return this error, since other paths may contain valid scrape configs.
-                    logger.Errorf("error when parsing `static_config` #%d from %q: %s", i+1, path, err)
-                    continue
-                }
+                dst = stcs[i].appendScrapeWork(dst, swc, metaLabels)
             }
         }
     }
-    return dst, nil
+    return dst
 }

-func (stc *StaticConfig) appendScrapeWork(dst []ScrapeWork, swc *scrapeWorkConfig) ([]ScrapeWork, error) {
+func (stc *StaticConfig) appendScrapeWork(dst []ScrapeWork, swc *scrapeWorkConfig, metaLabels map[string]string) []ScrapeWork {
     for _, target := range stc.Targets {
         if target == "" {
-            return nil, fmt.Errorf("`static_configs` target for `job_name` %q cannot be empty", swc.jobName)
+            // Do not return this error, since other targets may be valid
+            logger.Errorf("`static_configs` target for `job_name` %q cannot be empty; skipping it", swc.jobName)
+            continue
         }
-        labels, err := mergeLabels(swc.jobName, swc.scheme, target, swc.metricsPath, stc.Labels, swc.externalLabels, swc.metaLabels, swc.params)
+        var err error
+        dst, err = appendScrapeWork(dst, swc, target, stc.Labels, metaLabels)
         if err != nil {
-            return nil, fmt.Errorf("cannot merge labels for `static_configs` target for `job_name` %q: %s", swc.jobName, err)
-        }
-        labels = promrelabel.ApplyRelabelConfigs(labels, 0, swc.relabelConfigs, false)
-        if len(labels) == 0 {
-            // Drop target without labels.
+            // Do not return this error, since other targets may be valid
+            logger.Errorf("error when parsing `static_configs` target %q for `job_name` %q: %s; skipping it", target, swc.jobName, err)
             continue
         }
-        // See https://www.robustperception.io/life-of-a-label
-        schemeRelabeled := ""
-        if schemeLabel := promrelabel.GetLabelByName(labels, "__scheme__"); schemeLabel != nil {
-            schemeRelabeled = schemeLabel.Value
-        }
-        if schemeRelabeled == "" {
-            schemeRelabeled = "http"
-        }
-        addressLabel := promrelabel.GetLabelByName(labels, "__address__")
-        if addressLabel == nil || addressLabel.Name == "" {
-            // Drop target without scrape address.
-            continue
-        }
-        targetRelabeled := addMissingPort(schemeRelabeled, addressLabel.Value)
-        if strings.Contains(targetRelabeled, "/") {
-            // Drop target with '/'
-            continue
-        }
-        metricsPathRelabeled := ""
-        if metricsPathLabel := promrelabel.GetLabelByName(labels, "__metrics_path__"); metricsPathLabel != nil {
-            metricsPathRelabeled = metricsPathLabel.Value
-        }
-        if metricsPathRelabeled == "" {
-            metricsPathRelabeled = "/metrics"
-        }
-        paramsRelabeled := getParamsFromLabels(labels, swc.params)
-        optionalQuestion := "?"
-        if len(paramsRelabeled) == 0 || strings.Contains(metricsPathRelabeled, "?") {
-            optionalQuestion = ""
-        }
-        paramsStr := url.Values(paramsRelabeled).Encode()
-        scrapeURL := fmt.Sprintf("%s://%s%s%s%s", schemeRelabeled, targetRelabeled, metricsPathRelabeled, optionalQuestion, paramsStr)
-        if _, err := url.Parse(scrapeURL); err != nil {
-            return nil, fmt.Errorf("invalid url %q for scheme=%q (%q), target=%q (%q), metrics_path=%q (%q) for `job_name` %q: %s",
-                scrapeURL, swc.scheme, schemeRelabeled, target, targetRelabeled, swc.metricsPath, metricsPathRelabeled, swc.jobName, err)
-        }
-        dst = append(dst, ScrapeWork{
-            ScrapeURL:             scrapeURL,
-            ScrapeInterval:        swc.scrapeInterval,
-            ScrapeTimeout:         swc.scrapeTimeout,
-            HonorLabels:           swc.honorLabels,
-            HonorTimestamps:       swc.honorTimestamps,
-            Labels:                labels,
-            Authorization:         swc.authorization,
-            TLSRootCA:             swc.tlsRootCA,
-            TLSCertificate:        swc.tlsCertificate,
-            TLSServerName:         swc.tlsServerName,
-            TLSInsecureSkipVerify: swc.tlsInsecureSkipVerify,
-            MetricRelabelConfigs:  swc.metricRelabelConfigs,
-            ScrapeLimit:           swc.scrapeLimit,
-        })
     }
+    return dst
+}
+
+func appendScrapeWork(dst []ScrapeWork, swc *scrapeWorkConfig, target string, extraLabels, metaLabels map[string]string) ([]ScrapeWork, error) {
+    labels := mergeLabels(swc.jobName, swc.scheme, target, swc.metricsPath, extraLabels, swc.externalLabels, metaLabels, swc.params)
+    labels = promrelabel.ApplyRelabelConfigs(labels, 0, swc.relabelConfigs, false)
+    if len(labels) == 0 {
+        // Drop target without labels.
+        return dst, nil
+    }
+    // See https://www.robustperception.io/life-of-a-label
+    schemeRelabeled := ""
+    if schemeLabel := promrelabel.GetLabelByName(labels, "__scheme__"); schemeLabel != nil {
+        schemeRelabeled = schemeLabel.Value
+    }
+    if schemeRelabeled == "" {
+        schemeRelabeled = "http"
+    }
+    addressLabel := promrelabel.GetLabelByName(labels, "__address__")
+    if addressLabel == nil || addressLabel.Name == "" {
+        // Drop target without scrape address.
+        return dst, nil
+    }
+    targetRelabeled := addMissingPort(schemeRelabeled, addressLabel.Value)
+    if strings.Contains(targetRelabeled, "/") {
+        // Drop target with '/'
+        return dst, nil
+    }
+    metricsPathRelabeled := ""
+    if metricsPathLabel := promrelabel.GetLabelByName(labels, "__metrics_path__"); metricsPathLabel != nil {
+        metricsPathRelabeled = metricsPathLabel.Value
+    }
+    if metricsPathRelabeled == "" {
+        metricsPathRelabeled = "/metrics"
+    }
+    paramsRelabeled := getParamsFromLabels(labels, swc.params)
+    optionalQuestion := "?"
+    if len(paramsRelabeled) == 0 || strings.Contains(metricsPathRelabeled, "?") {
+        optionalQuestion = ""
+    }
+    paramsStr := url.Values(paramsRelabeled).Encode()
+    scrapeURL := fmt.Sprintf("%s://%s%s%s%s", schemeRelabeled, targetRelabeled, metricsPathRelabeled, optionalQuestion, paramsStr)
+    if _, err := url.Parse(scrapeURL); err != nil {
+        return dst, fmt.Errorf("invalid url %q for scheme=%q (%q), target=%q (%q), metrics_path=%q (%q) for `job_name` %q: %s",
+            scrapeURL, swc.scheme, schemeRelabeled, target, targetRelabeled, swc.metricsPath, metricsPathRelabeled, swc.jobName, err)
+    }
+    dst = append(dst, ScrapeWork{
+        ScrapeURL:            scrapeURL,
+        ScrapeInterval:       swc.scrapeInterval,
+        ScrapeTimeout:        swc.scrapeTimeout,
+        HonorLabels:          swc.honorLabels,
+        HonorTimestamps:      swc.honorTimestamps,
+        Labels:               labels,
+        AuthConfig:           swc.authConfig,
+        MetricRelabelConfigs: swc.metricRelabelConfigs,
+        ScrapeLimit:          swc.scrapeLimit,
+    })
     return dst, nil
 }
@@ -507,7 +380,7 @@ func getParamsFromLabels(labels []prompbmarshal.Label, paramsOrig map[string][]s
     return m
 }

-func mergeLabels(job, scheme, target, metricsPath string, labels, externalLabels, metaLabels map[string]string, params map[string][]string) ([]prompbmarshal.Label, error) {
+func mergeLabels(job, scheme, target, metricsPath string, extraLabels, externalLabels, metaLabels map[string]string, params map[string][]string) []prompbmarshal.Label {
     // See https://prometheus.io/docs/prometheus/latest/configuration/configuration/#relabel_config
     m := make(map[string]string)
     for k, v := range externalLabels {

@@ -525,7 +398,7 @@ func mergeLabels(job, scheme, target, metricsPath string, labels, externalLabels
         v := args[0]
         m[k] = v
     }
-    for k, v := range labels {
+    for k, v := range extraLabels {
         m[k] = v
     }
     for k, v := range metaLabels {

@@ -538,7 +411,7 @@ func mergeLabels(job, scheme, target, metricsPath string, labels, externalLabels
             Value: v,
         })
     }
-    return result, nil
+    return result
 }

 func getFilepath(baseDir, path string) string {

@@ -548,15 +421,6 @@ func getFilepath(baseDir, path string) string {
     return filepath.Join(baseDir, path)
 }

-func readPasswordFromFile(path string) (string, error) {
-    data, err := ioutil.ReadFile(path)
-    if err != nil {
-        return "", err
-    }
-    pass := strings.TrimRightFunc(string(data), unicode.IsSpace)
-    return pass, nil
-}
-
 func addMissingPort(scheme, target string) string {
     if strings.Contains(target, ":") {
         return target
@@ -8,6 +8,7 @@ import (
     "testing"
     "time"

+    "github.com/VictoriaMetrics/VictoriaMetrics/lib/promauth"
     "github.com/VictoriaMetrics/VictoriaMetrics/lib/prompbmarshal"
     "github.com/VictoriaMetrics/VictoriaMetrics/lib/promrelabel"
 )

@@ -79,10 +80,7 @@ scrape_configs:
     if err := cfg.parse([]byte(data), "sss"); err != nil {
         t.Fatalf("cannot parase data: %s", err)
     }
-    sws, err := cfg.getFileSDScrapeWork(nil)
-    if err != nil {
-        t.Fatalf("cannot obtain `file_sd_config`: %s", err)
-    }
+    sws := cfg.getFileSDScrapeWork(nil)
     if !equalStaticConfigForScrapeWorks(sws, sws) {
         t.Fatalf("unexpected non-equal static configs;\nsws:\n%#v", sws)
     }

@@ -98,10 +96,7 @@ scrape_configs:
     if err := cfgNew.parse([]byte(dataNew), "sss"); err != nil {
         t.Fatalf("cannot parse data: %s", err)
     }
-    swsNew, err := cfgNew.getFileSDScrapeWork(sws)
-    if err != nil {
-        t.Fatalf("cannot obtain `file_sd_config`: %s", err)
-    }
+    swsNew := cfgNew.getFileSDScrapeWork(sws)
     if equalStaticConfigForScrapeWorks(swsNew, sws) {
         t.Fatalf("unexpected equal static configs;\nswsNew:\n%#v\nsws:\n%#v", swsNew, sws)
     }

@@ -116,10 +111,7 @@ scrape_configs:
     if err := cfg.parse([]byte(data), "sss"); err != nil {
         t.Fatalf("cannot parse data: %s", err)
     }
-    sws, err = cfg.getFileSDScrapeWork(swsNew)
-    if err != nil {
-        t.Fatalf("unexpected error: %s", err)
-    }
+    sws = cfg.getFileSDScrapeWork(swsNew)
     if len(sws) != 0 {
         t.Fatalf("unexpected non-empty sws:\n%#v", sws)
     }

@@ -134,10 +126,7 @@ scrape_configs:
     if err := cfg.parse([]byte(data), "sss"); err != nil {
         t.Fatalf("cannot parse data: %s", err)
    }
-    sws, err = cfg.getFileSDScrapeWork(swsNew)
-    if err != nil {
-        t.Fatalf("unexpected error: %s", err)
-    }
+    sws = cfg.getFileSDScrapeWork(swsNew)
     if len(sws) != 0 {
         t.Fatalf("unexpected non-empty sws:\n%#v", sws)
     }

@@ -148,7 +137,7 @@ func getFileSDScrapeWork(data []byte, path string) ([]ScrapeWork, error) {
     if err := cfg.parse(data, path); err != nil {
         return nil, fmt.Errorf("cannot parse data: %s", err)
     }
-    return cfg.getFileSDScrapeWork(nil)
+    return cfg.getFileSDScrapeWork(nil), nil
 }

 func getStaticScrapeWork(data []byte, path string) ([]ScrapeWork, error) {

@@ -156,7 +145,7 @@ func getStaticScrapeWork(data []byte, path string) ([]ScrapeWork, error) {
     if err := cfg.parse(data, path); err != nil {
         return nil, fmt.Errorf("cannot parse data: %s", err)
     }
-    return cfg.getStaticScrapeWork()
+    return cfg.getStaticScrapeWork(), nil
 }

 func TestGetStaticScrapeWorkFailure(t *testing.T) {
@@ -190,22 +179,6 @@ scrape_configs:
     - targets: ["foo"]
 `)

-    // Empty target
-    f(`
-scrape_configs:
-- job_name: x
-  static_configs:
-  - targets: ["foo", ""]
-`)
-
-    // Invalid url
-    f(`
-scrape_configs:
-- job_name: x
-  static_configs:
-  - targets: ["a b"]
-`)
-
     // Missing username in `basic_auth`
     f(`
 scrape_configs:
@@ -456,6 +429,7 @@ scrape_configs:
                     Value: "rty",
                 },
             },
+            AuthConfig: &promauth.Config{},
         },
         {
             ScrapeURL: "http://host2:80/abc/de",

@@ -489,6 +463,7 @@ scrape_configs:
                     Value: "rty",
                 },
             },
+            AuthConfig: &promauth.Config{},
         },
         {
             ScrapeURL: "http://localhost:9090/abc/de",

@@ -522,6 +497,7 @@ scrape_configs:
                     Value: "test",
                 },
             },
+            AuthConfig: &promauth.Config{},
         },
     })
 }

@@ -568,6 +544,7 @@ scrape_configs:
                     Value: "foo",
                 },
             },
+            AuthConfig: &promauth.Config{},
         },
     })
     f(`

@@ -612,6 +589,7 @@ scrape_configs:
                     Value: "xxx",
                 },
             },
+            AuthConfig: &promauth.Config{},
         },
     })
     f(`

@@ -676,7 +654,9 @@ scrape_configs:
                     Value: "y",
                 },
             },
-            Authorization: "Bearer xyz",
+            AuthConfig: &promauth.Config{
+                Authorization: "Bearer xyz",
+            },
         },
         {
             ScrapeURL: "https://aaa:443/foo/bar?p=x%26y&p=%3D",

@@ -710,7 +690,9 @@ scrape_configs:
                     Value: "y",
                 },
             },
-            Authorization: "Bearer xyz",
+            AuthConfig: &promauth.Config{
+                Authorization: "Bearer xyz",
+            },
         },
         {
             ScrapeURL: "http://1.2.3.4:80/metrics",

@@ -736,9 +718,11 @@ scrape_configs:
                     Value: "qwer",
                 },
             },
-            Authorization:         "Basic dXNlcjpwYXNz",
-            TLSServerName:         "foobar",
-            TLSInsecureSkipVerify: true,
+            AuthConfig: &promauth.Config{
+                Authorization:         "Basic dXNlcjpwYXNz",
+                TLSServerName:         "foobar",
+                TLSInsecureSkipVerify: true,
+            },
         },
     })
     f(`

@@ -807,6 +791,7 @@ scrape_configs:
                     Value: "http://foo.bar:1234/metrics",
                 },
             },
+            AuthConfig: &promauth.Config{},
         },
     })
     f(`

@@ -867,6 +852,7 @@ scrape_configs:
                     Value: "https",
                 },
             },
+            AuthConfig: &promauth.Config{},
         },
     })
     f(`

@@ -904,6 +890,7 @@ scrape_configs:
                     Value: "3",
                 },
             },
+            AuthConfig: &promauth.Config{},
         },
     })
     f(`

@@ -937,6 +924,7 @@ scrape_configs:
                     Value: "foo",
                 },
             },
+            AuthConfig: &promauth.Config{},
             MetricRelabelConfigs: []promrelabel.ParsedRelabelConfig{
                 {
                     SourceLabels: []string{"foo"},

@@ -980,7 +968,9 @@ scrape_configs:
                     Value: "foo",
                 },
             },
-            Authorization: "Basic eHl6OnNlY3JldC1wYXNz",
+            AuthConfig: &promauth.Config{
+                Authorization: "Basic eHl6OnNlY3JldC1wYXNz",
+            },
         },
     })
     f(`

@@ -1012,7 +1002,9 @@ scrape_configs:
                     Value: "foo",
                 },
             },
-            Authorization: "Bearer secret-pass",
+            AuthConfig: &promauth.Config{
+                Authorization: "Bearer secret-pass",
+            },
         },
     })
     snakeoilCert, err := tls.LoadX509KeyPair("testdata/ssl-cert-snakeoil.pem", "testdata/ssl-cert-snakeoil.key")

@@ -1050,7 +1042,9 @@ scrape_configs:
                     Value: "foo",
                 },
             },
-            TLSCertificate: &snakeoilCert,
+            AuthConfig: &promauth.Config{
+                TLSCertificate: &snakeoilCert,
+            },
         },
     })
     f(`

@@ -1107,6 +1101,7 @@ scrape_configs:
                     Value: "qwe",
                 },
             },
+            AuthConfig: &promauth.Config{},
         },
     })
     f(`

@@ -1160,6 +1155,7 @@ scrape_configs:
                     Value: "snmp",
                 },
             },
+            AuthConfig: &promauth.Config{},
         },
     })
 }
@@ -56,14 +56,8 @@ func runScraper(configFile string, pushData func(wr *prompbmarshal.WriteRequest)
     if err != nil {
         logger.Fatalf("cannot read %q: %s", configFile, err)
     }
-    swsStatic, err := cfg.getStaticScrapeWork()
-    if err != nil {
-        logger.Fatalf("cannot parse `static_configs` from %q: %s", configFile, err)
-    }
-    swsFileSD, err := cfg.getFileSDScrapeWork(nil)
-    if err != nil {
-        logger.Fatalf("cannot parse `file_sd_config` from %q: %s", configFile, err)
-    }
+    swsStatic := cfg.getStaticScrapeWork()
+    swsFileSD := cfg.getFileSDScrapeWork(nil)

     mustStop := false
     for !mustStop {

@@ -89,18 +83,9 @@ func runScraper(configFile string, pushData func(wr *prompbmarshal.WriteRequest)
                 logger.Errorf("cannot read %q: %s; continuing with the previous config", configFile, err)
                 goto waitForChans
             }
-            swsStaticNew, err := cfgNew.getStaticScrapeWork()
-            if err != nil {
-                logger.Errorf("cannot parse `static_configs` from %q: %s; continuing with the previous config", configFile, err)
-                goto waitForChans
-            }
-            swsFileSDNew, err := cfgNew.getFileSDScrapeWork(swsFileSD)
-            if err != nil {
-                logger.Errorf("cannot parse `file_sd_config` from %q: %s; continuing with the previous config", configFile, err)
-            }
             cfg = cfgNew
-            swsStatic = swsStaticNew
-            swsFileSD = swsFileSDNew
+            swsStatic = cfg.getStaticScrapeWork()
+            swsFileSD = cfg.getFileSDScrapeWork(swsFileSD)
         case <-globalStopCh:
             mustStop = true
         }

@@ -151,10 +136,7 @@ func runFileSDScrapers(sws []ScrapeWork, cfg *Config, pushData func(wr *prompbma
 waitForChans:
     select {
     case <-ticker.C:
-        swsNew, err := cfg.getFileSDScrapeWork(sws)
-        if err != nil {
-            logger.Panicf("BUG: error when re-reading `file_sd_config` targets the second time: %s", err)
-        }
+        swsNew := cfg.getFileSDScrapeWork(sws)
         if equalStaticConfigForScrapeWorks(swsNew, sws) {
             // Nothing changed, continue waiting for updated scrape work
             goto waitForChans

@@ -173,7 +155,7 @@ func runFileSDScrapers(sws []ScrapeWork, cfg *Config, pushData func(wr *prompbma

 var (
     fileSDTargets = metrics.NewCounter(`vm_promscrape_targets{type="file_sd"}`)
-    fileSDReloads = metrics.NewCounter(`vm_promscrape_file_sd_reloads_total`)
+    fileSDReloads = metrics.NewCounter(`vm_promscrape_reloads_total{type="file_sd"}`)
 )

 func equalStaticConfigForScrapeWorks(as, bs []ScrapeWork) bool {
@@ -1,13 +1,12 @@
 package promscrape

 import (
-    "crypto/tls"
-    "crypto/x509"
     "math/rand"
     "time"

     "github.com/VictoriaMetrics/VictoriaMetrics/lib/bytesutil"
     "github.com/VictoriaMetrics/VictoriaMetrics/lib/logger"
+    "github.com/VictoriaMetrics/VictoriaMetrics/lib/promauth"
     "github.com/VictoriaMetrics/VictoriaMetrics/lib/prompbmarshal"
     "github.com/VictoriaMetrics/VictoriaMetrics/lib/promrelabel"
     parser "github.com/VictoriaMetrics/VictoriaMetrics/lib/protoparser/prometheus"

@@ -48,16 +47,8 @@ type ScrapeWork struct {
     // See also https://prometheus.io/docs/concepts/jobs_instances/
     Labels []prompbmarshal.Label

-    // Optional `Authorization` header.
-    //
-    // It may contain `Basic ....` or `Bearer ....` string.
-    Authorization string
-
-    // Optional TLS config
-    TLSRootCA             *x509.CertPool
-    TLSCertificate        *tls.Certificate
-    TLSServerName         string
-    TLSInsecureSkipVerify bool
+    // Auth config
+    AuthConfig *promauth.Config

     // Optional `metric_relabel_configs`.
     MetricRelabelConfigs []promrelabel.ParsedRelabelConfig
@@ -1 +1 @@
-- targets: ["foo", ""]
+- targets: [""]