lib/promscrape: extract common auth code to lib/promauth

Aliaksandr Valialkin 2020-04-13 12:59:05 +03:00
parent 2eed6c393f
commit 90b4a6dd12
8 changed files with 329 additions and 350 deletions

lib/promauth/config.go (new file)

@@ -0,0 +1,135 @@
package promauth

import (
    "crypto/tls"
    "crypto/x509"
    "encoding/base64"
    "fmt"
    "io/ioutil"
)

// TLSConfig represents TLS config.
//
// See https://prometheus.io/docs/prometheus/latest/configuration/configuration/#tls_config
type TLSConfig struct {
    CAFile             string `yaml:"ca_file"`
    CertFile           string `yaml:"cert_file"`
    KeyFile            string `yaml:"key_file"`
    ServerName         string `yaml:"server_name"`
    InsecureSkipVerify bool   `yaml:"insecure_skip_verify"`
}

// BasicAuthConfig represents basic auth config.
type BasicAuthConfig struct {
    Username     string `yaml:"username"`
    Password     string `yaml:"password"`
    PasswordFile string `yaml:"password_file"`
}

// Config is auth config.
type Config struct {
    // Optional `Authorization` header.
    //
    // It may contain `Basic ....` or `Bearer ....` string.
    Authorization string

    // Optional TLS config
    TLSRootCA             *x509.CertPool
    TLSCertificate        *tls.Certificate
    TLSServerName         string
    TLSInsecureSkipVerify bool
}

// NewTLSConfig returns new TLS config for the given ac.
func (ac *Config) NewTLSConfig() *tls.Config {
    tlsCfg := &tls.Config{
        RootCAs:            ac.TLSRootCA,
        ClientSessionCache: tls.NewLRUClientSessionCache(0),
    }
    if ac.TLSCertificate != nil {
        tlsCfg.Certificates = []tls.Certificate{*ac.TLSCertificate}
    }
    tlsCfg.ServerName = ac.TLSServerName
    tlsCfg.InsecureSkipVerify = ac.TLSInsecureSkipVerify
    return tlsCfg
}

// NewConfig creates auth config from the given args.
func NewConfig(baseDir string, basicAuth *BasicAuthConfig, bearerToken, bearerTokenFile string, tlsConfig *TLSConfig) (*Config, error) {
    var authorization string
    if basicAuth != nil {
        if basicAuth.Username == "" {
            return nil, fmt.Errorf("missing `username` in `basic_auth` section")
        }
        username := basicAuth.Username
        password := basicAuth.Password
        if basicAuth.PasswordFile != "" {
            if basicAuth.Password != "" {
                return nil, fmt.Errorf("both `password`=%q and `password_file`=%q are set in `basic_auth` section", basicAuth.Password, basicAuth.PasswordFile)
            }
            path := getFilepath(baseDir, basicAuth.PasswordFile)
            pass, err := readPasswordFromFile(path)
            if err != nil {
                return nil, fmt.Errorf("cannot read password from `password_file`=%q set in `basic_auth` section: %s", basicAuth.PasswordFile, err)
            }
            password = pass
        }
        // See https://en.wikipedia.org/wiki/Basic_access_authentication
        token := username + ":" + password
        token64 := base64.StdEncoding.EncodeToString([]byte(token))
        authorization = "Basic " + token64
    }
    if bearerTokenFile != "" {
        if bearerToken != "" {
            return nil, fmt.Errorf("both `bearer_token`=%q and `bearer_token_file`=%q are set", bearerToken, bearerTokenFile)
        }
        path := getFilepath(baseDir, bearerTokenFile)
        token, err := readPasswordFromFile(path)
        if err != nil {
            return nil, fmt.Errorf("cannot read bearer token from `bearer_token_file`=%q: %s", bearerTokenFile, err)
        }
        bearerToken = token
    }
    if bearerToken != "" {
        if authorization != "" {
            return nil, fmt.Errorf("cannot use both `basic_auth` and `bearer_token`")
        }
        authorization = "Bearer " + bearerToken
    }
    var tlsRootCA *x509.CertPool
    var tlsCertificate *tls.Certificate
    tlsServerName := ""
    tlsInsecureSkipVerify := false
    if tlsConfig != nil {
        tlsServerName = tlsConfig.ServerName
        tlsInsecureSkipVerify = tlsConfig.InsecureSkipVerify
        if tlsConfig.CertFile != "" || tlsConfig.KeyFile != "" {
            certPath := getFilepath(baseDir, tlsConfig.CertFile)
            keyPath := getFilepath(baseDir, tlsConfig.KeyFile)
            cert, err := tls.LoadX509KeyPair(certPath, keyPath)
            if err != nil {
                return nil, fmt.Errorf("cannot load TLS certificate from `cert_file`=%q, `key_file`=%q: %s", tlsConfig.CertFile, tlsConfig.KeyFile, err)
            }
            tlsCertificate = &cert
        }
        if tlsConfig.CAFile != "" {
            path := getFilepath(baseDir, tlsConfig.CAFile)
            data, err := ioutil.ReadFile(path)
            if err != nil {
                return nil, fmt.Errorf("cannot read `ca_file` %q: %s", tlsConfig.CAFile, err)
            }
            tlsRootCA = x509.NewCertPool()
            if !tlsRootCA.AppendCertsFromPEM(data) {
                return nil, fmt.Errorf("cannot parse data from `ca_file` %q", tlsConfig.CAFile)
            }
        }
    }
    ac := &Config{
        Authorization:         authorization,
        TLSRootCA:             tlsRootCA,
        TLSCertificate:        tlsCertificate,
        TLSServerName:         tlsServerName,
        TLSInsecureSkipVerify: tlsInsecureSkipVerify,
    }
    return ac, nil
}
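
A minimal usage sketch of the new package (an illustration, not part of the commit; the credentials and URL are made up). NewConfig validates the mutually exclusive auth options once, and the returned Config carries a prebuilt `Authorization` header value plus the NewTLSConfig helper:

package main

import (
    "fmt"
    "net/http"

    "github.com/VictoriaMetrics/VictoriaMetrics/lib/promauth"
)

func main() {
    ba := &promauth.BasicAuthConfig{
        Username: "scraper", // hypothetical credentials
        Password: "secret",
    }
    // The baseDir argument resolves relative `password_file`, `bearer_token_file`,
    // `ca_file`, `cert_file` and `key_file` paths; no files are read here.
    ac, err := promauth.NewConfig("/etc/prometheus", ba, "", "", nil)
    if err != nil {
        panic(err)
    }
    req, err := http.NewRequest(http.MethodGet, "http://example.com/metrics", nil)
    if err != nil {
        panic(err)
    }
    // The header value is computed once in NewConfig and reused for every scrape.
    req.Header.Set("Authorization", ac.Authorization)
    fmt.Println(req.Header.Get("Authorization")) // Basic c2NyYXBlcjpzZWNyZXQ=
}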

lib/promauth/util.go (new file)

@@ -0,0 +1,24 @@
package promauth

import (
    "io/ioutil"
    "path/filepath"
    "strings"
    "unicode"
)

func getFilepath(baseDir, path string) string {
    if filepath.IsAbs(path) {
        return path
    }
    return filepath.Join(baseDir, path)
}

func readPasswordFromFile(path string) (string, error) {
    data, err := ioutil.ReadFile(path)
    if err != nil {
        return "", err
    }
    pass := strings.TrimRightFunc(string(data), unicode.IsSpace)
    return pass, nil
}
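
These helpers mirror what lib/promscrape previously inlined: relative paths resolve against the directory of the config file, and trailing whitespace (such as the final newline most editors append) is stripped from file-based secrets. A tiny standalone illustration with made-up values:

package main

import (
    "fmt"
    "path/filepath"
    "strings"
    "unicode"
)

func main() {
    // Relative secret paths resolve against the config directory,
    // while absolute paths pass through getFilepath unchanged.
    fmt.Println(filepath.Join("/etc/prometheus", "secrets/password.txt")) // /etc/prometheus/secrets/password.txt
    // readPasswordFromFile trims trailing whitespace, so a final newline
    // in a password file does not leak into the Authorization header.
    fmt.Println(strings.TrimRightFunc("s3cr3t\n", unicode.IsSpace)) // s3cr3t
}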

lib/promscrape/client.go

@@ -36,7 +36,7 @@ func newClient(sw *ScrapeWork) *client {
     isTLS := string(u.Scheme()) == "https"
     var tlsCfg *tls.Config
     if isTLS {
-        tlsCfg = getTLSConfig(sw)
+        tlsCfg = sw.AuthConfig.NewTLSConfig()
     }
     if !strings.Contains(host, ":") {
         if !isTLS {

@@ -64,7 +64,7 @@ func newClient(sw *ScrapeWork) *client {
         scrapeURL:  sw.ScrapeURL,
         host:       host,
         requestURI: requestURI,
-        authHeader: sw.Authorization,
+        authHeader: sw.AuthConfig.Authorization,
     }
 }

@@ -120,16 +120,3 @@ var (
     scrapesGunzipped    = metrics.NewCounter(`vm_promscrape_scrapes_gunziped_total`)
     scrapesGunzipFailed = metrics.NewCounter(`vm_promscrape_scrapes_gunzip_failed_total`)
 )
-
-func getTLSConfig(sw *ScrapeWork) *tls.Config {
-    tlsCfg := &tls.Config{
-        RootCAs:            sw.TLSRootCA,
-        ClientSessionCache: tls.NewLRUClientSessionCache(0),
-    }
-    if sw.TLSCertificate != nil {
-        tlsCfg.Certificates = []tls.Certificate{*sw.TLSCertificate}
-    }
-    tlsCfg.ServerName = sw.TLSServerName
-    tlsCfg.InsecureSkipVerify = sw.TLSInsecureSkipVerify
-    return tlsCfg
-}
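
For reference, a rough standard-library analogue (an illustration, not the fasthttp-based client the scraper actually uses) of the two touch points newClient now delegates to promauth.Config:

package clientsketch // hypothetical package, for illustration only

import (
    "net/http"

    "github.com/VictoriaMetrics/VictoriaMetrics/lib/promauth"
)

// newScrapeRequest builds a client and request roughly the way newClient does:
// the TLS config comes from ac.NewTLSConfig() and the optional Authorization
// header from ac.Authorization.
func newScrapeRequest(ac *promauth.Config, scrapeURL string) (*http.Client, *http.Request, error) {
    req, err := http.NewRequest(http.MethodGet, scrapeURL, nil)
    if err != nil {
        return nil, nil, err
    }
    if ac.Authorization != "" {
        req.Header.Set("Authorization", ac.Authorization)
    }
    c := &http.Client{
        Transport: &http.Transport{TLSClientConfig: ac.NewTLSConfig()},
    }
    return c, req, nil
}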

lib/promscrape/config.go

@@ -1,18 +1,15 @@
 package promscrape

 import (
-    "crypto/tls"
-    "crypto/x509"
-    "encoding/base64"
     "fmt"
     "io/ioutil"
     "net/url"
     "path/filepath"
     "strings"
     "time"
-    "unicode"

     "github.com/VictoriaMetrics/VictoriaMetrics/lib/logger"
+    "github.com/VictoriaMetrics/VictoriaMetrics/lib/promauth"
     "github.com/VictoriaMetrics/VictoriaMetrics/lib/prompbmarshal"
     "github.com/VictoriaMetrics/VictoriaMetrics/lib/promrelabel"
     "gopkg.in/yaml.v2"
@@ -48,10 +45,10 @@ type ScrapeConfig struct {
     HonorTimestamps bool                        `yaml:"honor_timestamps"`
     Scheme          string                      `yaml:"scheme"`
     Params          map[string][]string         `yaml:"params"`
-    BasicAuth       *BasicAuthConfig            `yaml:"basic_auth"`
+    BasicAuth       *promauth.BasicAuthConfig   `yaml:"basic_auth"`
     BearerToken     string                      `yaml:"bearer_token"`
     BearerTokenFile string                      `yaml:"bearer_token_file"`
-    TLSConfig       *TLSConfig                  `yaml:"tls_config"`
+    TLSConfig       *promauth.TLSConfig         `yaml:"tls_config"`
     StaticConfigs   []StaticConfig              `yaml:"static_configs"`
     FileSDConfigs   []FileSDConfig              `yaml:"file_sd_configs"`
     RelabelConfigs  []promrelabel.RelabelConfig `yaml:"relabel_configs"`
@@ -70,24 +67,6 @@ type FileSDConfig struct {
     // `refresh_interval` is ignored. See `-prometheus.fileSDCheckInterval`
 }

-// TLSConfig represents TLS config.
-//
-// See https://prometheus.io/docs/prometheus/latest/configuration/configuration/#tls_config
-type TLSConfig struct {
-    CAFile             string `yaml:"ca_file"`
-    CertFile           string `yaml:"cert_file"`
-    KeyFile            string `yaml:"key_file"`
-    ServerName         string `yaml:"server_name"`
-    InsecureSkipVerify bool   `yaml:"insecure_skip_verify"`
-}
-
-// BasicAuthConfig represents basic auth config.
-type BasicAuthConfig struct {
-    Username     string `yaml:"username"`
-    Password     string `yaml:"password"`
-    PasswordFile string `yaml:"password_file"`
-}
-
 // StaticConfig represents essential parts for `static_config` section of Prometheus config.
 //
 // See https://prometheus.io/docs/prometheus/latest/configuration/configuration/#static_config
@@ -150,36 +129,7 @@ func (cfg *Config) fileSDConfigsCount() int {
 }

 // getFileSDScrapeWork returns `file_sd_configs` ScrapeWork from cfg.
-func (cfg *Config) getFileSDScrapeWork(prev []ScrapeWork) ([]ScrapeWork, error) {
-    var sws []ScrapeWork
-    for i := range cfg.ScrapeConfigs {
-        var err error
-        sws, err = cfg.ScrapeConfigs[i].appendFileSDScrapeWork(sws, prev, cfg.baseDir)
-        if err != nil {
-            return nil, fmt.Errorf("error when parsing `scrape_config` #%d: %s", i+1, err)
-        }
-    }
-    return sws, nil
-}
-
-// getStaticScrapeWork returns `static_configs` ScrapeWork from from cfg.
-func (cfg *Config) getStaticScrapeWork() ([]ScrapeWork, error) {
-    var sws []ScrapeWork
-    for i := range cfg.ScrapeConfigs {
-        var err error
-        sws, err = cfg.ScrapeConfigs[i].appendStaticScrapeWork(sws)
-        if err != nil {
-            return nil, fmt.Errorf("error when parsing `scrape_config` #%d: %s", i+1, err)
-        }
-    }
-    return sws, nil
-}
-
-func (sc *ScrapeConfig) appendFileSDScrapeWork(dst, prev []ScrapeWork, baseDir string) ([]ScrapeWork, error) {
-    if len(sc.FileSDConfigs) == 0 {
-        // Fast path - no `file_sd_configs`
-        return dst, nil
-    }
+func (cfg *Config) getFileSDScrapeWork(prev []ScrapeWork) []ScrapeWork {
     // Create a map for the previous scrape work.
     swPrev := make(map[string][]ScrapeWork)
     for i := range prev {
@@ -191,25 +141,24 @@ func (sc *ScrapeConfig) appendFileSDScrapeWork(dst, prev []ScrapeWork, baseDir s
             swPrev[label.Value] = append(swPrev[label.Value], *sw)
         }
     }
-    for i := range sc.FileSDConfigs {
-        var err error
-        dst, err = sc.FileSDConfigs[i].appendScrapeWork(dst, swPrev, baseDir, sc.swc)
-        if err != nil {
-            return nil, fmt.Errorf("error when parsing `file_sd_config` #%d: %s", i+1, err)
-        }
-    }
-    return dst, nil
+    var dst []ScrapeWork
+    for _, sc := range cfg.ScrapeConfigs {
+        for _, sdc := range sc.FileSDConfigs {
+            dst = sdc.appendScrapeWork(dst, swPrev, cfg.baseDir, sc.swc)
+        }
+    }
+    return dst
 }

-func (sc *ScrapeConfig) appendStaticScrapeWork(dst []ScrapeWork) ([]ScrapeWork, error) {
-    for i := range sc.StaticConfigs {
-        var err error
-        dst, err = sc.StaticConfigs[i].appendScrapeWork(dst, sc.swc)
-        if err != nil {
-            return nil, fmt.Errorf("error when parsing `static_config` #%d: %s", i+1, err)
-        }
-    }
-    return dst, nil
+// getStaticScrapeWork returns `static_configs` ScrapeWork from from cfg.
+func (cfg *Config) getStaticScrapeWork() []ScrapeWork {
+    var dst []ScrapeWork
+    for _, sc := range cfg.ScrapeConfigs {
+        for _, stc := range sc.StaticConfigs {
+            dst = stc.appendScrapeWork(dst, sc.swc, nil)
+        }
+    }
+    return dst
 }

 func getScrapeWorkConfig(sc *ScrapeConfig, baseDir string, globalCfg *GlobalConfig) (*scrapeWorkConfig, error) {
@@ -245,79 +194,10 @@ func getScrapeWorkConfig(sc *ScrapeConfig, baseDir string, globalCfg *GlobalConf
         return nil, fmt.Errorf("unexpected `scheme` for `job_name` %q: %q; supported values: http or https", jobName, scheme)
     }
     params := sc.Params
-    var authorization string
-    if sc.BasicAuth != nil {
-        if sc.BasicAuth.Username == "" {
-            return nil, fmt.Errorf("missing `username` in `basic_auth` section for `job_name` %q", jobName)
-        }
-        username := sc.BasicAuth.Username
-        password := sc.BasicAuth.Password
-        if sc.BasicAuth.PasswordFile != "" {
-            if sc.BasicAuth.Password != "" {
-                return nil, fmt.Errorf("both `password`=%q and `password_file`=%q are set in `basic_auth` section for `job_name` %q",
-                    sc.BasicAuth.Password, sc.BasicAuth.PasswordFile, jobName)
-            }
-            path := getFilepath(baseDir, sc.BasicAuth.PasswordFile)
-            pass, err := readPasswordFromFile(path)
-            if err != nil {
-                return nil, fmt.Errorf("cannot read password from `password_file`=%q set in `basic_auth` section for `job_name` %q: %s",
-                    sc.BasicAuth.PasswordFile, jobName, err)
-            }
-            password = pass
-        }
-        // See https://en.wikipedia.org/wiki/Basic_access_authentication
-        token := username + ":" + password
-        token64 := base64.StdEncoding.EncodeToString([]byte(token))
-        authorization = "Basic " + token64
-    }
-    bearerToken := sc.BearerToken
-    if sc.BearerTokenFile != "" {
-        if sc.BearerToken != "" {
-            return nil, fmt.Errorf("both `bearer_token`=%q and `bearer_token_file`=%q are set for `job_name` %q", sc.BearerToken, sc.BearerTokenFile, jobName)
-        }
-        path := getFilepath(baseDir, sc.BearerTokenFile)
-        token, err := readPasswordFromFile(path)
-        if err != nil {
-            return nil, fmt.Errorf("cannot read bearer token from `bearer_token_file`=%q for `job_name` %q: %s", sc.BearerTokenFile, jobName, err)
-        }
-        bearerToken = token
-    }
-    if bearerToken != "" {
-        if authorization != "" {
-            return nil, fmt.Errorf("cannot use both `basic_auth` and `bearer_token` for `job_name` %q", jobName)
-        }
-        authorization = "Bearer " + bearerToken
-    }
-    var tlsRootCA *x509.CertPool
-    var tlsCertificate *tls.Certificate
-    tlsServerName := ""
-    tlsInsecureSkipVerify := false
-    if sc.TLSConfig != nil {
-        tlsServerName = sc.TLSConfig.ServerName
-        tlsInsecureSkipVerify = sc.TLSConfig.InsecureSkipVerify
-        if sc.TLSConfig.CertFile != "" || sc.TLSConfig.KeyFile != "" {
-            certPath := getFilepath(baseDir, sc.TLSConfig.CertFile)
-            keyPath := getFilepath(baseDir, sc.TLSConfig.KeyFile)
-            cert, err := tls.LoadX509KeyPair(certPath, keyPath)
-            if err != nil {
-                return nil, fmt.Errorf("cannot load TLS certificate for `job_name` %q from `cert_file`=%q, `key_file`=%q: %s",
-                    jobName, sc.TLSConfig.CertFile, sc.TLSConfig.KeyFile, err)
-            }
-            tlsCertificate = &cert
-        }
-        if sc.TLSConfig.CAFile != "" {
-            path := getFilepath(baseDir, sc.TLSConfig.CAFile)
-            data, err := ioutil.ReadFile(path)
-            if err != nil {
-                return nil, fmt.Errorf("cannot read `ca_file` %q for `job_name` %q: %s", sc.TLSConfig.CAFile, jobName, err)
-            }
-            tlsRootCA = x509.NewCertPool()
-            if !tlsRootCA.AppendCertsFromPEM(data) {
-                return nil, fmt.Errorf("cannot parse data from `ca_file` %q for `job_name` %q", sc.TLSConfig.CAFile, jobName)
-            }
-        }
-    }
-    var err error
+    ac, err := promauth.NewConfig(baseDir, sc.BasicAuth, sc.BearerToken, sc.BearerTokenFile, sc.TLSConfig)
+    if err != nil {
+        return nil, fmt.Errorf("cannot parse auth config for `job_name` %q: %s", jobName, err)
+    }
     var relabelConfigs []promrelabel.ParsedRelabelConfig
     relabelConfigs, err = promrelabel.ParseRelabelConfigs(relabelConfigs[:0], sc.RelabelConfigs)
     if err != nil {
@@ -330,49 +210,40 @@ func getScrapeWorkConfig(sc *ScrapeConfig, baseDir string, globalCfg *GlobalConf
     }
     scrapeLimit := sc.ScrapeLimit
     swc := &scrapeWorkConfig{
         scrapeInterval:       scrapeInterval,
         scrapeTimeout:        scrapeTimeout,
         jobName:              jobName,
         metricsPath:          metricsPath,
         scheme:               scheme,
         params:               params,
-        authorization:        authorization,
+        authConfig:           ac,
         honorLabels:          honorLabels,
         honorTimestamps:      honorTimestamps,
         externalLabels:       globalCfg.ExternalLabels,
-        tlsRootCA:             tlsRootCA,
-        tlsCertificate:        tlsCertificate,
-        tlsServerName:         tlsServerName,
-        tlsInsecureSkipVerify: tlsInsecureSkipVerify,
         relabelConfigs:       relabelConfigs,
         metricRelabelConfigs: metricRelabelConfigs,
         scrapeLimit:          scrapeLimit,
     }
     return swc, nil
 }

 type scrapeWorkConfig struct {
     scrapeInterval       time.Duration
     scrapeTimeout        time.Duration
     jobName              string
     metricsPath          string
     scheme               string
     params               map[string][]string
-    authorization        string
+    authConfig           *promauth.Config
     honorLabels          bool
     honorTimestamps      bool
     externalLabels       map[string]string
-    tlsRootCA             *x509.CertPool
-    tlsCertificate        *tls.Certificate
-    tlsServerName         string
-    tlsInsecureSkipVerify bool
     relabelConfigs       []promrelabel.ParsedRelabelConfig
     metricRelabelConfigs []promrelabel.ParsedRelabelConfig
     scrapeLimit          int
-    metaLabels           map[string]string
 }

-func (sdc *FileSDConfig) appendScrapeWork(dst []ScrapeWork, swPrev map[string][]ScrapeWork, baseDir string, swc *scrapeWorkConfig) ([]ScrapeWork, error) {
+func (sdc *FileSDConfig) appendScrapeWork(dst []ScrapeWork, swPrev map[string][]ScrapeWork, baseDir string, swc *scrapeWorkConfig) []ScrapeWork {
     for _, file := range sdc.Files {
         pathPattern := getFilepath(baseDir, file)
         paths := []string{pathPattern}
@@ -380,7 +251,9 @@ func (sdc *FileSDConfig) appendScrapeWork(dst []ScrapeWork, swPrev map[string][]
             var err error
             paths, err = filepath.Glob(pathPattern)
             if err != nil {
-                return nil, fmt.Errorf("invalid pattern %q in `files` section: %s", file, err)
+                // Do not return this error, since other files may contain valid scrape configs.
+                logger.Errorf("invalid pattern %q in `files` section: %s; skipping it", file, err)
+                continue
             }
         }
         for _, path := range paths {
@@ -396,7 +269,6 @@ func (sdc *FileSDConfig) appendScrapeWork(dst []ScrapeWork, swPrev map[string][]
                 }
                 continue
             }
-            swcCopy := *swc
             pathShort := path
             if strings.HasPrefix(pathShort, baseDir) {
                 pathShort = path[len(baseDir):]
@@ -404,88 +276,89 @@ func (sdc *FileSDConfig) appendScrapeWork(dst []ScrapeWork, swPrev map[string][]
                     pathShort = pathShort[1:]
                 }
             }
-            swcCopy.metaLabels = map[string]string{
+            metaLabels := map[string]string{
                 "__meta_filepath": pathShort,
             }
             for i := range stcs {
-                dst, err = stcs[i].appendScrapeWork(dst, &swcCopy)
-                if err != nil {
-                    // Do not return this error, since other paths may contain valid scrape configs.
-                    logger.Errorf("error when parsing `static_config` #%d from %q: %s", i+1, path, err)
-                    continue
-                }
+                dst = stcs[i].appendScrapeWork(dst, swc, metaLabels)
             }
         }
     }
-    return dst, nil
+    return dst
 }

-func (stc *StaticConfig) appendScrapeWork(dst []ScrapeWork, swc *scrapeWorkConfig) ([]ScrapeWork, error) {
+func (stc *StaticConfig) appendScrapeWork(dst []ScrapeWork, swc *scrapeWorkConfig, metaLabels map[string]string) []ScrapeWork {
     for _, target := range stc.Targets {
         if target == "" {
-            return nil, fmt.Errorf("`static_configs` target for `job_name` %q cannot be empty", swc.jobName)
+            // Do not return this error, since other targets may be valid
+            logger.Errorf("`static_configs` target for `job_name` %q cannot be empty; skipping it", swc.jobName)
+            continue
         }
-        labels, err := mergeLabels(swc.jobName, swc.scheme, target, swc.metricsPath, stc.Labels, swc.externalLabels, swc.metaLabels, swc.params)
+        var err error
+        dst, err = appendScrapeWork(dst, swc, target, stc.Labels, metaLabels)
         if err != nil {
-            return nil, fmt.Errorf("cannot merge labels for `static_configs` target for `job_name` %q: %s", swc.jobName, err)
-        }
-        labels = promrelabel.ApplyRelabelConfigs(labels, 0, swc.relabelConfigs, false)
-        if len(labels) == 0 {
-            // Drop target without labels.
+            // Do not return this error, since other targets may be valid
+            logger.Errorf("error when parsing `static_configs` target %q for `job_name` %q: %s; skipping it", target, swc.jobName, err)
             continue
         }
-        // See https://www.robustperception.io/life-of-a-label
-        schemeRelabeled := ""
-        if schemeLabel := promrelabel.GetLabelByName(labels, "__scheme__"); schemeLabel != nil {
-            schemeRelabeled = schemeLabel.Value
-        }
-        if schemeRelabeled == "" {
-            schemeRelabeled = "http"
-        }
-        addressLabel := promrelabel.GetLabelByName(labels, "__address__")
-        if addressLabel == nil || addressLabel.Name == "" {
-            // Drop target without scrape address.
-            continue
-        }
-        targetRelabeled := addMissingPort(schemeRelabeled, addressLabel.Value)
-        if strings.Contains(targetRelabeled, "/") {
-            // Drop target with '/'
-            continue
-        }
-        metricsPathRelabeled := ""
-        if metricsPathLabel := promrelabel.GetLabelByName(labels, "__metrics_path__"); metricsPathLabel != nil {
-            metricsPathRelabeled = metricsPathLabel.Value
-        }
-        if metricsPathRelabeled == "" {
-            metricsPathRelabeled = "/metrics"
-        }
-        paramsRelabeled := getParamsFromLabels(labels, swc.params)
-        optionalQuestion := "?"
-        if len(paramsRelabeled) == 0 || strings.Contains(metricsPathRelabeled, "?") {
-            optionalQuestion = ""
-        }
-        paramsStr := url.Values(paramsRelabeled).Encode()
-        scrapeURL := fmt.Sprintf("%s://%s%s%s%s", schemeRelabeled, targetRelabeled, metricsPathRelabeled, optionalQuestion, paramsStr)
-        if _, err := url.Parse(scrapeURL); err != nil {
-            return nil, fmt.Errorf("invalid url %q for scheme=%q (%q), target=%q (%q), metrics_path=%q (%q) for `job_name` %q: %s",
-                scrapeURL, swc.scheme, schemeRelabeled, target, targetRelabeled, swc.metricsPath, metricsPathRelabeled, swc.jobName, err)
-        }
-        dst = append(dst, ScrapeWork{
-            ScrapeURL:             scrapeURL,
-            ScrapeInterval:        swc.scrapeInterval,
-            ScrapeTimeout:         swc.scrapeTimeout,
-            HonorLabels:           swc.honorLabels,
-            HonorTimestamps:       swc.honorTimestamps,
-            Labels:                labels,
-            Authorization:         swc.authorization,
-            TLSRootCA:             swc.tlsRootCA,
-            TLSCertificate:        swc.tlsCertificate,
-            TLSServerName:         swc.tlsServerName,
-            TLSInsecureSkipVerify: swc.tlsInsecureSkipVerify,
-            MetricRelabelConfigs:  swc.metricRelabelConfigs,
-            ScrapeLimit:           swc.scrapeLimit,
-        })
     }
+    return dst
+}
+
+func appendScrapeWork(dst []ScrapeWork, swc *scrapeWorkConfig, target string, extraLabels, metaLabels map[string]string) ([]ScrapeWork, error) {
+    labels := mergeLabels(swc.jobName, swc.scheme, target, swc.metricsPath, extraLabels, swc.externalLabels, metaLabels, swc.params)
+    labels = promrelabel.ApplyRelabelConfigs(labels, 0, swc.relabelConfigs, false)
+    if len(labels) == 0 {
+        // Drop target without labels.
+        return dst, nil
+    }
+    // See https://www.robustperception.io/life-of-a-label
+    schemeRelabeled := ""
+    if schemeLabel := promrelabel.GetLabelByName(labels, "__scheme__"); schemeLabel != nil {
+        schemeRelabeled = schemeLabel.Value
+    }
+    if schemeRelabeled == "" {
+        schemeRelabeled = "http"
+    }
+    addressLabel := promrelabel.GetLabelByName(labels, "__address__")
+    if addressLabel == nil || addressLabel.Name == "" {
+        // Drop target without scrape address.
+        return dst, nil
+    }
+    targetRelabeled := addMissingPort(schemeRelabeled, addressLabel.Value)
+    if strings.Contains(targetRelabeled, "/") {
+        // Drop target with '/'
+        return dst, nil
+    }
+    metricsPathRelabeled := ""
+    if metricsPathLabel := promrelabel.GetLabelByName(labels, "__metrics_path__"); metricsPathLabel != nil {
+        metricsPathRelabeled = metricsPathLabel.Value
+    }
+    if metricsPathRelabeled == "" {
+        metricsPathRelabeled = "/metrics"
+    }
+    paramsRelabeled := getParamsFromLabels(labels, swc.params)
+    optionalQuestion := "?"
+    if len(paramsRelabeled) == 0 || strings.Contains(metricsPathRelabeled, "?") {
+        optionalQuestion = ""
+    }
+    paramsStr := url.Values(paramsRelabeled).Encode()
+    scrapeURL := fmt.Sprintf("%s://%s%s%s%s", schemeRelabeled, targetRelabeled, metricsPathRelabeled, optionalQuestion, paramsStr)
+    if _, err := url.Parse(scrapeURL); err != nil {
+        return dst, fmt.Errorf("invalid url %q for scheme=%q (%q), target=%q (%q), metrics_path=%q (%q) for `job_name` %q: %s",
+            scrapeURL, swc.scheme, schemeRelabeled, target, targetRelabeled, swc.metricsPath, metricsPathRelabeled, swc.jobName, err)
+    }
+    dst = append(dst, ScrapeWork{
+        ScrapeURL:            scrapeURL,
+        ScrapeInterval:       swc.scrapeInterval,
+        ScrapeTimeout:        swc.scrapeTimeout,
+        HonorLabels:          swc.honorLabels,
+        HonorTimestamps:      swc.honorTimestamps,
+        Labels:               labels,
+        AuthConfig:           swc.authConfig,
+        MetricRelabelConfigs: swc.metricRelabelConfigs,
+        ScrapeLimit:          swc.scrapeLimit,
+    })
     return dst, nil
 }
@@ -507,7 +380,7 @@ func getParamsFromLabels(labels []prompbmarshal.Label, paramsOrig map[string][]s
     return m
 }

-func mergeLabels(job, scheme, target, metricsPath string, labels, externalLabels, metaLabels map[string]string, params map[string][]string) ([]prompbmarshal.Label, error) {
+func mergeLabels(job, scheme, target, metricsPath string, extraLabels, externalLabels, metaLabels map[string]string, params map[string][]string) []prompbmarshal.Label {
     // See https://prometheus.io/docs/prometheus/latest/configuration/configuration/#relabel_config
     m := make(map[string]string)
     for k, v := range externalLabels {
@@ -525,7 +398,7 @@ func mergeLabels(job, scheme, target, metricsPath string, labels, externalLabels
         v := args[0]
         m[k] = v
     }
-    for k, v := range labels {
+    for k, v := range extraLabels {
         m[k] = v
     }
     for k, v := range metaLabels {
@@ -538,7 +411,7 @@ func mergeLabels(job, scheme, target, metricsPath string, labels, externalLabels
             Value: v,
         })
     }
-    return result, nil
+    return result
 }

 func getFilepath(baseDir, path string) string {
@@ -548,15 +421,6 @@ func getFilepath(baseDir, path string) string {
     return filepath.Join(baseDir, path)
 }

-func readPasswordFromFile(path string) (string, error) {
-    data, err := ioutil.ReadFile(path)
-    if err != nil {
-        return "", err
-    }
-    pass := strings.TrimRightFunc(string(data), unicode.IsSpace)
-    return pass, nil
-}
-
 func addMissingPort(scheme, target string) string {
     if strings.Contains(target, ":") {
         return target
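
The yaml struct tags on the extracted promauth types keep the on-disk scrape config format unchanged. A small sketch (hypothetical YAML values, real field names) of how a `basic_auth` section decodes into the shared type before being handed to promauth.NewConfig:

package main

import (
    "fmt"

    "github.com/VictoriaMetrics/VictoriaMetrics/lib/promauth"
    "gopkg.in/yaml.v2"
)

func main() {
    data := []byte(`
username: scraper
password_file: secrets/password.txt
`)
    var ba promauth.BasicAuthConfig
    if err := yaml.Unmarshal(data, &ba); err != nil {
        panic(err)
    }
    fmt.Println(ba.Username, ba.PasswordFile) // scraper secrets/password.txt
}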

lib/promscrape/config_test.go

@@ -8,6 +8,7 @@ import (
     "testing"
     "time"

+    "github.com/VictoriaMetrics/VictoriaMetrics/lib/promauth"
     "github.com/VictoriaMetrics/VictoriaMetrics/lib/prompbmarshal"
     "github.com/VictoriaMetrics/VictoriaMetrics/lib/promrelabel"
 )
@@ -79,10 +80,7 @@ scrape_configs:
     if err := cfg.parse([]byte(data), "sss"); err != nil {
         t.Fatalf("cannot parase data: %s", err)
     }
-    sws, err := cfg.getFileSDScrapeWork(nil)
-    if err != nil {
-        t.Fatalf("cannot obtain `file_sd_config`: %s", err)
-    }
+    sws := cfg.getFileSDScrapeWork(nil)
     if !equalStaticConfigForScrapeWorks(sws, sws) {
         t.Fatalf("unexpected non-equal static configs;\nsws:\n%#v", sws)
     }
@@ -98,10 +96,7 @@ scrape_configs:
     if err := cfgNew.parse([]byte(dataNew), "sss"); err != nil {
         t.Fatalf("cannot parse data: %s", err)
     }
-    swsNew, err := cfgNew.getFileSDScrapeWork(sws)
-    if err != nil {
-        t.Fatalf("cannot obtain `file_sd_config`: %s", err)
-    }
+    swsNew := cfgNew.getFileSDScrapeWork(sws)
     if equalStaticConfigForScrapeWorks(swsNew, sws) {
         t.Fatalf("unexpected equal static configs;\nswsNew:\n%#v\nsws:\n%#v", swsNew, sws)
     }
@@ -116,10 +111,7 @@ scrape_configs:
     if err := cfg.parse([]byte(data), "sss"); err != nil {
         t.Fatalf("cannot parse data: %s", err)
     }
-    sws, err = cfg.getFileSDScrapeWork(swsNew)
-    if err != nil {
-        t.Fatalf("unexpected error: %s", err)
-    }
+    sws = cfg.getFileSDScrapeWork(swsNew)
     if len(sws) != 0 {
         t.Fatalf("unexpected non-empty sws:\n%#v", sws)
     }
@@ -134,10 +126,7 @@ scrape_configs:
     if err := cfg.parse([]byte(data), "sss"); err != nil {
         t.Fatalf("cannot parse data: %s", err)
     }
-    sws, err = cfg.getFileSDScrapeWork(swsNew)
-    if err != nil {
-        t.Fatalf("unexpected error: %s", err)
-    }
+    sws = cfg.getFileSDScrapeWork(swsNew)
     if len(sws) != 0 {
         t.Fatalf("unexpected non-empty sws:\n%#v", sws)
     }
@@ -148,7 +137,7 @@ func getFileSDScrapeWork(data []byte, path string) ([]ScrapeWork, error) {
     if err := cfg.parse(data, path); err != nil {
         return nil, fmt.Errorf("cannot parse data: %s", err)
     }
-    return cfg.getFileSDScrapeWork(nil)
+    return cfg.getFileSDScrapeWork(nil), nil
 }

 func getStaticScrapeWork(data []byte, path string) ([]ScrapeWork, error) {
@@ -156,7 +145,7 @@ func getStaticScrapeWork(data []byte, path string) ([]ScrapeWork, error) {
     if err := cfg.parse(data, path); err != nil {
         return nil, fmt.Errorf("cannot parse data: %s", err)
     }
-    return cfg.getStaticScrapeWork()
+    return cfg.getStaticScrapeWork(), nil
 }

 func TestGetStaticScrapeWorkFailure(t *testing.T) {
@@ -190,22 +179,6 @@ scrape_configs:
   - targets: ["foo"]
 `)

-    // Empty target
-    f(`
-scrape_configs:
-- job_name: x
-  static_configs:
-  - targets: ["foo", ""]
-`)
-
-    // Invalid url
-    f(`
-scrape_configs:
-- job_name: x
-  static_configs:
-  - targets: ["a b"]
-`)
-
     // Missing username in `basic_auth`
     f(`
 scrape_configs:
@@ -456,6 +429,7 @@ scrape_configs:
                     Value: "rty",
                 },
             },
+            AuthConfig: &promauth.Config{},
         },
         {
             ScrapeURL: "http://host2:80/abc/de",
@@ -489,6 +463,7 @@ scrape_configs:
                     Value: "rty",
                 },
             },
+            AuthConfig: &promauth.Config{},
         },
         {
             ScrapeURL: "http://localhost:9090/abc/de",
@@ -522,6 +497,7 @@ scrape_configs:
                     Value: "test",
                 },
             },
+            AuthConfig: &promauth.Config{},
         },
     })
 }
@@ -568,6 +544,7 @@ scrape_configs:
                     Value: "foo",
                 },
             },
+            AuthConfig: &promauth.Config{},
         },
     })
     f(`
@@ -612,6 +589,7 @@ scrape_configs:
                     Value: "xxx",
                 },
             },
+            AuthConfig: &promauth.Config{},
         },
     })
     f(`
@@ -676,7 +654,9 @@ scrape_configs:
                     Value: "y",
                 },
             },
-            Authorization: "Bearer xyz",
+            AuthConfig: &promauth.Config{
+                Authorization: "Bearer xyz",
+            },
         },
         {
             ScrapeURL: "https://aaa:443/foo/bar?p=x%26y&p=%3D",
@@ -710,7 +690,9 @@ scrape_configs:
                     Value: "y",
                 },
             },
-            Authorization: "Bearer xyz",
+            AuthConfig: &promauth.Config{
+                Authorization: "Bearer xyz",
+            },
         },
         {
             ScrapeURL: "http://1.2.3.4:80/metrics",
@@ -736,9 +718,11 @@ scrape_configs:
                     Value: "qwer",
                 },
             },
-            Authorization:         "Basic dXNlcjpwYXNz",
-            TLSServerName:         "foobar",
-            TLSInsecureSkipVerify: true,
+            AuthConfig: &promauth.Config{
+                Authorization:         "Basic dXNlcjpwYXNz",
+                TLSServerName:         "foobar",
+                TLSInsecureSkipVerify: true,
+            },
         },
     })
     f(`
@@ -807,6 +791,7 @@ scrape_configs:
                     Value: "http://foo.bar:1234/metrics",
                 },
             },
+            AuthConfig: &promauth.Config{},
         },
     })
     f(`
@@ -867,6 +852,7 @@ scrape_configs:
                     Value: "https",
                 },
             },
+            AuthConfig: &promauth.Config{},
         },
     })
     f(`
@@ -904,6 +890,7 @@ scrape_configs:
                     Value: "3",
                 },
             },
+            AuthConfig: &promauth.Config{},
         },
     })
     f(`
@@ -937,6 +924,7 @@ scrape_configs:
                     Value: "foo",
                 },
             },
+            AuthConfig: &promauth.Config{},
             MetricRelabelConfigs: []promrelabel.ParsedRelabelConfig{
                 {
                     SourceLabels: []string{"foo"},
@@ -980,7 +968,9 @@ scrape_configs:
                     Value: "foo",
                 },
             },
-            Authorization: "Basic eHl6OnNlY3JldC1wYXNz",
+            AuthConfig: &promauth.Config{
+                Authorization: "Basic eHl6OnNlY3JldC1wYXNz",
+            },
         },
     })
     f(`
@@ -1012,7 +1002,9 @@ scrape_configs:
                     Value: "foo",
                 },
             },
-            Authorization: "Bearer secret-pass",
+            AuthConfig: &promauth.Config{
+                Authorization: "Bearer secret-pass",
+            },
         },
     })
     snakeoilCert, err := tls.LoadX509KeyPair("testdata/ssl-cert-snakeoil.pem", "testdata/ssl-cert-snakeoil.key")
@@ -1050,7 +1042,9 @@ scrape_configs:
                     Value: "foo",
                 },
             },
-            TLSCertificate: &snakeoilCert,
+            AuthConfig: &promauth.Config{
+                TLSCertificate: &snakeoilCert,
+            },
         },
     })
     f(`
@@ -1107,6 +1101,7 @@ scrape_configs:
                     Value: "qwe",
                 },
             },
+            AuthConfig: &promauth.Config{},
         },
     })
     f(`
@@ -1160,6 +1155,7 @@ scrape_configs:
                     Value: "snmp",
                 },
             },
+            AuthConfig: &promauth.Config{},
         },
     })
 }

lib/promscrape/scraper.go

@@ -56,14 +56,8 @@ func runScraper(configFile string, pushData func(wr *prompbmarshal.WriteRequest)
     if err != nil {
         logger.Fatalf("cannot read %q: %s", configFile, err)
     }
-    swsStatic, err := cfg.getStaticScrapeWork()
-    if err != nil {
-        logger.Fatalf("cannot parse `static_configs` from %q: %s", configFile, err)
-    }
-    swsFileSD, err := cfg.getFileSDScrapeWork(nil)
-    if err != nil {
-        logger.Fatalf("cannot parse `file_sd_config` from %q: %s", configFile, err)
-    }
+    swsStatic := cfg.getStaticScrapeWork()
+    swsFileSD := cfg.getFileSDScrapeWork(nil)

     mustStop := false
     for !mustStop {
@@ -89,18 +83,9 @@ func runScraper(configFile string, pushData func(wr *prompbmarshal.WriteRequest)
                 logger.Errorf("cannot read %q: %s; continuing with the previous config", configFile, err)
                 goto waitForChans
             }
-            swsStaticNew, err := cfgNew.getStaticScrapeWork()
-            if err != nil {
-                logger.Errorf("cannot parse `static_configs` from %q: %s; continuing with the previous config", configFile, err)
-                goto waitForChans
-            }
-            swsFileSDNew, err := cfgNew.getFileSDScrapeWork(swsFileSD)
-            if err != nil {
-                logger.Errorf("cannot parse `file_sd_config` from %q: %s; continuing with the previous config", configFile, err)
-            }
             cfg = cfgNew
-            swsStatic = swsStaticNew
-            swsFileSD = swsFileSDNew
+            swsStatic = cfg.getStaticScrapeWork()
+            swsFileSD = cfg.getFileSDScrapeWork(swsFileSD)
         case <-globalStopCh:
             mustStop = true
         }
@@ -151,10 +136,7 @@ func runFileSDScrapers(sws []ScrapeWork, cfg *Config, pushData func(wr *prompbma
 waitForChans:
     select {
     case <-ticker.C:
-        swsNew, err := cfg.getFileSDScrapeWork(sws)
-        if err != nil {
-            logger.Panicf("BUG: error when re-reading `file_sd_config` targets the second time: %s", err)
-        }
+        swsNew := cfg.getFileSDScrapeWork(sws)
         if equalStaticConfigForScrapeWorks(swsNew, sws) {
             // Nothing changed, continue waiting for updated scrape work
             goto waitForChans
@@ -173,7 +155,7 @@ func runFileSDScrapers(sws []ScrapeWork, cfg *Config, pushData func(wr *prompbma
 var (
     fileSDTargets = metrics.NewCounter(`vm_promscrape_targets{type="file_sd"}`)
-    fileSDReloads = metrics.NewCounter(`vm_promscrape_file_sd_reloads_total`)
+    fileSDReloads = metrics.NewCounter(`vm_promscrape_reloads_total{type="file_sd"}`)
 )

 func equalStaticConfigForScrapeWorks(as, bs []ScrapeWork) bool {

lib/promscrape/scrapework.go

@@ -1,13 +1,12 @@
 package promscrape

 import (
-    "crypto/tls"
-    "crypto/x509"
     "math/rand"
     "time"

     "github.com/VictoriaMetrics/VictoriaMetrics/lib/bytesutil"
     "github.com/VictoriaMetrics/VictoriaMetrics/lib/logger"
+    "github.com/VictoriaMetrics/VictoriaMetrics/lib/promauth"
     "github.com/VictoriaMetrics/VictoriaMetrics/lib/prompbmarshal"
     "github.com/VictoriaMetrics/VictoriaMetrics/lib/promrelabel"
     parser "github.com/VictoriaMetrics/VictoriaMetrics/lib/protoparser/prometheus"
@@ -48,16 +47,8 @@ type ScrapeWork struct {
     // See also https://prometheus.io/docs/concepts/jobs_instances/
     Labels []prompbmarshal.Label

-    // Optional `Authorization` header.
-    //
-    // It may contain `Basic ....` or `Bearer ....` string.
-    Authorization string
-
-    // Optional TLS config
-    TLSRootCA             *x509.CertPool
-    TLSCertificate        *tls.Certificate
-    TLSServerName         string
-    TLSInsecureSkipVerify bool
+    // Auth config
+    AuthConfig *promauth.Config

     // Optional `metric_relabel_configs`.
     MetricRelabelConfigs []promrelabel.ParsedRelabelConfig

lib/promscrape testdata (file_sd target list)

@@ -1 +1 @@
-- targets: ["foo", ""]
+- targets: [""]