package promscrape

import (
	"flag"
	"fmt"
	"io/ioutil"
	"net/url"
	"os"
	"path/filepath"
	"strings"
	"sync/atomic"
	"time"

	"github.com/VictoriaMetrics/VictoriaMetrics/lib/envtemplate"
	"github.com/VictoriaMetrics/VictoriaMetrics/lib/logger"
	"github.com/VictoriaMetrics/VictoriaMetrics/lib/promauth"
	"github.com/VictoriaMetrics/VictoriaMetrics/lib/prompbmarshal"
	"github.com/VictoriaMetrics/VictoriaMetrics/lib/promrelabel"
	"github.com/VictoriaMetrics/VictoriaMetrics/lib/promscrape/discovery/consul"
	"github.com/VictoriaMetrics/VictoriaMetrics/lib/promscrape/discovery/dns"
	"github.com/VictoriaMetrics/VictoriaMetrics/lib/promscrape/discovery/dockerswarm"
	"github.com/VictoriaMetrics/VictoriaMetrics/lib/promscrape/discovery/ec2"
	"github.com/VictoriaMetrics/VictoriaMetrics/lib/promscrape/discovery/gce"
	"github.com/VictoriaMetrics/VictoriaMetrics/lib/promscrape/discovery/kubernetes"
	"github.com/VictoriaMetrics/VictoriaMetrics/lib/promscrape/discovery/openstack"
	"gopkg.in/yaml.v2"
)

var (
	strictParse = flag.Bool("promscrape.config.strictParse", false, "Whether to allow only supported fields in '-promscrape.config'. "+
		"This option may be used for detecting errors in the '-promscrape.config' file")
	dryRun = flag.Bool("promscrape.config.dryRun", false, "Checks -promscrape.config file for errors and unsupported fields and then exits. "+
		"Returns non-zero exit code on parsing errors and emits these errors to stderr. "+
		"Pass -loggerLevel=ERROR if you don't need to see info messages in the output")
)
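
// Illustrative invocation of a config check using the flags above. The binary name
// and config path are hypothetical; only the flag names come from this package:
//
//	vmagent -promscrape.config=/etc/vmagent/prometheus.yml -promscrape.config.dryRun -loggerLevel=ERROR
//
// A zero exit code means the config parsed cleanly; on parsing errors a non-zero
// exit code is returned and the errors are written to stderr.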

// Config represents essential parts from Prometheus config defined at https://prometheus.io/docs/prometheus/latest/configuration/configuration/
type Config struct {
	Global        GlobalConfig   `yaml:"global"`
	ScrapeConfigs []ScrapeConfig `yaml:"scrape_configs"`

	// This is set to the directory from where the config has been loaded.
	baseDir string
}

// GlobalConfig represents essential parts for `global` section of Prometheus config.
//
// See https://prometheus.io/docs/prometheus/latest/configuration/configuration/
type GlobalConfig struct {
	ScrapeInterval time.Duration     `yaml:"scrape_interval"`
	ScrapeTimeout  time.Duration     `yaml:"scrape_timeout"`
	ExternalLabels map[string]string `yaml:"external_labels"`
}

// ScrapeConfig represents essential parts for `scrape_config` section of Prometheus config.
//
// See https://prometheus.io/docs/prometheus/latest/configuration/configuration/#scrape_config
type ScrapeConfig struct {
	JobName              string                      `yaml:"job_name"`
	ScrapeInterval       time.Duration               `yaml:"scrape_interval"`
	ScrapeTimeout        time.Duration               `yaml:"scrape_timeout"`
	MetricsPath          string                      `yaml:"metrics_path"`
	HonorLabels          bool                        `yaml:"honor_labels"`
	HonorTimestamps      bool                        `yaml:"honor_timestamps"`
	Scheme               string                      `yaml:"scheme"`
	Params               map[string][]string         `yaml:"params"`
	BasicAuth            *promauth.BasicAuthConfig   `yaml:"basic_auth"`
	BearerToken          string                      `yaml:"bearer_token"`
	BearerTokenFile      string                      `yaml:"bearer_token_file"`
	TLSConfig            *promauth.TLSConfig         `yaml:"tls_config"`
	StaticConfigs        []StaticConfig              `yaml:"static_configs"`
	FileSDConfigs        []FileSDConfig              `yaml:"file_sd_configs"`
	KubernetesSDConfigs  []kubernetes.SDConfig       `yaml:"kubernetes_sd_configs"`
	OpenStackSDConfigs   []openstack.SDConfig        `yaml:"openstack_sd_configs"`
	ConsulSDConfigs      []consul.SDConfig           `yaml:"consul_sd_configs"`
	DockerSwarmConfigs   []dockerswarm.SDConfig      `yaml:"dockerswarm_sd_configs"`
	DNSSDConfigs         []dns.SDConfig              `yaml:"dns_sd_configs"`
	EC2SDConfigs         []ec2.SDConfig              `yaml:"ec2_sd_configs"`
	GCESDConfigs         []gce.SDConfig              `yaml:"gce_sd_configs"`
	RelabelConfigs       []promrelabel.RelabelConfig `yaml:"relabel_configs"`
	MetricRelabelConfigs []promrelabel.RelabelConfig `yaml:"metric_relabel_configs"`
	SampleLimit          int                         `yaml:"sample_limit"`

	// These options are supported only by lib/promscrape.
	DisableCompression bool `yaml:"disable_compression"`
	DisableKeepAlive   bool `yaml:"disable_keepalive"`
	StreamParse        bool `yaml:"stream_parse"`

	// This is set in loadConfig
	swc *scrapeWorkConfig
}
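
// The struct above maps directly onto entries of the `scrape_configs` section in the
// Prometheus-compatible config file. A minimal illustrative entry (job name, target
// and label values are hypothetical):
//
//	scrape_configs:
//	  - job_name: node-exporter
//	    scrape_interval: 30s
//	    metrics_path: /metrics
//	    static_configs:
//	      - targets: ["localhost:9100"]
//	        labels:
//	          env: dev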

// FileSDConfig represents file-based service discovery config.
//
// See https://prometheus.io/docs/prometheus/latest/configuration/configuration/#file_sd_config
type FileSDConfig struct {
	Files []string `yaml:"files"`
	// `refresh_interval` is ignored. See `-promscrape.fileSDCheckInterval`
}
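
// An illustrative `file_sd_configs` entry matching the struct above (file paths are
// hypothetical). Glob patterns in the `files` list are expanded via filepath.Glob,
// as implemented in FileSDConfig.appendScrapeWork below:
//
//	scrape_configs:
//	  - job_name: file-sd
//	    file_sd_configs:
//	      - files: ["targets/*.yml", "/etc/prometheus/extra_targets.yml"]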

// StaticConfig represents essential parts for `static_config` section of Prometheus config.
//
// See https://prometheus.io/docs/prometheus/latest/configuration/configuration/#static_config
type StaticConfig struct {
	Targets []string          `yaml:"targets"`
	Labels  map[string]string `yaml:"labels"`
}
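
// Files referenced from `file_sd_configs` are parsed by loadStaticConfigs below into
// a list of StaticConfig entries. An illustrative target file (hosts and labels are
// hypothetical):
//
//	- targets: ["10.0.0.1:9100", "10.0.0.2:9100"]
//	  labels:
//	    datacenter: dc1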

func loadStaticConfigs(path string) ([]StaticConfig, error) {
	data, err := ioutil.ReadFile(path)
	if err != nil {
		return nil, fmt.Errorf("cannot read `static_configs` from %q: %w", path, err)
	}
	data = envtemplate.Replace(data)
	var stcs []StaticConfig
	if err := yaml.UnmarshalStrict(data, &stcs); err != nil {
		return nil, fmt.Errorf("cannot unmarshal `static_configs` from %q: %w", path, err)
	}
	return stcs, nil
}

// loadConfig loads Prometheus config from the given path.
func loadConfig(path string) (cfg *Config, data []byte, err error) {
	data, err = ioutil.ReadFile(path)
	if err != nil {
		return nil, nil, fmt.Errorf("cannot read Prometheus config from %q: %w", path, err)
	}
	var cfgObj Config
	if err := cfgObj.parse(data, path); err != nil {
		return nil, nil, fmt.Errorf("cannot parse Prometheus config from %q: %w", path, err)
	}
	if *dryRun {
		// This is a dirty hack for checking Prometheus config only.
		// See https://github.com/VictoriaMetrics/VictoriaMetrics/issues/362
		// and https://github.com/VictoriaMetrics/VictoriaMetrics/issues/508 for details.
		logger.Infof("Success: the config at %q has no errors; exiting with 0 status code", path)
		os.Exit(0)
	}
	return &cfgObj, data, nil
}

func (cfg *Config) parse(data []byte, path string) error {
	if err := unmarshalMaybeStrict(data, cfg); err != nil {
		return fmt.Errorf("cannot unmarshal data: %w", err)
	}
	absPath, err := filepath.Abs(path)
	if err != nil {
		return fmt.Errorf("cannot obtain abs path for %q: %w", path, err)
	}
	cfg.baseDir = filepath.Dir(absPath)
	for i := range cfg.ScrapeConfigs {
		sc := &cfg.ScrapeConfigs[i]
		swc, err := getScrapeWorkConfig(sc, cfg.baseDir, &cfg.Global)
		if err != nil {
			return fmt.Errorf("cannot parse `scrape_config` #%d: %w", i+1, err)
		}
		sc.swc = swc
	}
	return nil
}

func unmarshalMaybeStrict(data []byte, dst interface{}) error {
	data = envtemplate.Replace(data)
	var err error
	if *strictParse || *dryRun {
		err = yaml.UnmarshalStrict(data, dst)
	} else {
		err = yaml.Unmarshal(data, dst)
	}
	return err
}
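
// envtemplate.Replace above expands environment variable references in the config
// before YAML parsing. A minimal illustrative sketch, assuming the `%{VAR}`
// placeholder syntax provided by lib/envtemplate (the variable name is hypothetical):
//
//	scrape_configs:
//	  - job_name: app
//	    static_configs:
//	      - targets: ["%{APP_HOST}:8080"]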

func getSWSByJob(sws []ScrapeWork) map[string][]ScrapeWork {
	m := make(map[string][]ScrapeWork)
	for _, sw := range sws {
		m[sw.jobNameOriginal] = append(m[sw.jobNameOriginal], sw)
	}
	return m
}

// getKubernetesSDScrapeWork returns `kubernetes_sd_configs` ScrapeWork from cfg.
func (cfg *Config) getKubernetesSDScrapeWork(prev []ScrapeWork) []ScrapeWork {
	swsPrevByJob := getSWSByJob(prev)
	var dst []ScrapeWork
	for i := range cfg.ScrapeConfigs {
		sc := &cfg.ScrapeConfigs[i]
		dstLen := len(dst)
		ok := true
		for j := range sc.KubernetesSDConfigs {
			sdc := &sc.KubernetesSDConfigs[j]
			var okLocal bool
			dst, okLocal = appendKubernetesScrapeWork(dst, sdc, cfg.baseDir, sc.swc)
			if ok {
				ok = okLocal
			}
		}
		if ok {
			continue
		}
		swsPrev := swsPrevByJob[sc.swc.jobName]
		if len(swsPrev) > 0 {
			logger.Errorf("there were errors when discovering kubernetes targets for job %q, so preserving the previous targets", sc.swc.jobName)
			dst = append(dst[:dstLen], swsPrev...)
		}
	}
	return dst
}

// getOpenStackSDScrapeWork returns `openstack_sd_configs` ScrapeWork from cfg.
func (cfg *Config) getOpenStackSDScrapeWork(prev []ScrapeWork) []ScrapeWork {
	swsPrevByJob := getSWSByJob(prev)
	var dst []ScrapeWork
	for i := range cfg.ScrapeConfigs {
		sc := &cfg.ScrapeConfigs[i]
		dstLen := len(dst)
		ok := true
		for j := range sc.OpenStackSDConfigs {
			sdc := &sc.OpenStackSDConfigs[j]
			var okLocal bool
			dst, okLocal = appendOpenstackScrapeWork(dst, sdc, cfg.baseDir, sc.swc)
			if ok {
				ok = okLocal
			}
		}
		if ok {
			continue
		}
		swsPrev := swsPrevByJob[sc.swc.jobName]
		if len(swsPrev) > 0 {
			logger.Errorf("there were errors when discovering openstack targets for job %q, so preserving the previous targets", sc.swc.jobName)
			dst = append(dst[:dstLen], swsPrev...)
		}
	}
	return dst
}

// getDockerSwarmSDScrapeWork returns `dockerswarm_sd_configs` ScrapeWork from cfg.
func (cfg *Config) getDockerSwarmSDScrapeWork(prev []ScrapeWork) []ScrapeWork {
	swsPrevByJob := getSWSByJob(prev)
	var dst []ScrapeWork
	for i := range cfg.ScrapeConfigs {
		sc := &cfg.ScrapeConfigs[i]
		dstLen := len(dst)
		ok := true
		for j := range sc.DockerSwarmConfigs {
			sdc := &sc.DockerSwarmConfigs[j]
			var okLocal bool
			dst, okLocal = appendDockerSwarmScrapeWork(dst, sdc, cfg.baseDir, sc.swc)
			if ok {
				ok = okLocal
			}
		}
		if ok {
			continue
		}
		swsPrev := swsPrevByJob[sc.swc.jobName]
		if len(swsPrev) > 0 {
			logger.Errorf("there were errors when discovering dockerswarm targets for job %q, so preserving the previous targets", sc.swc.jobName)
			dst = append(dst[:dstLen], swsPrev...)
		}
	}
	return dst
}

// getConsulSDScrapeWork returns `consul_sd_configs` ScrapeWork from cfg.
func (cfg *Config) getConsulSDScrapeWork(prev []ScrapeWork) []ScrapeWork {
	swsPrevByJob := getSWSByJob(prev)
	var dst []ScrapeWork
	for i := range cfg.ScrapeConfigs {
		sc := &cfg.ScrapeConfigs[i]
		dstLen := len(dst)
		ok := true
		for j := range sc.ConsulSDConfigs {
			sdc := &sc.ConsulSDConfigs[j]
			var okLocal bool
			dst, okLocal = appendConsulScrapeWork(dst, sdc, cfg.baseDir, sc.swc)
			if ok {
				ok = okLocal
			}
		}
		if ok {
			continue
		}
		swsPrev := swsPrevByJob[sc.swc.jobName]
		if len(swsPrev) > 0 {
			logger.Errorf("there were errors when discovering consul targets for job %q, so preserving the previous targets", sc.swc.jobName)
			dst = append(dst[:dstLen], swsPrev...)
		}
	}
	return dst
}

// getDNSSDScrapeWork returns `dns_sd_configs` ScrapeWork from cfg.
func (cfg *Config) getDNSSDScrapeWork(prev []ScrapeWork) []ScrapeWork {
	swsPrevByJob := getSWSByJob(prev)
	var dst []ScrapeWork
	for i := range cfg.ScrapeConfigs {
		sc := &cfg.ScrapeConfigs[i]
		dstLen := len(dst)
		ok := true
		for j := range sc.DNSSDConfigs {
			sdc := &sc.DNSSDConfigs[j]
			var okLocal bool
			dst, okLocal = appendDNSScrapeWork(dst, sdc, sc.swc)
			if ok {
				ok = okLocal
			}
		}
		if ok {
			continue
		}
		swsPrev := swsPrevByJob[sc.swc.jobName]
		if len(swsPrev) > 0 {
			logger.Errorf("there were errors when discovering dns targets for job %q, so preserving the previous targets", sc.swc.jobName)
			dst = append(dst[:dstLen], swsPrev...)
		}
	}
	return dst
}

// getEC2SDScrapeWork returns `ec2_sd_configs` ScrapeWork from cfg.
func (cfg *Config) getEC2SDScrapeWork(prev []ScrapeWork) []ScrapeWork {
	swsPrevByJob := getSWSByJob(prev)
	var dst []ScrapeWork
	for i := range cfg.ScrapeConfigs {
		sc := &cfg.ScrapeConfigs[i]
		dstLen := len(dst)
		ok := true
		for j := range sc.EC2SDConfigs {
			sdc := &sc.EC2SDConfigs[j]
			var okLocal bool
			dst, okLocal = appendEC2ScrapeWork(dst, sdc, sc.swc)
			if ok {
				ok = okLocal
			}
		}
		if ok {
			continue
		}
		swsPrev := swsPrevByJob[sc.swc.jobName]
		if len(swsPrev) > 0 {
			logger.Errorf("there were errors when discovering ec2 targets for job %q, so preserving the previous targets", sc.swc.jobName)
			dst = append(dst[:dstLen], swsPrev...)
		}
	}
	return dst
}

// getGCESDScrapeWork returns `gce_sd_configs` ScrapeWork from cfg.
func (cfg *Config) getGCESDScrapeWork(prev []ScrapeWork) []ScrapeWork {
	swsPrevByJob := getSWSByJob(prev)
	var dst []ScrapeWork
	for i := range cfg.ScrapeConfigs {
		sc := &cfg.ScrapeConfigs[i]
		dstLen := len(dst)
		ok := true
		for j := range sc.GCESDConfigs {
			sdc := &sc.GCESDConfigs[j]
			var okLocal bool
			dst, okLocal = appendGCEScrapeWork(dst, sdc, sc.swc)
			if ok {
				ok = okLocal
			}
		}
		if ok {
			continue
		}
		swsPrev := swsPrevByJob[sc.swc.jobName]
		if len(swsPrev) > 0 {
			logger.Errorf("there were errors when discovering gce targets for job %q, so preserving the previous targets", sc.swc.jobName)
			dst = append(dst[:dstLen], swsPrev...)
		}
	}
	return dst
}

// getFileSDScrapeWork returns `file_sd_configs` ScrapeWork from cfg.
func (cfg *Config) getFileSDScrapeWork(prev []ScrapeWork) []ScrapeWork {
	// Create a map for the previous scrape work.
	swsMapPrev := make(map[string][]ScrapeWork)
	for i := range prev {
		sw := &prev[i]
		filepath := promrelabel.GetLabelValueByName(sw.Labels, "__vm_filepath")
		if len(filepath) == 0 {
			logger.Panicf("BUG: missing `__vm_filepath` label")
		} else {
			swsMapPrev[filepath] = append(swsMapPrev[filepath], *sw)
		}
	}
	var dst []ScrapeWork
	for i := range cfg.ScrapeConfigs {
		sc := &cfg.ScrapeConfigs[i]
		for j := range sc.FileSDConfigs {
			sdc := &sc.FileSDConfigs[j]
			dst = sdc.appendScrapeWork(dst, swsMapPrev, cfg.baseDir, sc.swc)
		}
	}
	return dst
}

// getStaticScrapeWork returns `static_configs` ScrapeWork from cfg.
func (cfg *Config) getStaticScrapeWork() []ScrapeWork {
	var dst []ScrapeWork
	for i := range cfg.ScrapeConfigs {
		sc := &cfg.ScrapeConfigs[i]
		for j := range sc.StaticConfigs {
			stc := &sc.StaticConfigs[j]
			dst = stc.appendScrapeWork(dst, sc.swc, nil)
		}
	}
	return dst
}

func getScrapeWorkConfig(sc *ScrapeConfig, baseDir string, globalCfg *GlobalConfig) (*scrapeWorkConfig, error) {
	jobName := sc.JobName
	if jobName == "" {
		return nil, fmt.Errorf("missing `job_name` field in `scrape_config`")
	}
	scrapeInterval := sc.ScrapeInterval
	if scrapeInterval <= 0 {
		scrapeInterval = globalCfg.ScrapeInterval
		if scrapeInterval <= 0 {
			scrapeInterval = defaultScrapeInterval
		}
	}
	scrapeTimeout := sc.ScrapeTimeout
	if scrapeTimeout <= 0 {
		scrapeTimeout = globalCfg.ScrapeTimeout
		if scrapeTimeout <= 0 {
			scrapeTimeout = defaultScrapeTimeout
		}
	}
	honorLabels := sc.HonorLabels
	honorTimestamps := sc.HonorTimestamps
	metricsPath := sc.MetricsPath
	if metricsPath == "" {
		metricsPath = "/metrics"
	}
	scheme := sc.Scheme
	if scheme == "" {
		scheme = "http"
	}
	if scheme != "http" && scheme != "https" {
		return nil, fmt.Errorf("unexpected `scheme` for `job_name` %q: %q; supported values: http or https", jobName, scheme)
	}
	params := sc.Params
	ac, err := promauth.NewConfig(baseDir, sc.BasicAuth, sc.BearerToken, sc.BearerTokenFile, sc.TLSConfig)
	if err != nil {
		return nil, fmt.Errorf("cannot parse auth config for `job_name` %q: %w", jobName, err)
	}
	var relabelConfigs []promrelabel.ParsedRelabelConfig
	relabelConfigs, err = promrelabel.ParseRelabelConfigs(relabelConfigs[:0], sc.RelabelConfigs)
	if err != nil {
		return nil, fmt.Errorf("cannot parse `relabel_configs` for `job_name` %q: %w", jobName, err)
	}
	var metricRelabelConfigs []promrelabel.ParsedRelabelConfig
	metricRelabelConfigs, err = promrelabel.ParseRelabelConfigs(metricRelabelConfigs[:0], sc.MetricRelabelConfigs)
	if err != nil {
		return nil, fmt.Errorf("cannot parse `metric_relabel_configs` for `job_name` %q: %w", jobName, err)
	}
	swc := &scrapeWorkConfig{
		scrapeInterval:       scrapeInterval,
		scrapeTimeout:        scrapeTimeout,
		jobName:              jobName,
		metricsPath:          metricsPath,
		scheme:               scheme,
		params:               params,
		authConfig:           ac,
		honorLabels:          honorLabels,
		honorTimestamps:      honorTimestamps,
		externalLabels:       globalCfg.ExternalLabels,
		relabelConfigs:       relabelConfigs,
		metricRelabelConfigs: metricRelabelConfigs,
		sampleLimit:          sc.SampleLimit,
		disableCompression:   sc.DisableCompression,
		disableKeepAlive:     sc.DisableKeepAlive,
		streamParse:          sc.StreamParse,
	}
	return swc, nil
}

type scrapeWorkConfig struct {
	scrapeInterval       time.Duration
	scrapeTimeout        time.Duration
	jobName              string
	metricsPath          string
	scheme               string
	params               map[string][]string
	authConfig           *promauth.Config
	honorLabels          bool
	honorTimestamps      bool
	externalLabels       map[string]string
	relabelConfigs       []promrelabel.ParsedRelabelConfig
	metricRelabelConfigs []promrelabel.ParsedRelabelConfig
	sampleLimit          int
	disableCompression   bool
	disableKeepAlive     bool
	streamParse          bool
}

func appendKubernetesScrapeWork(dst []ScrapeWork, sdc *kubernetes.SDConfig, baseDir string, swc *scrapeWorkConfig) ([]ScrapeWork, bool) {
	targetLabels, err := kubernetes.GetLabels(sdc, baseDir)
	if err != nil {
		logger.Errorf("error when discovering kubernetes targets for `job_name` %q: %s; skipping it", swc.jobName, err)
		return dst, false
	}
	return appendScrapeWorkForTargetLabels(dst, swc, targetLabels, "kubernetes_sd_config"), true
}

func appendOpenstackScrapeWork(dst []ScrapeWork, sdc *openstack.SDConfig, baseDir string, swc *scrapeWorkConfig) ([]ScrapeWork, bool) {
	targetLabels, err := openstack.GetLabels(sdc, baseDir)
	if err != nil {
		logger.Errorf("error when discovering openstack targets for `job_name` %q: %s; skipping it", swc.jobName, err)
		return dst, false
	}
	return appendScrapeWorkForTargetLabels(dst, swc, targetLabels, "openstack_sd_config"), true
}

func appendDockerSwarmScrapeWork(dst []ScrapeWork, sdc *dockerswarm.SDConfig, baseDir string, swc *scrapeWorkConfig) ([]ScrapeWork, bool) {
	targetLabels, err := dockerswarm.GetLabels(sdc, baseDir)
	if err != nil {
		logger.Errorf("error when discovering dockerswarm targets for `job_name` %q: %s; skipping it", swc.jobName, err)
		return dst, false
	}
	return appendScrapeWorkForTargetLabels(dst, swc, targetLabels, "dockerswarm_sd_config"), true
}

func appendConsulScrapeWork(dst []ScrapeWork, sdc *consul.SDConfig, baseDir string, swc *scrapeWorkConfig) ([]ScrapeWork, bool) {
	targetLabels, err := consul.GetLabels(sdc, baseDir)
	if err != nil {
		logger.Errorf("error when discovering consul targets for `job_name` %q: %s; skipping it", swc.jobName, err)
		return dst, false
	}
	return appendScrapeWorkForTargetLabels(dst, swc, targetLabels, "consul_sd_config"), true
}

func appendDNSScrapeWork(dst []ScrapeWork, sdc *dns.SDConfig, swc *scrapeWorkConfig) ([]ScrapeWork, bool) {
	targetLabels, err := dns.GetLabels(sdc)
	if err != nil {
		logger.Errorf("error when discovering dns targets for `job_name` %q: %s; skipping it", swc.jobName, err)
		return dst, false
	}
	return appendScrapeWorkForTargetLabels(dst, swc, targetLabels, "dns_sd_config"), true
}

func appendEC2ScrapeWork(dst []ScrapeWork, sdc *ec2.SDConfig, swc *scrapeWorkConfig) ([]ScrapeWork, bool) {
	targetLabels, err := ec2.GetLabels(sdc)
	if err != nil {
		logger.Errorf("error when discovering ec2 targets for `job_name` %q: %s; skipping it", swc.jobName, err)
		return dst, false
	}
	return appendScrapeWorkForTargetLabels(dst, swc, targetLabels, "ec2_sd_config"), true
}

func appendGCEScrapeWork(dst []ScrapeWork, sdc *gce.SDConfig, swc *scrapeWorkConfig) ([]ScrapeWork, bool) {
	targetLabels, err := gce.GetLabels(sdc)
	if err != nil {
		logger.Errorf("error when discovering gce targets for `job_name` %q: %s; skipping it", swc.jobName, err)
		return dst, false
	}
	return appendScrapeWorkForTargetLabels(dst, swc, targetLabels, "gce_sd_config"), true
}

func appendScrapeWorkForTargetLabels(dst []ScrapeWork, swc *scrapeWorkConfig, targetLabels []map[string]string, sectionName string) []ScrapeWork {
	for _, metaLabels := range targetLabels {
		target := metaLabels["__address__"]
		var err error
		dst, err = appendScrapeWork(dst, swc, target, nil, metaLabels)
		if err != nil {
			logger.Errorf("error when parsing `%s` target %q for `job_name` %q: %s; skipping it", sectionName, target, swc.jobName, err)
			continue
		}
	}
	return dst
}

func (sdc *FileSDConfig) appendScrapeWork(dst []ScrapeWork, swsMapPrev map[string][]ScrapeWork, baseDir string, swc *scrapeWorkConfig) []ScrapeWork {
	for _, file := range sdc.Files {
		pathPattern := getFilepath(baseDir, file)
		paths := []string{pathPattern}
		if strings.Contains(pathPattern, "*") {
			var err error
			paths, err = filepath.Glob(pathPattern)
			if err != nil {
				// Do not return this error, since other files may contain valid scrape configs.
				logger.Errorf("invalid pattern %q in `files` section: %s; skipping it", file, err)
				continue
			}
		}
		for _, path := range paths {
			stcs, err := loadStaticConfigs(path)
			if err != nil {
				// Do not return this error, since other paths may contain valid scrape configs.
				if sws := swsMapPrev[path]; sws != nil {
					// Re-use the previous valid scrape work for this path.
					logger.Errorf("keeping the previously loaded `static_configs` from %q because of error when re-loading the file: %s", path, err)
					dst = append(dst, sws...)
				} else {
					logger.Errorf("skipping loading `static_configs` from %q because of error: %s", path, err)
				}
				continue
			}
			pathShort := path
			if strings.HasPrefix(pathShort, baseDir) {
				pathShort = path[len(baseDir):]
				if len(pathShort) > 0 && pathShort[0] == filepath.Separator {
					pathShort = pathShort[1:]
				}
			}
			metaLabels := map[string]string{
				"__meta_filepath": pathShort,
				"__vm_filepath":   path, // This label is needed for internal promscrape logic
			}
			for i := range stcs {
				dst = stcs[i].appendScrapeWork(dst, swc, metaLabels)
			}
		}
	}
	return dst
}

func (stc *StaticConfig) appendScrapeWork(dst []ScrapeWork, swc *scrapeWorkConfig, metaLabels map[string]string) []ScrapeWork {
	for _, target := range stc.Targets {
		if target == "" {
			// Do not return this error, since other targets may be valid
			logger.Errorf("`static_configs` target for `job_name` %q cannot be empty; skipping it", swc.jobName)
			continue
		}
		var err error
		dst, err = appendScrapeWork(dst, swc, target, stc.Labels, metaLabels)
		if err != nil {
			// Do not return this error, since other targets may be valid
			logger.Errorf("error when parsing `static_configs` target %q for `job_name` %q: %s; skipping it", target, swc.jobName, err)
			continue
		}
	}
	return dst
}

func appendScrapeWork(dst []ScrapeWork, swc *scrapeWorkConfig, target string, extraLabels, metaLabels map[string]string) ([]ScrapeWork, error) {
	labels := mergeLabels(swc.jobName, swc.scheme, target, swc.metricsPath, extraLabels, swc.externalLabels, metaLabels, swc.params)
	originalLabels := append([]prompbmarshal.Label{}, labels...)
	promrelabel.SortLabels(originalLabels)
	labels = promrelabel.ApplyRelabelConfigs(labels, 0, swc.relabelConfigs, false)
	labels = promrelabel.RemoveMetaLabels(labels[:0], labels)
	// Remove references to already deleted labels, so GC could clean strings for label name and label value past len(labels).
	// This should reduce memory usage when relabeling creates big number of temporary labels with long names and/or values.
	// See https://github.com/VictoriaMetrics/VictoriaMetrics/issues/825 for details.
	labels = append([]prompbmarshal.Label{}, labels...)

	if len(labels) == 0 {
		// Drop target without labels.
		droppedTargetsMap.Register(originalLabels)
		return dst, nil
	}
	// See https://www.robustperception.io/life-of-a-label
	schemeRelabeled := promrelabel.GetLabelValueByName(labels, "__scheme__")
	if len(schemeRelabeled) == 0 {
		schemeRelabeled = "http"
	}
	addressRelabeled := promrelabel.GetLabelValueByName(labels, "__address__")
	if len(addressRelabeled) == 0 {
		// Drop target without scrape address.
		droppedTargetsMap.Register(originalLabels)
		return dst, nil
	}
	if strings.Contains(addressRelabeled, "/") {
		// Drop target with '/'
		droppedTargetsMap.Register(originalLabels)
		return dst, nil
	}
	addressRelabeled = addMissingPort(schemeRelabeled, addressRelabeled)
	metricsPathRelabeled := promrelabel.GetLabelValueByName(labels, "__metrics_path__")
	if metricsPathRelabeled == "" {
		metricsPathRelabeled = "/metrics"
	}
	if !strings.HasPrefix(metricsPathRelabeled, "/") {
		metricsPathRelabeled = "/" + metricsPathRelabeled
	}
	paramsRelabeled := getParamsFromLabels(labels, swc.params)
	optionalQuestion := "?"
	if len(paramsRelabeled) == 0 || strings.Contains(metricsPathRelabeled, "?") {
		optionalQuestion = ""
	}
	paramsStr := url.Values(paramsRelabeled).Encode()
	scrapeURL := fmt.Sprintf("%s://%s%s%s%s", schemeRelabeled, addressRelabeled, metricsPathRelabeled, optionalQuestion, paramsStr)
	if _, err := url.Parse(scrapeURL); err != nil {
		return dst, fmt.Errorf("invalid url %q for scheme=%q (%q), target=%q (%q), metrics_path=%q (%q) for `job_name` %q: %w",
			scrapeURL, swc.scheme, schemeRelabeled, target, addressRelabeled, swc.metricsPath, metricsPathRelabeled, swc.jobName, err)
	}
	// Set missing "instance" label according to https://www.robustperception.io/life-of-a-label
	if promrelabel.GetLabelByName(labels, "instance") == nil {
		labels = append(labels, prompbmarshal.Label{
			Name:  "instance",
			Value: addressRelabeled,
		})
		promrelabel.SortLabels(labels)
	}
	dst = append(dst, ScrapeWork{
		ID:                   atomic.AddUint64(&nextScrapeWorkID, 1),
		ScrapeURL:            scrapeURL,
		ScrapeInterval:       swc.scrapeInterval,
		ScrapeTimeout:        swc.scrapeTimeout,
		HonorLabels:          swc.honorLabels,
		HonorTimestamps:      swc.honorTimestamps,
		OriginalLabels:       originalLabels,
		Labels:               labels,
		AuthConfig:           swc.authConfig,
		MetricRelabelConfigs: swc.metricRelabelConfigs,
		SampleLimit:          swc.sampleLimit,
		DisableCompression:   swc.disableCompression,
		DisableKeepAlive:     swc.disableKeepAlive,
		StreamParse:          swc.streamParse,

		jobNameOriginal: swc.jobName,
	})
	return dst, nil
}
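
// To illustrate the URL construction above with hypothetical values: a target
// "10.0.0.5" scraped with scheme "https", metrics_path "metrics" and
// params {"module": ["http_2xx"]} produces the scrape URL
//
//	https://10.0.0.5:443/metrics?module=http_2xx
//
// since the missing port is derived from the scheme via addMissingPort and a
// leading "/" is prepended to the metrics path.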

// Each ScrapeWork has an ID, which is used for locating it when updating its status.
var nextScrapeWorkID uint64

func getParamsFromLabels(labels []prompbmarshal.Label, paramsOrig map[string][]string) map[string][]string {
	// See https://www.robustperception.io/life-of-a-label
	m := make(map[string][]string)
	for i := range labels {
		label := &labels[i]
		if !strings.HasPrefix(label.Name, "__param_") {
			continue
		}
		name := label.Name[len("__param_"):]
		values := []string{label.Value}
		if p := paramsOrig[name]; len(p) > 1 {
			values = append(values, p[1:]...)
		}
		m[name] = values
	}
	return m
}

func mergeLabels(job, scheme, target, metricsPath string, extraLabels, externalLabels, metaLabels map[string]string, params map[string][]string) []prompbmarshal.Label {
	// See https://prometheus.io/docs/prometheus/latest/configuration/configuration/#relabel_config
	m := make(map[string]string)
	for k, v := range externalLabels {
		m[k] = v
	}
	m["job"] = job
	m["__address__"] = target
	m["__scheme__"] = scheme
	m["__metrics_path__"] = metricsPath
	for k, args := range params {
		if len(args) == 0 {
			continue
		}
		k = "__param_" + k
		v := args[0]
		m[k] = v
	}
	for k, v := range extraLabels {
		m[k] = v
	}
	for k, v := range metaLabels {
		m[k] = v
	}
	result := make([]prompbmarshal.Label, 0, len(m))
	for k, v := range m {
		result = append(result, prompbmarshal.Label{
			Name:  k,
			Value: v,
		})
	}
	return result
}
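
// Note the precedence implied by the assignment order above: external_labels are
// written first and may then be overridden by the built-in labels, the per-target
// extraLabels and finally the discovery metaLabels. For example (hypothetical
// values), with external_labels {env: prod} and a static target labeled
// {env: staging}, the resulting target carries env="staging".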

func getFilepath(baseDir, path string) string {
	if filepath.IsAbs(path) {
		return path
	}
	return filepath.Join(baseDir, path)
}

func addMissingPort(scheme, target string) string {
	if strings.Contains(target, ":") {
		return target
	}
	if scheme == "https" {
		target += ":443"
	} else {
		target += ":80"
	}
	return target
}
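
// Examples of the behavior above (host names are illustrative):
//
//	addMissingPort("http", "foo.bar")       // "foo.bar:80"
//	addMissingPort("https", "foo.bar")      // "foo.bar:443"
//	addMissingPort("https", "foo.bar:9443") // "foo.bar:9443" (already has a port, left as-is)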

const (
	defaultScrapeInterval = time.Minute
	defaultScrapeTimeout  = 10 * time.Second
)