package promscrape

import (
	"flag"
	"fmt"
	"io/ioutil"
	"net/url"
	"path/filepath"
	"strings"
	"sync"
	"time"

	"github.com/VictoriaMetrics/VictoriaMetrics/lib/envtemplate"
	"github.com/VictoriaMetrics/VictoriaMetrics/lib/logger"
	"github.com/VictoriaMetrics/VictoriaMetrics/lib/netutil"
	"github.com/VictoriaMetrics/VictoriaMetrics/lib/promauth"
	"github.com/VictoriaMetrics/VictoriaMetrics/lib/prompbmarshal"
	"github.com/VictoriaMetrics/VictoriaMetrics/lib/promrelabel"
	"github.com/VictoriaMetrics/VictoriaMetrics/lib/promscrape/discovery/consul"
	"github.com/VictoriaMetrics/VictoriaMetrics/lib/promscrape/discovery/dns"
	"github.com/VictoriaMetrics/VictoriaMetrics/lib/promscrape/discovery/dockerswarm"
	"github.com/VictoriaMetrics/VictoriaMetrics/lib/promscrape/discovery/ec2"
	"github.com/VictoriaMetrics/VictoriaMetrics/lib/promscrape/discovery/eureka"
	"github.com/VictoriaMetrics/VictoriaMetrics/lib/promscrape/discovery/gce"
	"github.com/VictoriaMetrics/VictoriaMetrics/lib/promscrape/discovery/kubernetes"
	"github.com/VictoriaMetrics/VictoriaMetrics/lib/promscrape/discovery/openstack"

	"gopkg.in/yaml.v2"
)

var (
	strictParse = flag.Bool("promscrape.config.strictParse", false, "Whether to allow only supported fields in -promscrape.config. "+
		"By default unsupported fields are silently skipped")
	dryRun = flag.Bool("promscrape.config.dryRun", false, "Checks -promscrape.config file for errors and unsupported fields and then exits. "+
		"Returns non-zero exit code on parsing errors and emits these errors to stderr. "+
		"See also -promscrape.config.strictParse command-line flag. "+
		"Pass -loggerLevel=ERROR if you don't need to see info messages in the output.")
	dropOriginalLabels = flag.Bool("promscrape.dropOriginalLabels", false, "Whether to drop original labels for scrape targets at /targets and /api/v1/targets pages. "+
		"This may be needed for reducing memory usage when original labels for a big number of scrape targets occupy big amounts of memory. "+
		"Note that this reduces debuggability for improper per-target relabeling configs")
)

// Config represents essential parts from Prometheus config defined at https://prometheus.io/docs/prometheus/latest/configuration/configuration/
type Config struct {
	Global        GlobalConfig   `yaml:"global"`
	ScrapeConfigs []ScrapeConfig `yaml:"scrape_configs"`

	// This is set to the directory from where the config has been loaded.
	baseDir string
}
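
// For illustration, a minimal config parsed into Config could look like this
// (the job name and target below are made up):
//
//	global:
//	  scrape_interval: 10s
//	scrape_configs:
//	  - job_name: node
//	    static_configs:
//	      - targets: ["localhost:9100"]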

// GlobalConfig represents essential parts for `global` section of Prometheus config.
//
// See https://prometheus.io/docs/prometheus/latest/configuration/configuration/
type GlobalConfig struct {
	ScrapeInterval time.Duration     `yaml:"scrape_interval,omitempty"`
	ScrapeTimeout  time.Duration     `yaml:"scrape_timeout,omitempty"`
	ExternalLabels map[string]string `yaml:"external_labels,omitempty"`
}

// ScrapeConfig represents essential parts for `scrape_config` section of Prometheus config.
//
// See https://prometheus.io/docs/prometheus/latest/configuration/configuration/#scrape_config
type ScrapeConfig struct {
	JobName              string                      `yaml:"job_name"`
	ScrapeInterval       time.Duration               `yaml:"scrape_interval,omitempty"`
	ScrapeTimeout        time.Duration               `yaml:"scrape_timeout,omitempty"`
	MetricsPath          string                      `yaml:"metrics_path,omitempty"`
	HonorLabels          bool                        `yaml:"honor_labels,omitempty"`
	HonorTimestamps      bool                        `yaml:"honor_timestamps,omitempty"`
	Scheme               string                      `yaml:"scheme,omitempty"`
	Params               map[string][]string         `yaml:"params,omitempty"`
	BasicAuth            *promauth.BasicAuthConfig   `yaml:"basic_auth,omitempty"`
	BearerToken          string                      `yaml:"bearer_token,omitempty"`
	BearerTokenFile      string                      `yaml:"bearer_token_file,omitempty"`
	ProxyURL             netutil.ProxyURL            `yaml:"proxy_url,omitempty"`
	TLSConfig            *promauth.TLSConfig         `yaml:"tls_config,omitempty"`
	StaticConfigs        []StaticConfig              `yaml:"static_configs,omitempty"`
	FileSDConfigs        []FileSDConfig              `yaml:"file_sd_configs,omitempty"`
	KubernetesSDConfigs  []kubernetes.SDConfig       `yaml:"kubernetes_sd_configs,omitempty"`
	OpenStackSDConfigs   []openstack.SDConfig        `yaml:"openstack_sd_configs,omitempty"`
	ConsulSDConfigs      []consul.SDConfig           `yaml:"consul_sd_configs,omitempty"`
	EurekaSDConfigs      []eureka.SDConfig           `yaml:"eureka_sd_configs,omitempty"`
	DockerSwarmConfigs   []dockerswarm.SDConfig      `yaml:"dockerswarm_sd_configs,omitempty"`
	DNSSDConfigs         []dns.SDConfig              `yaml:"dns_sd_configs,omitempty"`
	EC2SDConfigs         []ec2.SDConfig              `yaml:"ec2_sd_configs,omitempty"`
	GCESDConfigs         []gce.SDConfig              `yaml:"gce_sd_configs,omitempty"`
	RelabelConfigs       []promrelabel.RelabelConfig `yaml:"relabel_configs,omitempty"`
	MetricRelabelConfigs []promrelabel.RelabelConfig `yaml:"metric_relabel_configs,omitempty"`
	SampleLimit          int                         `yaml:"sample_limit,omitempty"`

	// These options are supported only by lib/promscrape.
	DisableCompression bool `yaml:"disable_compression,omitempty"`
	DisableKeepAlive   bool `yaml:"disable_keepalive,omitempty"`
	StreamParse        bool `yaml:"stream_parse,omitempty"`

	// This is set in loadConfig
	swc *scrapeWorkConfig
}

// FileSDConfig represents file-based service discovery config.
//
// See https://prometheus.io/docs/prometheus/latest/configuration/configuration/#file_sd_config
type FileSDConfig struct {
	Files []string `yaml:"files"`
	// `refresh_interval` is ignored. See `-promscrape.fileSDCheckInterval`
}

// StaticConfig represents essential parts for `static_config` section of Prometheus config.
//
// See https://prometheus.io/docs/prometheus/latest/configuration/configuration/#static_config
type StaticConfig struct {
	Targets []string          `yaml:"targets"`
	Labels  map[string]string `yaml:"labels,omitempty"`
}

func loadStaticConfigs(path string) ([]StaticConfig, error) {
	data, err := ioutil.ReadFile(path)
	if err != nil {
		return nil, fmt.Errorf("cannot read `static_configs` from %q: %w", path, err)
	}
	data = envtemplate.Replace(data)
	var stcs []StaticConfig
	if err := yaml.UnmarshalStrict(data, &stcs); err != nil {
		return nil, fmt.Errorf("cannot unmarshal `static_configs` from %q: %w", path, err)
	}
	return stcs, nil
}
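
// For illustration, a file passed to loadStaticConfigs is expected to contain a
// YAML list of `static_config` entries, e.g. (hypothetical targets):
//
//	- targets: ["host1:9100", "host2:9100"]
//	  labels:
//	    env: prod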

// loadConfig loads Prometheus config from the given path.
func loadConfig(path string) (cfg *Config, data []byte, err error) {
	data, err = ioutil.ReadFile(path)
	if err != nil {
		return nil, nil, fmt.Errorf("cannot read Prometheus config from %q: %w", path, err)
	}
	var cfgObj Config
	if err := cfgObj.parse(data, path); err != nil {
		return nil, nil, fmt.Errorf("cannot parse Prometheus config from %q: %w", path, err)
	}
	return &cfgObj, data, nil
}

// IsDryRun returns true if -promscrape.config.dryRun command-line flag is set
func IsDryRun() bool {
	return *dryRun
}

func (cfg *Config) parse(data []byte, path string) error {
	if err := unmarshalMaybeStrict(data, cfg); err != nil {
		return fmt.Errorf("cannot unmarshal data: %w", err)
	}
	absPath, err := filepath.Abs(path)
	if err != nil {
		return fmt.Errorf("cannot obtain abs path for %q: %w", path, err)
	}
	cfg.baseDir = filepath.Dir(absPath)
	for i := range cfg.ScrapeConfigs {
		sc := &cfg.ScrapeConfigs[i]
		swc, err := getScrapeWorkConfig(sc, cfg.baseDir, &cfg.Global)
		if err != nil {
			return fmt.Errorf("cannot parse `scrape_config` #%d: %w", i+1, err)
		}
		sc.swc = swc
	}
	return nil
}

func unmarshalMaybeStrict(data []byte, dst interface{}) error {
	data = envtemplate.Replace(data)
	var err error
	if *strictParse {
		err = yaml.UnmarshalStrict(data, dst)
	} else {
		err = yaml.Unmarshal(data, dst)
	}
	return err
}
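
// For example, given the config fragment below (an illustrative snippet), the
// unknown field `foo` is rejected when -promscrape.config.strictParse is set,
// while by default it is silently dropped:
//
//	scrape_configs:
//	  - job_name: test
//	    foo: bar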

func getSWSByJob(sws []*ScrapeWork) map[string][]*ScrapeWork {
	m := make(map[string][]*ScrapeWork)
	for _, sw := range sws {
		m[sw.jobNameOriginal] = append(m[sw.jobNameOriginal], sw)
	}
	return m
}

// getKubernetesSDScrapeWork returns `kubernetes_sd_configs` ScrapeWork from cfg.
func (cfg *Config) getKubernetesSDScrapeWork(prev []*ScrapeWork) []*ScrapeWork {
	swsPrevByJob := getSWSByJob(prev)
	dst := make([]*ScrapeWork, 0, len(prev))
	for i := range cfg.ScrapeConfigs {
		sc := &cfg.ScrapeConfigs[i]
		dstLen := len(dst)
		ok := true
		for j := range sc.KubernetesSDConfigs {
			sdc := &sc.KubernetesSDConfigs[j]
			var okLocal bool
			dst, okLocal = appendKubernetesScrapeWork(dst, sdc, cfg.baseDir, sc.swc)
			if ok {
				ok = okLocal
			}
		}
		if ok {
			continue
		}
		swsPrev := swsPrevByJob[sc.swc.jobName]
		if len(swsPrev) > 0 {
			logger.Errorf("there were errors when discovering kubernetes targets for job %q, so preserving the previous targets", sc.swc.jobName)
			dst = append(dst[:dstLen], swsPrev...)
		}
	}
	return dst
}

// getOpenStackSDScrapeWork returns `openstack_sd_configs` ScrapeWork from cfg.
func (cfg *Config) getOpenStackSDScrapeWork(prev []*ScrapeWork) []*ScrapeWork {
	swsPrevByJob := getSWSByJob(prev)
	dst := make([]*ScrapeWork, 0, len(prev))
	for i := range cfg.ScrapeConfigs {
		sc := &cfg.ScrapeConfigs[i]
		dstLen := len(dst)
		ok := true
		for j := range sc.OpenStackSDConfigs {
			sdc := &sc.OpenStackSDConfigs[j]
			var okLocal bool
			dst, okLocal = appendOpenstackScrapeWork(dst, sdc, cfg.baseDir, sc.swc)
			if ok {
				ok = okLocal
			}
		}
		if ok {
			continue
		}
		swsPrev := swsPrevByJob[sc.swc.jobName]
		if len(swsPrev) > 0 {
			logger.Errorf("there were errors when discovering openstack targets for job %q, so preserving the previous targets", sc.swc.jobName)
			dst = append(dst[:dstLen], swsPrev...)
		}
	}
	return dst
}

// getDockerSwarmSDScrapeWork returns `dockerswarm_sd_configs` ScrapeWork from cfg.
func (cfg *Config) getDockerSwarmSDScrapeWork(prev []*ScrapeWork) []*ScrapeWork {
	swsPrevByJob := getSWSByJob(prev)
	dst := make([]*ScrapeWork, 0, len(prev))
	for i := range cfg.ScrapeConfigs {
		sc := &cfg.ScrapeConfigs[i]
		dstLen := len(dst)
		ok := true
		for j := range sc.DockerSwarmConfigs {
			sdc := &sc.DockerSwarmConfigs[j]
			var okLocal bool
			dst, okLocal = appendDockerSwarmScrapeWork(dst, sdc, cfg.baseDir, sc.swc)
			if ok {
				ok = okLocal
			}
		}
		if ok {
			continue
		}
		swsPrev := swsPrevByJob[sc.swc.jobName]
		if len(swsPrev) > 0 {
			logger.Errorf("there were errors when discovering dockerswarm targets for job %q, so preserving the previous targets", sc.swc.jobName)
			dst = append(dst[:dstLen], swsPrev...)
		}
	}
	return dst
}

// getConsulSDScrapeWork returns `consul_sd_configs` ScrapeWork from cfg.
func (cfg *Config) getConsulSDScrapeWork(prev []*ScrapeWork) []*ScrapeWork {
	swsPrevByJob := getSWSByJob(prev)
	dst := make([]*ScrapeWork, 0, len(prev))
	for i := range cfg.ScrapeConfigs {
		sc := &cfg.ScrapeConfigs[i]
		dstLen := len(dst)
		ok := true
		for j := range sc.ConsulSDConfigs {
			sdc := &sc.ConsulSDConfigs[j]
			var okLocal bool
			dst, okLocal = appendConsulScrapeWork(dst, sdc, cfg.baseDir, sc.swc)
			if ok {
				ok = okLocal
			}
		}
		if ok {
			continue
		}
		swsPrev := swsPrevByJob[sc.swc.jobName]
		if len(swsPrev) > 0 {
			logger.Errorf("there were errors when discovering consul targets for job %q, so preserving the previous targets", sc.swc.jobName)
			dst = append(dst[:dstLen], swsPrev...)
		}
	}
	return dst
}

// getEurekaSDScrapeWork returns `eureka_sd_configs` ScrapeWork from cfg.
func (cfg *Config) getEurekaSDScrapeWork(prev []*ScrapeWork) []*ScrapeWork {
	swsPrevByJob := getSWSByJob(prev)
	dst := make([]*ScrapeWork, 0, len(prev))
	for i := range cfg.ScrapeConfigs {
		sc := &cfg.ScrapeConfigs[i]
		dstLen := len(dst)
		ok := true
		for j := range sc.EurekaSDConfigs {
			sdc := &sc.EurekaSDConfigs[j]
			var okLocal bool
			dst, okLocal = appendEurekaScrapeWork(dst, sdc, cfg.baseDir, sc.swc)
			if ok {
				ok = okLocal
			}
		}
		if ok {
			continue
		}
		swsPrev := swsPrevByJob[sc.swc.jobName]
		if len(swsPrev) > 0 {
			logger.Errorf("there were errors when discovering eureka targets for job %q, so preserving the previous targets", sc.swc.jobName)
			dst = append(dst[:dstLen], swsPrev...)
		}
	}
	return dst
}

// getDNSSDScrapeWork returns `dns_sd_configs` ScrapeWork from cfg.
func (cfg *Config) getDNSSDScrapeWork(prev []*ScrapeWork) []*ScrapeWork {
	swsPrevByJob := getSWSByJob(prev)
	dst := make([]*ScrapeWork, 0, len(prev))
	for i := range cfg.ScrapeConfigs {
		sc := &cfg.ScrapeConfigs[i]
		dstLen := len(dst)
		ok := true
		for j := range sc.DNSSDConfigs {
			sdc := &sc.DNSSDConfigs[j]
			var okLocal bool
			dst, okLocal = appendDNSScrapeWork(dst, sdc, sc.swc)
			if ok {
				ok = okLocal
			}
		}
		if ok {
			continue
		}
		swsPrev := swsPrevByJob[sc.swc.jobName]
		if len(swsPrev) > 0 {
			logger.Errorf("there were errors when discovering dns targets for job %q, so preserving the previous targets", sc.swc.jobName)
			dst = append(dst[:dstLen], swsPrev...)
		}
	}
	return dst
}

// getEC2SDScrapeWork returns `ec2_sd_configs` ScrapeWork from cfg.
func (cfg *Config) getEC2SDScrapeWork(prev []*ScrapeWork) []*ScrapeWork {
	swsPrevByJob := getSWSByJob(prev)
	dst := make([]*ScrapeWork, 0, len(prev))
	for i := range cfg.ScrapeConfigs {
		sc := &cfg.ScrapeConfigs[i]
		dstLen := len(dst)
		ok := true
		for j := range sc.EC2SDConfigs {
			sdc := &sc.EC2SDConfigs[j]
			var okLocal bool
			dst, okLocal = appendEC2ScrapeWork(dst, sdc, sc.swc)
			if ok {
				ok = okLocal
			}
		}
		if ok {
			continue
		}
		swsPrev := swsPrevByJob[sc.swc.jobName]
		if len(swsPrev) > 0 {
			logger.Errorf("there were errors when discovering ec2 targets for job %q, so preserving the previous targets", sc.swc.jobName)
			dst = append(dst[:dstLen], swsPrev...)
		}
	}
	return dst
}

// getGCESDScrapeWork returns `gce_sd_configs` ScrapeWork from cfg.
func (cfg *Config) getGCESDScrapeWork(prev []*ScrapeWork) []*ScrapeWork {
	swsPrevByJob := getSWSByJob(prev)
	dst := make([]*ScrapeWork, 0, len(prev))
	for i := range cfg.ScrapeConfigs {
		sc := &cfg.ScrapeConfigs[i]
		dstLen := len(dst)
		ok := true
		for j := range sc.GCESDConfigs {
			sdc := &sc.GCESDConfigs[j]
			var okLocal bool
			dst, okLocal = appendGCEScrapeWork(dst, sdc, sc.swc)
			if ok {
				ok = okLocal
			}
		}
		if ok {
			continue
		}
		swsPrev := swsPrevByJob[sc.swc.jobName]
		if len(swsPrev) > 0 {
			logger.Errorf("there were errors when discovering gce targets for job %q, so preserving the previous targets", sc.swc.jobName)
			dst = append(dst[:dstLen], swsPrev...)
		}
	}
	return dst
}

// getFileSDScrapeWork returns `file_sd_configs` ScrapeWork from cfg.
func (cfg *Config) getFileSDScrapeWork(prev []*ScrapeWork) []*ScrapeWork {
	// Create a map for the previous scrape work.
	swsMapPrev := make(map[string][]*ScrapeWork)
	for _, sw := range prev {
		filepath := promrelabel.GetLabelValueByName(sw.Labels, "__vm_filepath")
		if len(filepath) == 0 {
			logger.Panicf("BUG: missing `__vm_filepath` label")
		} else {
			swsMapPrev[filepath] = append(swsMapPrev[filepath], sw)
		}
	}
	dst := make([]*ScrapeWork, 0, len(prev))
	for i := range cfg.ScrapeConfigs {
		sc := &cfg.ScrapeConfigs[i]
		for j := range sc.FileSDConfigs {
			sdc := &sc.FileSDConfigs[j]
			dst = sdc.appendScrapeWork(dst, swsMapPrev, cfg.baseDir, sc.swc)
		}
	}
	return dst
}

// getStaticScrapeWork returns `static_configs` ScrapeWork from cfg.
func (cfg *Config) getStaticScrapeWork() []*ScrapeWork {
	var dst []*ScrapeWork
	for i := range cfg.ScrapeConfigs {
		sc := &cfg.ScrapeConfigs[i]
		for j := range sc.StaticConfigs {
			stc := &sc.StaticConfigs[j]
			dst = stc.appendScrapeWork(dst, sc.swc, nil)
		}
	}
	return dst
}

func getScrapeWorkConfig(sc *ScrapeConfig, baseDir string, globalCfg *GlobalConfig) (*scrapeWorkConfig, error) {
	jobName := sc.JobName
	if jobName == "" {
		return nil, fmt.Errorf("missing `job_name` field in `scrape_config`")
	}
	scrapeInterval := sc.ScrapeInterval
	if scrapeInterval <= 0 {
		scrapeInterval = globalCfg.ScrapeInterval
		if scrapeInterval <= 0 {
			scrapeInterval = defaultScrapeInterval
		}
	}
	scrapeTimeout := sc.ScrapeTimeout
	if scrapeTimeout <= 0 {
		scrapeTimeout = globalCfg.ScrapeTimeout
		if scrapeTimeout <= 0 {
			scrapeTimeout = defaultScrapeTimeout
		}
	}
	honorLabels := sc.HonorLabels
	honorTimestamps := sc.HonorTimestamps
	metricsPath := sc.MetricsPath
	if metricsPath == "" {
		metricsPath = "/metrics"
	}
	scheme := sc.Scheme
	if scheme == "" {
		scheme = "http"
	}
	if scheme != "http" && scheme != "https" {
		return nil, fmt.Errorf("unexpected `scheme` for `job_name` %q: %q; supported values: http or https", jobName, scheme)
	}
	params := sc.Params
	ac, err := promauth.NewConfig(baseDir, sc.BasicAuth, sc.BearerToken, sc.BearerTokenFile, sc.TLSConfig)
	if err != nil {
		return nil, fmt.Errorf("cannot parse auth config for `job_name` %q: %w", jobName, err)
	}
	var relabelConfigs []promrelabel.ParsedRelabelConfig
	relabelConfigs, err = promrelabel.ParseRelabelConfigs(relabelConfigs[:0], sc.RelabelConfigs)
	if err != nil {
		return nil, fmt.Errorf("cannot parse `relabel_configs` for `job_name` %q: %w", jobName, err)
	}
	var metricRelabelConfigs []promrelabel.ParsedRelabelConfig
	metricRelabelConfigs, err = promrelabel.ParseRelabelConfigs(metricRelabelConfigs[:0], sc.MetricRelabelConfigs)
	if err != nil {
		return nil, fmt.Errorf("cannot parse `metric_relabel_configs` for `job_name` %q: %w", jobName, err)
	}
	swc := &scrapeWorkConfig{
		scrapeInterval:       scrapeInterval,
		scrapeTimeout:        scrapeTimeout,
		jobName:              jobName,
		metricsPath:          metricsPath,
		scheme:               scheme,
		params:               params,
		proxyURL:             sc.ProxyURL.URL(),
		authConfig:           ac,
		honorLabels:          honorLabels,
		honorTimestamps:      honorTimestamps,
		externalLabels:       globalCfg.ExternalLabels,
		relabelConfigs:       relabelConfigs,
		metricRelabelConfigs: metricRelabelConfigs,
		sampleLimit:          sc.SampleLimit,
		disableCompression:   sc.DisableCompression,
		disableKeepAlive:     sc.DisableKeepAlive,
		streamParse:          sc.StreamParse,
	}
	return swc, nil
}
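
// For example, with a global `scrape_interval: 30s` and no per-job override,
// the resulting scrapeWorkConfig gets scrapeInterval=30s; with neither set it
// falls back to defaultScrapeInterval (one minute). scrapeTimeout follows the
// same per-job -> global -> default chain.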

type scrapeWorkConfig struct {
	scrapeInterval       time.Duration
	scrapeTimeout        time.Duration
	jobName              string
	metricsPath          string
	scheme               string
	params               map[string][]string
	proxyURL             *url.URL
	authConfig           *promauth.Config
	honorLabels          bool
	honorTimestamps      bool
	externalLabels       map[string]string
	relabelConfigs       []promrelabel.ParsedRelabelConfig
	metricRelabelConfigs []promrelabel.ParsedRelabelConfig
	sampleLimit          int
	disableCompression   bool
	disableKeepAlive     bool
	streamParse          bool
}

func appendKubernetesScrapeWork(dst []*ScrapeWork, sdc *kubernetes.SDConfig, baseDir string, swc *scrapeWorkConfig) ([]*ScrapeWork, bool) {
	targetLabels, err := kubernetes.GetLabels(sdc, baseDir)
	if err != nil {
		logger.Errorf("error when discovering kubernetes targets for `job_name` %q: %s; skipping it", swc.jobName, err)
		return dst, false
	}
	return appendScrapeWorkForTargetLabels(dst, swc, targetLabels, "kubernetes_sd_config"), true
}

func appendOpenstackScrapeWork(dst []*ScrapeWork, sdc *openstack.SDConfig, baseDir string, swc *scrapeWorkConfig) ([]*ScrapeWork, bool) {
	targetLabels, err := openstack.GetLabels(sdc, baseDir)
	if err != nil {
		logger.Errorf("error when discovering openstack targets for `job_name` %q: %s; skipping it", swc.jobName, err)
		return dst, false
	}
	return appendScrapeWorkForTargetLabels(dst, swc, targetLabels, "openstack_sd_config"), true
}

func appendDockerSwarmScrapeWork(dst []*ScrapeWork, sdc *dockerswarm.SDConfig, baseDir string, swc *scrapeWorkConfig) ([]*ScrapeWork, bool) {
	targetLabels, err := dockerswarm.GetLabels(sdc, baseDir)
	if err != nil {
		logger.Errorf("error when discovering dockerswarm targets for `job_name` %q: %s; skipping it", swc.jobName, err)
		return dst, false
	}
	return appendScrapeWorkForTargetLabels(dst, swc, targetLabels, "dockerswarm_sd_config"), true
}

func appendConsulScrapeWork(dst []*ScrapeWork, sdc *consul.SDConfig, baseDir string, swc *scrapeWorkConfig) ([]*ScrapeWork, bool) {
	targetLabels, err := consul.GetLabels(sdc, baseDir)
	if err != nil {
		logger.Errorf("error when discovering consul targets for `job_name` %q: %s; skipping it", swc.jobName, err)
		return dst, false
	}
	return appendScrapeWorkForTargetLabels(dst, swc, targetLabels, "consul_sd_config"), true
}

func appendEurekaScrapeWork(dst []*ScrapeWork, sdc *eureka.SDConfig, baseDir string, swc *scrapeWorkConfig) ([]*ScrapeWork, bool) {
	targetLabels, err := eureka.GetLabels(sdc, baseDir)
	if err != nil {
		logger.Errorf("error when discovering eureka targets for `job_name` %q: %s; skipping it", swc.jobName, err)
		return dst, false
	}
	return appendScrapeWorkForTargetLabels(dst, swc, targetLabels, "eureka_sd_config"), true
}

func appendDNSScrapeWork(dst []*ScrapeWork, sdc *dns.SDConfig, swc *scrapeWorkConfig) ([]*ScrapeWork, bool) {
	targetLabels, err := dns.GetLabels(sdc)
	if err != nil {
		logger.Errorf("error when discovering dns targets for `job_name` %q: %s; skipping it", swc.jobName, err)
		return dst, false
	}
	return appendScrapeWorkForTargetLabels(dst, swc, targetLabels, "dns_sd_config"), true
}

func appendEC2ScrapeWork(dst []*ScrapeWork, sdc *ec2.SDConfig, swc *scrapeWorkConfig) ([]*ScrapeWork, bool) {
	targetLabels, err := ec2.GetLabels(sdc)
	if err != nil {
		logger.Errorf("error when discovering ec2 targets for `job_name` %q: %s; skipping it", swc.jobName, err)
		return dst, false
	}
	return appendScrapeWorkForTargetLabels(dst, swc, targetLabels, "ec2_sd_config"), true
}

func appendGCEScrapeWork(dst []*ScrapeWork, sdc *gce.SDConfig, swc *scrapeWorkConfig) ([]*ScrapeWork, bool) {
	targetLabels, err := gce.GetLabels(sdc)
	if err != nil {
		logger.Errorf("error when discovering gce targets for `job_name` %q: %s; skipping it", swc.jobName, err)
		return dst, false
	}
	return appendScrapeWorkForTargetLabels(dst, swc, targetLabels, "gce_sd_config"), true
}

func appendScrapeWorkForTargetLabels(dst []*ScrapeWork, swc *scrapeWorkConfig, targetLabels []map[string]string, sectionName string) []*ScrapeWork {
	for _, metaLabels := range targetLabels {
		target := metaLabels["__address__"]
		var err error
		dst, err = appendScrapeWork(dst, swc, target, nil, metaLabels)
		if err != nil {
			logger.Errorf("error when parsing `%s` target %q for `job_name` %q: %s; skipping it", sectionName, target, swc.jobName, err)
			continue
		}
	}
	return dst
}

func (sdc *FileSDConfig) appendScrapeWork(dst []*ScrapeWork, swsMapPrev map[string][]*ScrapeWork, baseDir string, swc *scrapeWorkConfig) []*ScrapeWork {
	for _, file := range sdc.Files {
		pathPattern := getFilepath(baseDir, file)
		paths := []string{pathPattern}
		if strings.Contains(pathPattern, "*") {
			var err error
			paths, err = filepath.Glob(pathPattern)
			if err != nil {
				// Do not return this error, since other files may contain valid scrape configs.
				logger.Errorf("invalid pattern %q in `files` section: %s; skipping it", file, err)
				continue
			}
		}
		for _, path := range paths {
			stcs, err := loadStaticConfigs(path)
			if err != nil {
				// Do not return this error, since other paths may contain valid scrape configs.
				if sws := swsMapPrev[path]; sws != nil {
					// Re-use the previous valid scrape work for this path.
					logger.Errorf("keeping the previously loaded `static_configs` from %q because of error when re-loading the file: %s", path, err)
					dst = append(dst, sws...)
				} else {
					logger.Errorf("skipping loading `static_configs` from %q because of error: %s", path, err)
				}
				continue
			}
			pathShort := path
			if strings.HasPrefix(pathShort, baseDir) {
				pathShort = path[len(baseDir):]
				if len(pathShort) > 0 && pathShort[0] == filepath.Separator {
					pathShort = pathShort[1:]
				}
			}
			metaLabels := map[string]string{
				"__meta_filepath": pathShort,
				"__vm_filepath":   path, // This label is needed for internal promscrape logic
			}
			for i := range stcs {
				dst = stcs[i].appendScrapeWork(dst, swc, metaLabels)
			}
		}
	}
	return dst
}

func (stc *StaticConfig) appendScrapeWork(dst []*ScrapeWork, swc *scrapeWorkConfig, metaLabels map[string]string) []*ScrapeWork {
	for _, target := range stc.Targets {
		if target == "" {
			// Do not return this error, since other targets may be valid
			logger.Errorf("`static_configs` target for `job_name` %q cannot be empty; skipping it", swc.jobName)
			continue
		}
		var err error
		dst, err = appendScrapeWork(dst, swc, target, stc.Labels, metaLabels)
		if err != nil {
			// Do not return this error, since other targets may be valid
			logger.Errorf("error when parsing `static_configs` target %q for `job_name` %q: %s; skipping it", target, swc.jobName, err)
			continue
		}
	}
	return dst
}

func appendScrapeWork(dst []*ScrapeWork, swc *scrapeWorkConfig, target string, extraLabels, metaLabels map[string]string) ([]*ScrapeWork, error) {
	labels := mergeLabels(swc.jobName, swc.scheme, target, swc.metricsPath, extraLabels, swc.externalLabels, metaLabels, swc.params)
	var originalLabels []prompbmarshal.Label
	if !*dropOriginalLabels {
		originalLabels = append([]prompbmarshal.Label{}, labels...)
		promrelabel.SortLabels(originalLabels)
		// Reduce memory usage by interning all the strings in originalLabels.
		internLabelStrings(originalLabels)
	}
	labels = promrelabel.ApplyRelabelConfigs(labels, 0, swc.relabelConfigs, false)
	labels = promrelabel.RemoveMetaLabels(labels[:0], labels)
	// Remove references to already deleted labels, so that the GC can collect the strings
	// for label names and values past len(labels).
	// This should reduce memory usage when relabeling creates a big number of temporary labels with long names and/or values.
	// See https://github.com/VictoriaMetrics/VictoriaMetrics/issues/825 for details.
	labels = append([]prompbmarshal.Label{}, labels...)

	if len(labels) == 0 {
		// Drop target without labels.
		droppedTargetsMap.Register(originalLabels)
		return dst, nil
	}
	// See https://www.robustperception.io/life-of-a-label
	schemeRelabeled := promrelabel.GetLabelValueByName(labels, "__scheme__")
	if len(schemeRelabeled) == 0 {
		schemeRelabeled = "http"
	}
	addressRelabeled := promrelabel.GetLabelValueByName(labels, "__address__")
	if len(addressRelabeled) == 0 {
		// Drop target without scrape address.
		droppedTargetsMap.Register(originalLabels)
		return dst, nil
	}
	if strings.Contains(addressRelabeled, "/") {
		// Drop target with '/'
		droppedTargetsMap.Register(originalLabels)
		return dst, nil
	}
	addressRelabeled = addMissingPort(schemeRelabeled, addressRelabeled)
	metricsPathRelabeled := promrelabel.GetLabelValueByName(labels, "__metrics_path__")
	if metricsPathRelabeled == "" {
		metricsPathRelabeled = "/metrics"
	}
	if !strings.HasPrefix(metricsPathRelabeled, "/") {
		metricsPathRelabeled = "/" + metricsPathRelabeled
	}
	paramsRelabeled := getParamsFromLabels(labels, swc.params)
	optionalQuestion := "?"
	if len(paramsRelabeled) == 0 || strings.Contains(metricsPathRelabeled, "?") {
		optionalQuestion = ""
	}
	paramsStr := url.Values(paramsRelabeled).Encode()
	scrapeURL := fmt.Sprintf("%s://%s%s%s%s", schemeRelabeled, addressRelabeled, metricsPathRelabeled, optionalQuestion, paramsStr)
	if _, err := url.Parse(scrapeURL); err != nil {
		return dst, fmt.Errorf("invalid url %q for scheme=%q (%q), target=%q (%q), metrics_path=%q (%q) for `job_name` %q: %w",
			scrapeURL, swc.scheme, schemeRelabeled, target, addressRelabeled, swc.metricsPath, metricsPathRelabeled, swc.jobName, err)
	}
	// Set missing "instance" label according to https://www.robustperception.io/life-of-a-label
	if promrelabel.GetLabelByName(labels, "instance") == nil {
		labels = append(labels, prompbmarshal.Label{
			Name:  "instance",
			Value: addressRelabeled,
		})
		promrelabel.SortLabels(labels)
	}
	// Reduce memory usage by interning all the strings in labels.
	internLabelStrings(labels)
	dst = append(dst, &ScrapeWork{
		ScrapeURL:            scrapeURL,
		ScrapeInterval:       swc.scrapeInterval,
		ScrapeTimeout:        swc.scrapeTimeout,
		HonorLabels:          swc.honorLabels,
		HonorTimestamps:      swc.honorTimestamps,
		OriginalLabels:       originalLabels,
		Labels:               labels,
		ProxyURL:             swc.proxyURL,
		AuthConfig:           swc.authConfig,
		MetricRelabelConfigs: swc.metricRelabelConfigs,
		SampleLimit:          swc.sampleLimit,
		DisableCompression:   swc.disableCompression,
		DisableKeepAlive:     swc.disableKeepAlive,
		StreamParse:          swc.streamParse,
		jobNameOriginal:      swc.jobName,
	})
	return dst, nil
}
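
// For example (an illustrative trace through the code above): a `static_configs`
// target "foo.bar:1234" in job "aaa" with default settings and no relabeling
// produces ScrapeURL "http://foo.bar:1234/metrics", with the `instance` label set
// to "foo.bar:1234" and the `job` label set to "aaa" (alongside internal
// `__`-prefixed labels such as `__address__`).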

func internLabelStrings(labels []prompbmarshal.Label) {
	for i := range labels {
		label := &labels[i]
		label.Name = internString(label.Name)
		label.Value = internString(label.Value)
	}
}

func internString(s string) string {
	internStringsMapLock.Lock()
	defer internStringsMapLock.Unlock()
	if sInterned, ok := internStringsMap[s]; ok {
		return sInterned
	}
	// Make a new copy of s in order to drop the reference to the possibly bigger string s points into.
	sCopy := string(append([]byte{}, s...))
	internStringsMap[sCopy] = sCopy
	if len(internStringsMap) > 100e3 {
		internStringsMap = make(map[string]string, 100e3)
	}
	return sCopy
}

var (
	internStringsMapLock sync.Mutex
	internStringsMap     = make(map[string]string, 100e3)
)
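
// A quick sketch of the interning contract: equal strings passed to internString
// come back as the same shared copy, so duplicate label names and values across
// many targets are stored once. E.g. (illustrative):
//
//	a := internString(string([]byte("node")))
//	b := internString(string([]byte("node")))
//	// a == b, and both refer to the single interned copy.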

func getParamsFromLabels(labels []prompbmarshal.Label, paramsOrig map[string][]string) map[string][]string {
	// See https://www.robustperception.io/life-of-a-label
	m := make(map[string][]string)
	for i := range labels {
		label := &labels[i]
		if !strings.HasPrefix(label.Name, "__param_") {
			continue
		}
		name := label.Name[len("__param_"):]
		values := []string{label.Value}
		if p := paramsOrig[name]; len(p) > 1 {
			values = append(values, p[1:]...)
		}
		m[name] = values
	}
	return m
}
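
// For example, a label {__param_module="icmp"} combined with original params
// {"module": ["icmp", "http"]} yields {"module": ["icmp", "http"]}: the relabeled
// value replaces the first original value, while the remaining original values
// are preserved.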

func mergeLabels(job, scheme, target, metricsPath string, extraLabels, externalLabels, metaLabels map[string]string, params map[string][]string) []prompbmarshal.Label {
	// See https://prometheus.io/docs/prometheus/latest/configuration/configuration/#relabel_config
	m := make(map[string]string)
	for k, v := range externalLabels {
		m[k] = v
	}
	m["job"] = job
	m["__address__"] = target
	m["__scheme__"] = scheme
	m["__metrics_path__"] = metricsPath
	for k, args := range params {
		if len(args) == 0 {
			continue
		}
		k = "__param_" + k
		v := args[0]
		m[k] = v
	}
	for k, v := range extraLabels {
		m[k] = v
	}
	for k, v := range metaLabels {
		m[k] = v
	}
	result := make([]prompbmarshal.Label, 0, len(m))
	for k, v := range m {
		result = append(result, prompbmarshal.Label{
			Name:  k,
			Value: v,
		})
	}
	return result
}
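
// Note on precedence in mergeLabels: later writes win, so meta labels override
// extra (per-target) labels, which override `__param_*` and standard labels,
// which override external labels. For instance (illustrative values only):
//
//	mergeLabels("job1", "http", "host:80", "/metrics",
//		map[string]string{"env": "qa"},   // extraLabels
//		map[string]string{"env": "prod"}, // externalLabels
//		nil, nil)
//
// produces env="qa", since extraLabels are applied after externalLabels.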

func getFilepath(baseDir, path string) string {
	if filepath.IsAbs(path) {
		return path
	}
	return filepath.Join(baseDir, path)
}

func addMissingPort(scheme, target string) string {
	if strings.Contains(target, ":") {
		return target
	}
	if scheme == "https" {
		target += ":443"
	} else {
		target += ":80"
	}
	return target
}
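
// For example: addMissingPort("https", "foo.bar") == "foo.bar:443" and
// addMissingPort("http", "foo.bar") == "foo.bar:80", while targets that already
// contain ':' are returned unchanged.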

const (
	defaultScrapeInterval = time.Minute
	defaultScrapeTimeout  = 10 * time.Second
)