fix some typo (#3898)

This commit is contained in:
Haleygo 2023-03-03 18:02:13 +08:00 committed by GitHub
parent de621c0cb7
commit d056be710b
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
3 changed files with 4 additions and 4 deletions

View file

@@ -53,7 +53,7 @@ func main() {
if err := promscrape.CheckConfig(); err != nil { if err := promscrape.CheckConfig(); err != nil {
logger.Fatalf("error when checking -promscrape.config: %s", err) logger.Fatalf("error when checking -promscrape.config: %s", err)
} }
logger.Infof("-promscrape.config is ok; exitting with 0 status code") logger.Infof("-promscrape.config is ok; exiting with 0 status code")
return return
} }

View file

@@ -33,7 +33,7 @@ var (
"Note that disabling HTTP keep-alive may increase load on both vmagent and scrape targets") "Note that disabling HTTP keep-alive may increase load on both vmagent and scrape targets")
streamParse = flag.Bool("promscrape.streamParse", false, "Whether to enable stream parsing for metrics obtained from scrape targets. This may be useful "+ streamParse = flag.Bool("promscrape.streamParse", false, "Whether to enable stream parsing for metrics obtained from scrape targets. This may be useful "+
"for reducing memory usage when millions of metrics are exposed per each scrape target. "+ "for reducing memory usage when millions of metrics are exposed per each scrape target. "+
"It is posible to set 'stream_parse: true' individually per each 'scrape_config' section in '-promscrape.config' for fine grained control") "It is possible to set 'stream_parse: true' individually per each 'scrape_config' section in '-promscrape.config' for fine grained control")
) )
type client struct { type client struct {
@@ -260,7 +260,7 @@ func (c *client) ReadData(dst []byte) ([]byte, error) {
swapResponseBodies := len(dst) == 0 swapResponseBodies := len(dst) == 0
if swapResponseBodies { if swapResponseBodies {
// An optimization: write response directly to dst. // An optimization: write response directly to dst.
// This should reduce memory uage when scraping big targets. // This should reduce memory usage when scraping big targets.
dst = resp.SwapBody(dst) dst = resp.SwapBody(dst)
} }

View file

@@ -23,7 +23,7 @@ type UnmarshalWork interface {
// StartUnmarshalWorkers starts unmarshal workers. // StartUnmarshalWorkers starts unmarshal workers.
func StartUnmarshalWorkers() { func StartUnmarshalWorkers() {
if unmarshalWorkCh != nil { if unmarshalWorkCh != nil {
logger.Panicf("BUG: it looks like startUnmarshalWorkers() has been alread called without stopUnmarshalWorkers()") logger.Panicf("BUG: it looks like startUnmarshalWorkers() has been already called without stopUnmarshalWorkers()")
} }
gomaxprocs := cgroup.AvailableCPUs() gomaxprocs := cgroup.AvailableCPUs()
unmarshalWorkCh = make(chan UnmarshalWork, gomaxprocs) unmarshalWorkCh = make(chan UnmarshalWork, gomaxprocs)