Mirror of https://github.com/VictoriaMetrics/VictoriaMetrics.git
app/vmctl: switch from table-driven tests to f-tests

This simplifies debugging tests and makes the test code clearer and more concise.
See https://itnext.io/f-tests-as-a-replacement-for-table-driven-tests-in-go-8814a8b19e9e

While at it, consistently use t.Fatal* instead of t.Error* across tests, since t.Error*
requires more boilerplate code, which can result in additional bugs inside tests.
While t.Error* allows logging multiple errors within the same test, this rarely simplifies
fixing broken tests in practice. Minimal sketches of both points are included below.

This is a follow-up for a9525da8a4
parent: e84309142a
commit: cedbbdec30
10 changed files with 975 additions and 1415 deletions
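For readers unfamiliar with the pattern, here is a minimal, hypothetical sketch of the conversion described in the commit message (the sum function and both test names are invented for illustration and are not part of vmctl). The f-test variant replaces the case slice and the t.Run loop with a local helper f() that checks a single case, so every case becomes a plain function call:

package example

import "testing"

func sum(a, b int) int {
	return a + b
}

// Table-driven style (before): cases are described as data and executed in a loop.
func TestSumTableDriven(t *testing.T) {
	tests := []struct {
		name string
		a, b int
		want int
	}{
		{name: "zeros", a: 0, b: 0, want: 0},
		{name: "positive", a: 2, b: 3, want: 5},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			if got := sum(tt.a, tt.b); got != tt.want {
				t.Errorf("sum() = %d, want %d", got, tt.want)
			}
		})
	}
}

// f-test style (after): a local helper f() checks a single case, and each case
// is an ordinary call to f(), optionally preceded by a descriptive comment.
func TestSumFTest(t *testing.T) {
	f := func(a, b, resultExpected int) {
		t.Helper()

		result := sum(a, b)
		if result != resultExpected {
			t.Fatalf("unexpected result: got %d; want %d", result, resultExpected)
		}
	}

	// zeros
	f(0, 0, 0)

	// positive
	f(2, 3, 5)
}

A failing case in the f-test variant is reported at the exact f() call site thanks to t.Helper(), which is what makes debugging and trimming broken tests simpler.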
|
@ -7,103 +7,106 @@ import (
|
|||
"time"
|
||||
)
|
||||
|
||||
func TestRetry_Do(t *testing.T) {
|
||||
counter := 0
|
||||
tests := []struct {
|
||||
name string
|
||||
backoffRetries int
|
||||
backoffFactor float64
|
||||
backoffMinDuration time.Duration
|
||||
retryableFunc retryableFunc
|
||||
cancelTimeout time.Duration
|
||||
want uint64
|
||||
wantErr bool
|
||||
}{
|
||||
{
|
||||
name: "return bad request",
|
||||
retryableFunc: func() error {
|
||||
return ErrBadRequest
|
||||
},
|
||||
want: 0,
|
||||
wantErr: true,
|
||||
},
|
||||
{
|
||||
name: "empty retries values",
|
||||
retryableFunc: func() error {
|
||||
time.Sleep(time.Millisecond * 100)
|
||||
return nil
|
||||
},
|
||||
want: 0,
|
||||
wantErr: true,
|
||||
},
|
||||
{
|
||||
name: "only one retry test",
|
||||
backoffRetries: 5,
|
||||
backoffFactor: 1.7,
|
||||
backoffMinDuration: time.Millisecond * 10,
|
||||
retryableFunc: func() error {
|
||||
t := time.NewTicker(time.Millisecond * 5)
|
||||
defer t.Stop()
|
||||
for range t.C {
|
||||
counter++
|
||||
if counter%2 == 0 {
|
||||
return fmt.Errorf("got some error")
|
||||
}
|
||||
if counter%3 == 0 {
|
||||
return nil
|
||||
}
|
||||
}
|
||||
return nil
|
||||
},
|
||||
want: 1,
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "all retries failed test",
|
||||
backoffRetries: 5,
|
||||
backoffFactor: 0.1,
|
||||
backoffMinDuration: time.Millisecond * 10,
|
||||
retryableFunc: func() error {
|
||||
t := time.NewTicker(time.Millisecond * 5)
|
||||
defer t.Stop()
|
||||
for range t.C {
|
||||
return fmt.Errorf("got some error")
|
||||
}
|
||||
return nil
|
||||
},
|
||||
want: 5,
|
||||
wantErr: true,
|
||||
},
|
||||
{
|
||||
name: "cancel context",
|
||||
backoffRetries: 5,
|
||||
backoffFactor: 1.7,
|
||||
backoffMinDuration: time.Millisecond * 10,
|
||||
retryableFunc: func() error {
|
||||
return fmt.Errorf("got some error")
|
||||
},
|
||||
cancelTimeout: time.Millisecond * 40,
|
||||
want: 3,
|
||||
wantErr: true,
|
||||
},
|
||||
func TestBackoffRetry_Failure(t *testing.T) {
|
||||
f := func(backoffFactor float64, backoffRetries int, cancelTimeout time.Duration, retryFunc func() error, resultExpected int) {
|
||||
t.Helper()
|
||||
|
||||
r := &Backoff{
|
||||
retries: backoffRetries,
|
||||
factor: backoffFactor,
|
||||
minDuration: time.Millisecond * 10,
|
||||
}
|
||||
ctx := context.Background()
|
||||
if cancelTimeout != 0 {
|
||||
newCtx, cancelFn := context.WithTimeout(context.Background(), cancelTimeout)
|
||||
ctx = newCtx
|
||||
defer cancelFn()
|
||||
}
|
||||
|
||||
result, err := r.Retry(ctx, retryFunc)
|
||||
if err == nil {
|
||||
t.Fatalf("expecting non-nil error")
|
||||
}
|
||||
if result != uint64(resultExpected) {
|
||||
t.Fatalf("unexpected result: got %d; want %d", result, resultExpected)
|
||||
}
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
r := &Backoff{retries: tt.backoffRetries, factor: tt.backoffFactor, minDuration: tt.backoffMinDuration}
|
||||
ctx := context.Background()
|
||||
if tt.cancelTimeout != 0 {
|
||||
newCtx, cancelFn := context.WithTimeout(context.Background(), tt.cancelTimeout)
|
||||
ctx = newCtx
|
||||
defer cancelFn()
|
||||
}
|
||||
got, err := r.Retry(ctx, tt.retryableFunc)
|
||||
if (err != nil) != tt.wantErr {
|
||||
t.Errorf("Retry() error = %v, wantErr %v", err, tt.wantErr)
|
||||
return
|
||||
}
|
||||
if got != tt.want {
|
||||
t.Errorf("Retry() got = %v, want %v", got, tt.want)
|
||||
}
|
||||
})
|
||||
|
||||
// return bad request
|
||||
retryFunc := func() error {
|
||||
return ErrBadRequest
|
||||
}
|
||||
f(0, 0, 0, retryFunc, 0)
|
||||
|
||||
// empty retries values
|
||||
retryFunc = func() error {
|
||||
time.Sleep(time.Millisecond * 100)
|
||||
return nil
|
||||
}
|
||||
f(0, 0, 0, retryFunc, 0)
|
||||
|
||||
// all retries failed test
|
||||
backoffFactor := 0.1
|
||||
backoffRetries := 5
|
||||
cancelTimeout := time.Second * 0
|
||||
retryFunc = func() error {
|
||||
t := time.NewTicker(time.Millisecond * 5)
|
||||
defer t.Stop()
|
||||
for range t.C {
|
||||
return fmt.Errorf("got some error")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
resultExpected := 5
|
||||
f(backoffFactor, backoffRetries, cancelTimeout, retryFunc, resultExpected)
|
||||
|
||||
// cancel context
|
||||
backoffFactor = 1.7
|
||||
backoffRetries = 5
|
||||
cancelTimeout = time.Millisecond * 40
|
||||
retryFunc = func() error {
|
||||
return fmt.Errorf("got some error")
|
||||
}
|
||||
resultExpected = 3
|
||||
f(backoffFactor, backoffRetries, cancelTimeout, retryFunc, resultExpected)
|
||||
}
|
||||
|
||||
func TestBackoffRetry_Success(t *testing.T) {
|
||||
f := func(retryFunc func() error, resultExpected int) {
|
||||
t.Helper()
|
||||
|
||||
r := &Backoff{
|
||||
retries: 5,
|
||||
factor: 1.7,
|
||||
minDuration: time.Millisecond * 10,
|
||||
}
|
||||
ctx := context.Background()
|
||||
|
||||
result, err := r.Retry(ctx, retryFunc)
|
||||
if err != nil {
|
||||
t.Fatalf("Retry() error: %s", err)
|
||||
}
|
||||
if result != uint64(resultExpected) {
|
||||
t.Fatalf("unexpected result: got %d; want %d", result, resultExpected)
|
||||
}
|
||||
}
|
||||
|
||||
// only one retry test
|
||||
counter := 0
|
||||
retryFunc := func() error {
|
||||
t := time.NewTicker(time.Millisecond * 5)
|
||||
defer t.Stop()
|
||||
for range t.C {
|
||||
counter++
|
||||
if counter%2 == 0 {
|
||||
return fmt.Errorf("got some error")
|
||||
}
|
||||
if counter%3 == 0 {
|
||||
return nil
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
resultExpected := 1
|
||||
f(retryFunc, resultExpected)
|
||||
}
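The rewritten backoff tests above also illustrate the second point of the commit message: t.Fatal* stops a test at the first failed check, so later checks need no guard code. A minimal, hypothetical comparison (the parsed value and test names are invented, not taken from vmctl):

package example

import (
	"strconv"
	"testing"
)

// t.Error* keeps the test running after a failure, so checks that depend on
// earlier ones need explicit guards such as this early return.
func TestAtoiWithError(t *testing.T) {
	n, err := strconv.Atoi("42")
	if err != nil {
		t.Errorf("unexpected error: %s", err)
		return // without this guard the next check would use a meaningless n
	}
	if n != 42 {
		t.Errorf("unexpected value: got %d; want 42", n)
	}
}

// t.Fatal* stops the test at the first broken assumption, so later checks can
// rely on earlier ones without any guard code.
func TestAtoiWithFatal(t *testing.T) {
	n, err := strconv.Atoi("42")
	if err != nil {
		t.Fatalf("unexpected error: %s", err)
	}
	if n != 42 {
		t.Fatalf("unexpected value: got %d; want 42", n)
	}
}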
@ -3,125 +3,97 @@ package influx
|
|||
import "testing"
|
||||
|
||||
func TestFetchQuery(t *testing.T) {
|
||||
testCases := []struct {
|
||||
s Series
|
||||
timeFilter string
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
s: Series{
|
||||
Measurement: "cpu",
|
||||
Field: "value",
|
||||
LabelPairs: []LabelPair{
|
||||
{
|
||||
Name: "foo",
|
||||
Value: "bar",
|
||||
},
|
||||
},
|
||||
},
|
||||
expected: `select "value" from "cpu" where "foo"::tag='bar'`,
|
||||
},
|
||||
{
|
||||
s: Series{
|
||||
Measurement: "cpu",
|
||||
Field: "value",
|
||||
LabelPairs: []LabelPair{
|
||||
{
|
||||
Name: "foo",
|
||||
Value: "bar",
|
||||
},
|
||||
{
|
||||
Name: "baz",
|
||||
Value: "qux",
|
||||
},
|
||||
},
|
||||
},
|
||||
expected: `select "value" from "cpu" where "foo"::tag='bar' and "baz"::tag='qux'`,
|
||||
},
|
||||
{
|
||||
s: Series{
|
||||
Measurement: "cpu",
|
||||
Field: "value",
|
||||
LabelPairs: []LabelPair{
|
||||
{
|
||||
Name: "foo",
|
||||
Value: "b'ar",
|
||||
},
|
||||
},
|
||||
},
|
||||
timeFilter: "time >= now()",
|
||||
expected: `select "value" from "cpu" where "foo"::tag='b\'ar' and time >= now()`,
|
||||
},
|
||||
{
|
||||
s: Series{
|
||||
Measurement: "cpu",
|
||||
Field: "value",
|
||||
LabelPairs: []LabelPair{
|
||||
{
|
||||
Name: "name",
|
||||
Value: `dev-mapper-centos\x2dswap.swap`,
|
||||
},
|
||||
{
|
||||
Name: "state",
|
||||
Value: "dev-mapp'er-c'en'tos",
|
||||
},
|
||||
},
|
||||
},
|
||||
timeFilter: "time >= now()",
|
||||
expected: `select "value" from "cpu" where "name"::tag='dev-mapper-centos\\x2dswap.swap' and "state"::tag='dev-mapp\'er-c\'en\'tos' and time >= now()`,
|
||||
},
|
||||
{
|
||||
s: Series{
|
||||
Measurement: "cpu",
|
||||
Field: "value",
|
||||
},
|
||||
timeFilter: "time >= now()",
|
||||
expected: `select "value" from "cpu" where time >= now()`,
|
||||
},
|
||||
{
|
||||
s: Series{
|
||||
Measurement: "cpu",
|
||||
Field: "value",
|
||||
},
|
||||
expected: `select "value" from "cpu"`,
|
||||
},
|
||||
}
|
||||
f := func(s *Series, timeFilter, resultExpected string) {
|
||||
t.Helper()
|
||||
|
||||
for _, tc := range testCases {
|
||||
query := tc.s.fetchQuery(tc.timeFilter)
|
||||
if query != tc.expected {
|
||||
t.Fatalf("got: \n%s;\nexpected: \n%s", query, tc.expected)
|
||||
result := s.fetchQuery(timeFilter)
|
||||
if result != resultExpected {
|
||||
t.Fatalf("unexpected result\ngot\n%s\nwant\n%s", result, resultExpected)
|
||||
}
|
||||
}
|
||||
|
||||
f(&Series{
|
||||
Measurement: "cpu",
|
||||
Field: "value",
|
||||
LabelPairs: []LabelPair{
|
||||
{
|
||||
Name: "foo",
|
||||
Value: "bar",
|
||||
},
|
||||
},
|
||||
}, "", `select "value" from "cpu" where "foo"::tag='bar'`)
|
||||
|
||||
f(&Series{
|
||||
Measurement: "cpu",
|
||||
Field: "value",
|
||||
LabelPairs: []LabelPair{
|
||||
{
|
||||
Name: "foo",
|
||||
Value: "bar",
|
||||
},
|
||||
{
|
||||
Name: "baz",
|
||||
Value: "qux",
|
||||
},
|
||||
},
|
||||
}, "", `select "value" from "cpu" where "foo"::tag='bar' and "baz"::tag='qux'`)
|
||||
|
||||
f(&Series{
|
||||
Measurement: "cpu",
|
||||
Field: "value",
|
||||
LabelPairs: []LabelPair{
|
||||
{
|
||||
Name: "foo",
|
||||
Value: "b'ar",
|
||||
},
|
||||
},
|
||||
}, "time >= now()", `select "value" from "cpu" where "foo"::tag='b\'ar' and time >= now()`)
|
||||
|
||||
f(&Series{
|
||||
Measurement: "cpu",
|
||||
Field: "value",
|
||||
LabelPairs: []LabelPair{
|
||||
{
|
||||
Name: "name",
|
||||
Value: `dev-mapper-centos\x2dswap.swap`,
|
||||
},
|
||||
{
|
||||
Name: "state",
|
||||
Value: "dev-mapp'er-c'en'tos",
|
||||
},
|
||||
},
|
||||
}, "time >= now()", `select "value" from "cpu" where "name"::tag='dev-mapper-centos\\x2dswap.swap' and "state"::tag='dev-mapp\'er-c\'en\'tos' and time >= now()`)
|
||||
|
||||
f(&Series{
|
||||
Measurement: "cpu",
|
||||
Field: "value",
|
||||
}, "time >= now()", `select "value" from "cpu" where time >= now()`)
|
||||
|
||||
f(&Series{
|
||||
Measurement: "cpu",
|
||||
Field: "value",
|
||||
}, "", `select "value" from "cpu"`)
|
||||
}
|
||||
|
||||
func TestTimeFilter(t *testing.T) {
|
||||
testCases := []struct {
|
||||
start string
|
||||
end string
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
start: "2020-01-01T20:07:00Z",
|
||||
end: "2020-01-01T21:07:00Z",
|
||||
expected: "time >= '2020-01-01T20:07:00Z' and time <= '2020-01-01T21:07:00Z'",
|
||||
},
|
||||
{
|
||||
expected: "",
|
||||
},
|
||||
{
|
||||
start: "2020-01-01T20:07:00Z",
|
||||
expected: "time >= '2020-01-01T20:07:00Z'",
|
||||
},
|
||||
{
|
||||
end: "2020-01-01T21:07:00Z",
|
||||
expected: "time <= '2020-01-01T21:07:00Z'",
|
||||
},
|
||||
}
|
||||
for _, tc := range testCases {
|
||||
f := timeFilter(tc.start, tc.end)
|
||||
if f != tc.expected {
|
||||
t.Fatalf("got: \n%q;\nexpected: \n%q", f, tc.expected)
|
||||
f := func(start, end, resultExpected string) {
|
||||
t.Helper()
|
||||
|
||||
result := timeFilter(start, end)
|
||||
if result != resultExpected {
|
||||
t.Fatalf("unexpected result\ngot\n%v\nwant\n%s", result, resultExpected)
|
||||
}
|
||||
}
|
||||
|
||||
// no start and end filters
|
||||
f("", "", "")
|
||||
|
||||
// missing end filter
|
||||
f("2020-01-01T20:07:00Z", "", "time >= '2020-01-01T20:07:00Z'")
|
||||
|
||||
// missing start filter
|
||||
f("", "2020-01-01T21:07:00Z", "time <= '2020-01-01T21:07:00Z'")
|
||||
|
||||
// both start and end filters
|
||||
f("2020-01-01T20:07:00Z", "2020-01-01T21:07:00Z", "time >= '2020-01-01T20:07:00Z' and time <= '2020-01-01T21:07:00Z'")
|
||||
}
@ -6,71 +6,71 @@ import (
|
|||
"testing"
|
||||
)
|
||||
|
||||
func TestSeries_Unmarshal(t *testing.T) {
|
||||
func TestSeriesUnmarshal(t *testing.T) {
|
||||
f := func(s string, resultExpected *Series) {
|
||||
t.Helper()
|
||||
|
||||
result := &Series{}
|
||||
if err := result.unmarshal(s); err != nil {
|
||||
t.Fatalf("cannot unmarshal series from %q: %s", s, err)
|
||||
}
|
||||
if !reflect.DeepEqual(result, resultExpected) {
|
||||
t.Fatalf("unexpected result\ngot\n%v\nwant\n%v", result, resultExpected)
|
||||
}
|
||||
}
|
||||
|
||||
tag := func(name, value string) LabelPair {
|
||||
return LabelPair{
|
||||
Name: name,
|
||||
Value: value,
|
||||
}
|
||||
}
|
||||
series := func(measurement string, lp ...LabelPair) Series {
|
||||
return Series{
|
||||
series := func(measurement string, lp ...LabelPair) *Series {
|
||||
return &Series{
|
||||
Measurement: measurement,
|
||||
LabelPairs: lp,
|
||||
}
|
||||
}
|
||||
testCases := []struct {
|
||||
got string
|
||||
want Series
|
||||
}{
|
||||
{
|
||||
got: "cpu",
|
||||
want: series("cpu"),
|
||||
},
|
||||
{
|
||||
got: "cpu,host=localhost",
|
||||
want: series("cpu", tag("host", "localhost")),
|
||||
},
|
||||
{
|
||||
got: "cpu,host=localhost,instance=instance",
|
||||
want: series("cpu", tag("host", "localhost"), tag("instance", "instance")),
|
||||
},
|
||||
{
|
||||
got: `fo\,bar\=baz,x\=\b=\\a\,\=\q\ `,
|
||||
want: series("fo,bar=baz", tag(`x=\b`, `\a,=\q `)),
|
||||
},
|
||||
{
|
||||
got: "cpu,host=192.168.0.1,instance=fe80::fdc8:5e36:c2c6:baac%utun1",
|
||||
want: series("cpu", tag("host", "192.168.0.1"), tag("instance", "fe80::fdc8:5e36:c2c6:baac%utun1")),
|
||||
},
|
||||
{
|
||||
got: `cpu,db=db1,host=localhost,server=host\=localhost\ user\=user\ `,
|
||||
want: series("cpu", tag("db", "db1"),
|
||||
tag("host", "localhost"), tag("server", "host=localhost user=user ")),
|
||||
},
|
||||
}
|
||||
for _, tc := range testCases {
|
||||
s := Series{}
|
||||
if err := s.unmarshal(tc.got); err != nil {
|
||||
t.Fatalf("%q: unmarshal err: %s", tc.got, err)
|
||||
}
|
||||
if !reflect.DeepEqual(s, tc.want) {
|
||||
t.Fatalf("%q: expected\n%#v\nto be equal\n%#v", tc.got, s, tc.want)
|
||||
}
|
||||
}
|
||||
|
||||
f("cpu", series("cpu"))
|
||||
|
||||
f("cpu,host=localhost", series("cpu", tag("host", "localhost")))
|
||||
|
||||
f("cpu,host=localhost,instance=instance", series("cpu", tag("host", "localhost"), tag("instance", "instance")))
|
||||
|
||||
f(`fo\,bar\=baz,x\=\b=\\a\,\=\q\ `, series("fo,bar=baz", tag(`x=\b`, `\a,=\q `)))
|
||||
|
||||
f("cpu,host=192.168.0.1,instance=fe80::fdc8:5e36:c2c6:baac%utun1", series("cpu", tag("host", "192.168.0.1"), tag("instance", "fe80::fdc8:5e36:c2c6:baac%utun1")))
|
||||
|
||||
f(`cpu,db=db1,host=localhost,server=host\=localhost\ user\=user\ `, series("cpu", tag("db", "db1"), tag("host", "localhost"), tag("server", "host=localhost user=user ")))
|
||||
}
|
||||
|
||||
func TestToFloat64(t *testing.T) {
|
||||
f := func(in any, want float64) {
|
||||
func TestToFloat64_Failure(t *testing.T) {
|
||||
f := func(in any) {
|
||||
t.Helper()
|
||||
got, err := toFloat64(in)
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected err: %s", err)
|
||||
}
|
||||
if got != want {
|
||||
t.Errorf("got %v; want %v", got, want)
|
||||
|
||||
_, err := toFloat64(in)
|
||||
if err == nil {
|
||||
t.Fatalf("expecting non-nil error")
|
||||
}
|
||||
}
|
||||
|
||||
f("text")
|
||||
}
|
||||
|
||||
func TestToFloat64_Success(t *testing.T) {
|
||||
f := func(in any, resultExpected float64) {
|
||||
t.Helper()
|
||||
|
||||
result, err := toFloat64(in)
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error: %s", err)
|
||||
}
|
||||
if result != resultExpected {
|
||||
t.Fatalf("unexpected result: got %v; want %v", result, resultExpected)
|
||||
}
|
||||
}
|
||||
|
||||
f("123.4", 123.4)
|
||||
f(float64(123.4), 123.4)
|
||||
f(float32(12), 12)
@ -78,9 +78,4 @@ func TestToFloat64(t *testing.T) {
f(true, 1)
f(false, 0)
f(json.Number("123456.789"), 123456.789)

_, err := toFloat64("text")
if err == nil {
t.Fatalf("expected to get err; got nil instead")
}
}
@ -5,30 +5,27 @@ import (
)

func TestInRange(t *testing.T) {
testCases := []struct {
filterMin, filterMax int64
blockMin, blockMax int64
expected bool
}{
{0, 0, 1, 2, true},
{0, 3, 1, 2, true},
{0, 3, 4, 5, false},
{3, 0, 1, 2, false},
{3, 0, 2, 4, true},
{3, 10, 1, 2, false},
{3, 10, 1, 4, true},
{3, 10, 5, 9, true},
{3, 10, 9, 12, true},
{3, 10, 12, 15, false},
}
for _, tc := range testCases {
f := func(filterMin, filterMax, blockMin, blockMax int64, resultExpected bool) {
t.Helper()

f := filter{
min: tc.filterMin,
max: tc.filterMax,
min: filterMin,
max: filterMax,
}
got := f.inRange(tc.blockMin, tc.blockMax)
if got != tc.expected {
t.Fatalf("got %v; expected %v: %v", got, tc.expected, tc)
result := f.inRange(blockMin, blockMax)
if result != resultExpected {
t.Fatalf("unexpected result; got %v; want %v", result, resultExpected)
}
}

f(0, 0, 1, 2, true)
f(0, 3, 1, 2, true)
f(0, 3, 4, 5, false)
f(3, 0, 1, 2, false)
f(3, 0, 2, 4, true)
f(3, 10, 1, 2, false)
f(3, 10, 1, 4, true)
f(3, 10, 5, 9, true)
f(3, 10, 9, 12, true)
f(3, 10, 12, 15, false)
}
@ -139,11 +139,11 @@ func Test_prometheusProcessor_run(t *testing.T) {

_, err = w.Write(input)
if err != nil {
t.Error(err)
t.Fatalf("cannot send 'Y' to importer: %s", err)
}
err = w.Close()
if err != nil {
t.Error(err)
t.Fatalf("cannot close writer: %s", err)
}

stdin := os.Stdin
@ -151,7 +151,6 @@ func Test_prometheusProcessor_run(t *testing.T) {
defer func() {
os.Stdin = stdin
_ = r.Close()
_ = w.Close()
}()
os.Stdin = r
}
@ -162,7 +161,7 @@ func Test_prometheusProcessor_run(t *testing.T) {
}

if err := pp.run(); (err != nil) != tt.wantErr {
t.Errorf("run() error = %v, wantErr %v", err, tt.wantErr)
t.Fatalf("run() error = %v, wantErr %v", err, tt.wantErr)
}
})
}
@ -16,535 +16,358 @@ func mustParseDatetime(t string) time.Time {
|
|||
return result
|
||||
}
|
||||
|
||||
func Test_splitDateRange(t *testing.T) {
|
||||
type args struct {
|
||||
start string
|
||||
end string
|
||||
granularity string
|
||||
}
|
||||
tests := []struct {
|
||||
name string
|
||||
args args
|
||||
want []testTimeRange
|
||||
wantErr bool
|
||||
}{
|
||||
{
|
||||
name: "validates start is before end",
|
||||
args: args{
|
||||
start: "2022-02-01T00:00:00Z",
|
||||
end: "2022-01-01T00:00:00Z",
|
||||
granularity: StepMonth,
|
||||
},
|
||||
want: nil,
|
||||
wantErr: true,
|
||||
},
|
||||
{
|
||||
name: "validates granularity value",
|
||||
args: args{
|
||||
start: "2022-01-01T00:00:00Z",
|
||||
end: "2022-02-01T00:00:00Z",
|
||||
granularity: "non-existent-format",
|
||||
},
|
||||
want: nil,
|
||||
wantErr: true,
|
||||
},
|
||||
{
|
||||
name: "month chunking",
|
||||
args: args{
|
||||
start: "2022-01-03T11:11:11Z",
|
||||
end: "2022-03-03T12:12:12Z",
|
||||
granularity: StepMonth,
|
||||
},
|
||||
want: []testTimeRange{
|
||||
{
|
||||
"2022-01-03T11:11:11Z",
|
||||
"2022-01-31T23:59:59.999999999Z",
|
||||
},
|
||||
{
|
||||
"2022-02-01T00:00:00Z",
|
||||
"2022-02-28T23:59:59.999999999Z",
|
||||
},
|
||||
{
|
||||
"2022-03-01T00:00:00Z",
|
||||
"2022-03-03T12:12:12Z",
|
||||
},
|
||||
},
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "daily chunking",
|
||||
args: args{
|
||||
start: "2022-01-03T11:11:11Z",
|
||||
end: "2022-01-05T12:12:12Z",
|
||||
granularity: StepDay,
|
||||
},
|
||||
want: []testTimeRange{
|
||||
{
|
||||
"2022-01-03T11:11:11Z",
|
||||
"2022-01-04T11:11:11Z",
|
||||
},
|
||||
{
|
||||
"2022-01-04T11:11:11Z",
|
||||
"2022-01-05T11:11:11Z",
|
||||
},
|
||||
{
|
||||
"2022-01-05T11:11:11Z",
|
||||
"2022-01-05T12:12:12Z",
|
||||
},
|
||||
},
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "hourly chunking",
|
||||
args: args{
|
||||
start: "2022-01-03T11:11:11Z",
|
||||
end: "2022-01-03T14:14:14Z",
|
||||
granularity: StepHour,
|
||||
},
|
||||
want: []testTimeRange{
|
||||
{
|
||||
"2022-01-03T11:11:11Z",
|
||||
"2022-01-03T12:11:11Z",
|
||||
},
|
||||
{
|
||||
"2022-01-03T12:11:11Z",
|
||||
"2022-01-03T13:11:11Z",
|
||||
},
|
||||
{
|
||||
"2022-01-03T13:11:11Z",
|
||||
"2022-01-03T14:11:11Z",
|
||||
},
|
||||
{
|
||||
"2022-01-03T14:11:11Z",
|
||||
"2022-01-03T14:14:14Z",
|
||||
},
|
||||
},
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "month chunking with one day time range",
|
||||
args: args{
|
||||
start: "2022-01-03T11:11:11Z",
|
||||
end: "2022-01-04T12:12:12Z",
|
||||
granularity: StepMonth,
|
||||
},
|
||||
want: []testTimeRange{
|
||||
{
|
||||
"2022-01-03T11:11:11Z",
|
||||
"2022-01-04T12:12:12Z",
|
||||
},
|
||||
},
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "month chunking with same day time range",
|
||||
args: args{
|
||||
start: "2022-01-03T11:11:11Z",
|
||||
end: "2022-01-03T12:12:12Z",
|
||||
granularity: StepMonth,
|
||||
},
|
||||
want: []testTimeRange{
|
||||
{
|
||||
"2022-01-03T11:11:11Z",
|
||||
"2022-01-03T12:12:12Z",
|
||||
},
|
||||
},
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "month chunking with one month and two days range",
|
||||
args: args{
|
||||
start: "2022-01-03T11:11:11Z",
|
||||
end: "2022-02-03T00:00:00Z",
|
||||
granularity: StepMonth,
|
||||
},
|
||||
want: []testTimeRange{
|
||||
{
|
||||
"2022-01-03T11:11:11Z",
|
||||
"2022-01-31T23:59:59.999999999Z",
|
||||
},
|
||||
{
|
||||
"2022-02-01T00:00:00Z",
|
||||
"2022-02-03T00:00:00Z",
|
||||
},
|
||||
},
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "week chunking with not full week",
|
||||
args: args{
|
||||
start: "2023-07-30T00:00:00Z",
|
||||
end: "2023-08-05T23:59:59.999999999Z",
|
||||
granularity: StepWeek,
|
||||
},
|
||||
want: []testTimeRange{
|
||||
{
|
||||
"2023-07-30T00:00:00Z",
|
||||
"2023-08-05T23:59:59.999999999Z",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "week chunking with start of the week and end of the week",
|
||||
args: args{
|
||||
start: "2023-07-30T00:00:00Z",
|
||||
end: "2023-08-06T00:00:00Z",
|
||||
granularity: StepWeek,
|
||||
},
|
||||
want: []testTimeRange{
|
||||
{
|
||||
"2023-07-30T00:00:00Z",
|
||||
"2023-08-06T00:00:00Z",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "week chunking with next one day week",
|
||||
args: args{
|
||||
start: "2023-07-30T00:00:00Z",
|
||||
end: "2023-08-07T01:12:00Z",
|
||||
granularity: StepWeek,
|
||||
},
|
||||
want: []testTimeRange{
|
||||
{
|
||||
"2023-07-30T00:00:00Z",
|
||||
"2023-08-06T00:00:00Z",
|
||||
},
|
||||
{
|
||||
"2023-08-06T00:00:00Z",
|
||||
"2023-08-07T01:12:00Z",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "week chunking with month and not full week representation",
|
||||
args: args{
|
||||
start: "2023-07-30T00:00:00Z",
|
||||
end: "2023-09-01T01:12:00Z",
|
||||
granularity: StepWeek,
|
||||
},
|
||||
want: []testTimeRange{
|
||||
{
|
||||
"2023-07-30T00:00:00Z",
|
||||
"2023-08-06T00:00:00Z",
|
||||
},
|
||||
{
|
||||
"2023-08-06T00:00:00Z",
|
||||
"2023-08-13T00:00:00Z",
|
||||
},
|
||||
{
|
||||
"2023-08-13T00:00:00Z",
|
||||
"2023-08-20T00:00:00Z",
|
||||
},
|
||||
{
|
||||
"2023-08-20T00:00:00Z",
|
||||
"2023-08-27T00:00:00Z",
|
||||
},
|
||||
{
|
||||
"2023-08-27T00:00:00Z",
|
||||
"2023-09-01T01:12:00Z",
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
start := mustParseDatetime(tt.args.start)
|
||||
end := mustParseDatetime(tt.args.end)
|
||||
func TestSplitDateRange_Failure(t *testing.T) {
|
||||
f := func(startStr, endStr, granularity string) {
|
||||
t.Helper()
|
||||
|
||||
got, err := SplitDateRange(start, end, tt.args.granularity, false)
|
||||
if (err != nil) != tt.wantErr {
|
||||
t.Errorf("splitDateRange() error = %v, wantErr %v", err, tt.wantErr)
|
||||
return
|
||||
}
|
||||
start := mustParseDatetime(startStr)
|
||||
end := mustParseDatetime(endStr)
|
||||
|
||||
var testExpectedResults [][]time.Time
|
||||
if tt.want != nil {
|
||||
testExpectedResults = make([][]time.Time, 0)
|
||||
for _, dr := range tt.want {
|
||||
testExpectedResults = append(testExpectedResults, []time.Time{
|
||||
mustParseDatetime(dr[0]),
|
||||
mustParseDatetime(dr[1]),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
if !reflect.DeepEqual(got, testExpectedResults) {
|
||||
t.Errorf("splitDateRange() got = %v, want %v", got, testExpectedResults)
|
||||
}
|
||||
})
|
||||
_, err := SplitDateRange(start, end, granularity, false)
|
||||
if err == nil {
|
||||
t.Fatalf("expecting non-nil result")
|
||||
}
|
||||
}
|
||||
|
||||
// validates start is before end
|
||||
f("2022-02-01T00:00:00Z", "2022-01-01T00:00:00Z", StepMonth)
|
||||
|
||||
// validates granularity value
|
||||
f("2022-01-01T00:00:00Z", "2022-02-01T00:00:00Z", "non-existent-format")
|
||||
}
|
||||
|
||||
func Test_splitDateRange_reverse(t *testing.T) {
|
||||
type args struct {
|
||||
start string
|
||||
end string
|
||||
granularity string
|
||||
timeReverse bool
|
||||
}
|
||||
tests := []struct {
|
||||
name string
|
||||
args args
|
||||
want []testTimeRange
|
||||
wantErr bool
|
||||
}{
|
||||
{
|
||||
name: "validates start is before end",
|
||||
args: args{
|
||||
start: "2022-02-01T00:00:00Z",
|
||||
end: "2022-01-01T00:00:00Z",
|
||||
granularity: StepMonth,
|
||||
timeReverse: true,
|
||||
},
|
||||
want: nil,
|
||||
wantErr: true,
|
||||
},
|
||||
{
|
||||
name: "validates granularity value",
|
||||
args: args{
|
||||
start: "2022-01-01T00:00:00Z",
|
||||
end: "2022-02-01T00:00:00Z",
|
||||
granularity: "non-existent-format",
|
||||
timeReverse: true,
|
||||
},
|
||||
want: nil,
|
||||
wantErr: true,
|
||||
},
|
||||
{
|
||||
name: "month chunking",
|
||||
args: args{
|
||||
start: "2022-01-03T11:11:11Z",
|
||||
end: "2022-03-03T12:12:12Z",
|
||||
granularity: StepMonth,
|
||||
timeReverse: true,
|
||||
},
|
||||
want: []testTimeRange{
|
||||
{
|
||||
"2022-03-01T00:00:00Z",
|
||||
"2022-03-03T12:12:12Z",
|
||||
},
|
||||
{
|
||||
"2022-02-01T00:00:00Z",
|
||||
"2022-02-28T23:59:59.999999999Z",
|
||||
},
|
||||
{
|
||||
"2022-01-03T11:11:11Z",
|
||||
"2022-01-31T23:59:59.999999999Z",
|
||||
},
|
||||
},
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "daily chunking",
|
||||
args: args{
|
||||
start: "2022-01-03T11:11:11Z",
|
||||
end: "2022-01-05T12:12:12Z",
|
||||
granularity: StepDay,
|
||||
timeReverse: true,
|
||||
},
|
||||
want: []testTimeRange{
|
||||
{
|
||||
"2022-01-05T11:11:11Z",
|
||||
"2022-01-05T12:12:12Z",
|
||||
},
|
||||
{
|
||||
"2022-01-04T11:11:11Z",
|
||||
"2022-01-05T11:11:11Z",
|
||||
},
|
||||
{
|
||||
"2022-01-03T11:11:11Z",
|
||||
"2022-01-04T11:11:11Z",
|
||||
},
|
||||
},
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "hourly chunking",
|
||||
args: args{
|
||||
start: "2022-01-03T11:11:11Z",
|
||||
end: "2022-01-03T14:14:14Z",
|
||||
granularity: StepHour,
|
||||
timeReverse: true,
|
||||
},
|
||||
want: []testTimeRange{
|
||||
{
|
||||
"2022-01-03T14:11:11Z",
|
||||
"2022-01-03T14:14:14Z",
|
||||
},
|
||||
{
|
||||
"2022-01-03T13:11:11Z",
|
||||
"2022-01-03T14:11:11Z",
|
||||
},
|
||||
{
|
||||
"2022-01-03T12:11:11Z",
|
||||
"2022-01-03T13:11:11Z",
|
||||
},
|
||||
{
|
||||
"2022-01-03T11:11:11Z",
|
||||
"2022-01-03T12:11:11Z",
|
||||
},
|
||||
},
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "month chunking with one day time range",
|
||||
args: args{
|
||||
start: "2022-01-03T11:11:11Z",
|
||||
end: "2022-01-04T12:12:12Z",
|
||||
granularity: StepMonth,
|
||||
timeReverse: true,
|
||||
},
|
||||
want: []testTimeRange{
|
||||
{
|
||||
"2022-01-03T11:11:11Z",
|
||||
"2022-01-04T12:12:12Z",
|
||||
},
|
||||
},
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "month chunking with same day time range",
|
||||
args: args{
|
||||
start: "2022-01-03T11:11:11Z",
|
||||
end: "2022-01-03T12:12:12Z",
|
||||
granularity: StepMonth,
|
||||
timeReverse: true,
|
||||
},
|
||||
want: []testTimeRange{
|
||||
{
|
||||
"2022-01-03T11:11:11Z",
|
||||
"2022-01-03T12:12:12Z",
|
||||
},
|
||||
},
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "month chunking with one month and two days range",
|
||||
args: args{
|
||||
start: "2022-01-03T11:11:11Z",
|
||||
end: "2022-02-03T00:00:00Z",
|
||||
granularity: StepMonth,
|
||||
timeReverse: true,
|
||||
},
|
||||
want: []testTimeRange{
|
||||
{
|
||||
"2022-02-01T00:00:00Z",
|
||||
"2022-02-03T00:00:00Z",
|
||||
},
|
||||
{
|
||||
"2022-01-03T11:11:11Z",
|
||||
"2022-01-31T23:59:59.999999999Z",
|
||||
},
|
||||
},
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "week chunking with not full week",
|
||||
args: args{
|
||||
start: "2023-07-30T00:00:00Z",
|
||||
end: "2023-08-05T23:59:59.999999999Z",
|
||||
granularity: StepWeek,
|
||||
timeReverse: true,
|
||||
},
|
||||
want: []testTimeRange{
|
||||
{
|
||||
"2023-07-30T00:00:00Z",
|
||||
"2023-08-05T23:59:59.999999999Z",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "week chunking with start of the week and end of the week",
|
||||
args: args{
|
||||
start: "2023-07-30T00:00:00Z",
|
||||
end: "2023-08-06T00:00:00Z",
|
||||
granularity: StepWeek,
|
||||
timeReverse: true,
|
||||
},
|
||||
want: []testTimeRange{
|
||||
{
|
||||
"2023-07-30T00:00:00Z",
|
||||
"2023-08-06T00:00:00Z",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "week chunking with next one day week",
|
||||
args: args{
|
||||
start: "2023-07-30T00:00:00Z",
|
||||
end: "2023-08-07T01:12:00Z",
|
||||
granularity: StepWeek,
|
||||
timeReverse: true,
|
||||
},
|
||||
want: []testTimeRange{
|
||||
{
|
||||
"2023-08-06T00:00:00Z",
|
||||
"2023-08-07T01:12:00Z",
|
||||
},
|
||||
{
|
||||
"2023-07-30T00:00:00Z",
|
||||
"2023-08-06T00:00:00Z",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "week chunking with month and not full week representation",
|
||||
args: args{
|
||||
start: "2023-07-30T00:00:00Z",
|
||||
end: "2023-09-01T01:12:00Z",
|
||||
granularity: StepWeek,
|
||||
timeReverse: true,
|
||||
},
|
||||
want: []testTimeRange{
|
||||
{
|
||||
"2023-08-27T00:00:00Z",
|
||||
"2023-09-01T01:12:00Z",
|
||||
},
|
||||
{
|
||||
"2023-08-20T00:00:00Z",
|
||||
"2023-08-27T00:00:00Z",
|
||||
},
|
||||
{
|
||||
"2023-08-13T00:00:00Z",
|
||||
"2023-08-20T00:00:00Z",
|
||||
},
|
||||
{
|
||||
"2023-08-06T00:00:00Z",
|
||||
"2023-08-13T00:00:00Z",
|
||||
},
|
||||
{
|
||||
"2023-07-30T00:00:00Z",
|
||||
"2023-08-06T00:00:00Z",
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
start := mustParseDatetime(tt.args.start)
|
||||
end := mustParseDatetime(tt.args.end)
|
||||
func TestSplitDateRange_Success(t *testing.T) {
|
||||
f := func(startStr, endStr, granularity string, resultExpected []testTimeRange) {
|
||||
t.Helper()
|
||||
|
||||
got, err := SplitDateRange(start, end, tt.args.granularity, tt.args.timeReverse)
|
||||
if (err != nil) != tt.wantErr {
|
||||
t.Errorf("splitDateRange() error = %v, wantErr %v", err, tt.wantErr)
|
||||
return
|
||||
}
|
||||
start := mustParseDatetime(startStr)
|
||||
end := mustParseDatetime(endStr)
|
||||
|
||||
var testExpectedResults [][]time.Time
|
||||
if tt.want != nil {
|
||||
testExpectedResults = make([][]time.Time, 0)
|
||||
for _, dr := range tt.want {
|
||||
testExpectedResults = append(testExpectedResults, []time.Time{
|
||||
mustParseDatetime(dr[0]),
|
||||
mustParseDatetime(dr[1]),
|
||||
})
|
||||
}
|
||||
}
|
||||
result, err := SplitDateRange(start, end, granularity, false)
|
||||
if err != nil {
|
||||
t.Fatalf("SplitDateRange() error: %s", err)
|
||||
}
|
||||
|
||||
if !reflect.DeepEqual(got, testExpectedResults) {
|
||||
t.Errorf("splitDateRange() got = %v, want %v", got, testExpectedResults)
|
||||
}
|
||||
})
|
||||
var testExpectedResults [][]time.Time
|
||||
for _, dr := range resultExpected {
|
||||
testExpectedResults = append(testExpectedResults, []time.Time{
|
||||
mustParseDatetime(dr[0]),
|
||||
mustParseDatetime(dr[1]),
|
||||
})
|
||||
}
|
||||
|
||||
if !reflect.DeepEqual(result, testExpectedResults) {
|
||||
t.Fatalf("unexpected result\ngot\n%v\nwant\n%v", result, testExpectedResults)
|
||||
}
|
||||
}
|
||||
|
||||
// month chunking
|
||||
f("2022-01-03T11:11:11Z", "2022-03-03T12:12:12Z", StepMonth, []testTimeRange{
|
||||
{
|
||||
"2022-01-03T11:11:11Z",
|
||||
"2022-01-31T23:59:59.999999999Z",
|
||||
},
|
||||
{
|
||||
"2022-02-01T00:00:00Z",
|
||||
"2022-02-28T23:59:59.999999999Z",
|
||||
},
|
||||
{
|
||||
"2022-03-01T00:00:00Z",
|
||||
"2022-03-03T12:12:12Z",
|
||||
},
|
||||
})
|
||||
|
||||
// daily chunking
|
||||
f("2022-01-03T11:11:11Z", "2022-01-05T12:12:12Z", StepDay, []testTimeRange{
|
||||
{
|
||||
"2022-01-03T11:11:11Z",
|
||||
"2022-01-04T11:11:11Z",
|
||||
},
|
||||
{
|
||||
"2022-01-04T11:11:11Z",
|
||||
"2022-01-05T11:11:11Z",
|
||||
},
|
||||
{
|
||||
"2022-01-05T11:11:11Z",
|
||||
"2022-01-05T12:12:12Z",
|
||||
},
|
||||
})
|
||||
|
||||
// hourly chunking
|
||||
f("2022-01-03T11:11:11Z", "2022-01-03T14:14:14Z", StepHour, []testTimeRange{
|
||||
{
|
||||
"2022-01-03T11:11:11Z",
|
||||
"2022-01-03T12:11:11Z",
|
||||
},
|
||||
{
|
||||
"2022-01-03T12:11:11Z",
|
||||
"2022-01-03T13:11:11Z",
|
||||
},
|
||||
{
|
||||
"2022-01-03T13:11:11Z",
|
||||
"2022-01-03T14:11:11Z",
|
||||
},
|
||||
{
|
||||
"2022-01-03T14:11:11Z",
|
||||
"2022-01-03T14:14:14Z",
|
||||
},
|
||||
})
|
||||
|
||||
// month chunking with one day time range
|
||||
f("2022-01-03T11:11:11Z", "2022-01-04T12:12:12Z", StepMonth, []testTimeRange{
|
||||
{
|
||||
"2022-01-03T11:11:11Z",
|
||||
"2022-01-04T12:12:12Z",
|
||||
},
|
||||
})
|
||||
|
||||
// month chunking with same day time range
|
||||
f("2022-01-03T11:11:11Z", "2022-01-03T12:12:12Z", StepMonth, []testTimeRange{
|
||||
{
|
||||
"2022-01-03T11:11:11Z",
|
||||
"2022-01-03T12:12:12Z",
|
||||
},
|
||||
})
|
||||
|
||||
// month chunking with one month and two days range
|
||||
f("2022-01-03T11:11:11Z", "2022-02-03T00:00:00Z", StepMonth, []testTimeRange{
|
||||
{
|
||||
"2022-01-03T11:11:11Z",
|
||||
"2022-01-31T23:59:59.999999999Z",
|
||||
},
|
||||
{
|
||||
"2022-02-01T00:00:00Z",
|
||||
"2022-02-03T00:00:00Z",
|
||||
},
|
||||
})
|
||||
|
||||
// week chunking with not full week
|
||||
f("2023-07-30T00:00:00Z", "2023-08-05T23:59:59.999999999Z", StepWeek, []testTimeRange{
|
||||
{
|
||||
"2023-07-30T00:00:00Z",
|
||||
"2023-08-05T23:59:59.999999999Z",
|
||||
},
|
||||
})
|
||||
|
||||
// week chunking with start of the week and end of the week
|
||||
f("2023-07-30T00:00:00Z", "2023-08-06T00:00:00Z", StepWeek, []testTimeRange{
|
||||
{
|
||||
"2023-07-30T00:00:00Z",
|
||||
"2023-08-06T00:00:00Z",
|
||||
},
|
||||
})
|
||||
|
||||
// week chunking with next one day week
|
||||
f("2023-07-30T00:00:00Z", "2023-08-07T01:12:00Z", StepWeek, []testTimeRange{
|
||||
{
|
||||
"2023-07-30T00:00:00Z",
|
||||
"2023-08-06T00:00:00Z",
|
||||
},
|
||||
{
|
||||
"2023-08-06T00:00:00Z",
|
||||
"2023-08-07T01:12:00Z",
|
||||
},
|
||||
})
|
||||
|
||||
// week chunking with month and not full week representation
|
||||
f("2023-07-30T00:00:00Z", "2023-09-01T01:12:00Z", StepWeek, []testTimeRange{
|
||||
{
|
||||
"2023-07-30T00:00:00Z",
|
||||
"2023-08-06T00:00:00Z",
|
||||
},
|
||||
{
|
||||
"2023-08-06T00:00:00Z",
|
||||
"2023-08-13T00:00:00Z",
|
||||
},
|
||||
{
|
||||
"2023-08-13T00:00:00Z",
|
||||
"2023-08-20T00:00:00Z",
|
||||
},
|
||||
{
|
||||
"2023-08-20T00:00:00Z",
|
||||
"2023-08-27T00:00:00Z",
|
||||
},
|
||||
{
|
||||
"2023-08-27T00:00:00Z",
|
||||
"2023-09-01T01:12:00Z",
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
func TestSplitDateRange_Reverse_Failure(t *testing.T) {
|
||||
f := func(startStr, endStr, granularity string) {
|
||||
t.Helper()
|
||||
|
||||
start := mustParseDatetime(startStr)
|
||||
end := mustParseDatetime(endStr)
|
||||
|
||||
_, err := SplitDateRange(start, end, granularity, true)
|
||||
if err == nil {
|
||||
t.Fatalf("expecting non-nil error")
|
||||
}
|
||||
}
|
||||
|
||||
// validates start is before end
|
||||
f("2022-02-01T00:00:00Z", "2022-01-01T00:00:00Z", StepMonth)
|
||||
|
||||
// validates granularity value
|
||||
f("2022-01-01T00:00:00Z", "2022-02-01T00:00:00Z", "non-existent-format")
|
||||
}
|
||||
|
||||
func TestSplitDateRange_Reverse_Success(t *testing.T) {
|
||||
f := func(startStr, endStr, granularity string, resultExpected []testTimeRange) {
|
||||
t.Helper()
|
||||
|
||||
start := mustParseDatetime(startStr)
|
||||
end := mustParseDatetime(endStr)
|
||||
|
||||
result, err := SplitDateRange(start, end, granularity, true)
|
||||
if err != nil {
|
||||
t.Fatalf("SplitDateRange() error: %s", err)
|
||||
}
|
||||
|
||||
var testExpectedResults [][]time.Time
|
||||
for _, dr := range resultExpected {
|
||||
testExpectedResults = append(testExpectedResults, []time.Time{
|
||||
mustParseDatetime(dr[0]),
|
||||
mustParseDatetime(dr[1]),
|
||||
})
|
||||
}
|
||||
|
||||
if !reflect.DeepEqual(result, testExpectedResults) {
|
||||
t.Fatalf("unexpected result\ngot\n%v\nwant\n%v", result, testExpectedResults)
|
||||
}
|
||||
}
|
||||
|
||||
// month chunking
|
||||
f("2022-01-03T11:11:11Z", "2022-03-03T12:12:12Z", StepMonth, []testTimeRange{
|
||||
{
|
||||
"2022-03-01T00:00:00Z",
|
||||
"2022-03-03T12:12:12Z",
|
||||
},
|
||||
{
|
||||
"2022-02-01T00:00:00Z",
|
||||
"2022-02-28T23:59:59.999999999Z",
|
||||
},
|
||||
{
|
||||
"2022-01-03T11:11:11Z",
|
||||
"2022-01-31T23:59:59.999999999Z",
|
||||
},
|
||||
})
|
||||
|
||||
// daily chunking
|
||||
f("2022-01-03T11:11:11Z", "2022-01-05T12:12:12Z", StepDay, []testTimeRange{
|
||||
{
|
||||
"2022-01-05T11:11:11Z",
|
||||
"2022-01-05T12:12:12Z",
|
||||
},
|
||||
{
|
||||
"2022-01-04T11:11:11Z",
|
||||
"2022-01-05T11:11:11Z",
|
||||
},
|
||||
{
|
||||
"2022-01-03T11:11:11Z",
|
||||
"2022-01-04T11:11:11Z",
|
||||
},
|
||||
})
|
||||
|
||||
// hourly chunking
|
||||
f("2022-01-03T11:11:11Z", "2022-01-03T14:14:14Z", StepHour, []testTimeRange{
|
||||
{
|
||||
"2022-01-03T14:11:11Z",
|
||||
"2022-01-03T14:14:14Z",
|
||||
},
|
||||
{
|
||||
"2022-01-03T13:11:11Z",
|
||||
"2022-01-03T14:11:11Z",
|
||||
},
|
||||
{
|
||||
"2022-01-03T12:11:11Z",
|
||||
"2022-01-03T13:11:11Z",
|
||||
},
|
||||
{
|
||||
"2022-01-03T11:11:11Z",
|
||||
"2022-01-03T12:11:11Z",
|
||||
},
|
||||
})
|
||||
|
||||
// month chunking with one day time range
|
||||
f("2022-01-03T11:11:11Z", "2022-01-04T12:12:12Z", StepMonth, []testTimeRange{
|
||||
{
|
||||
"2022-01-03T11:11:11Z",
|
||||
"2022-01-04T12:12:12Z",
|
||||
},
|
||||
})
|
||||
|
||||
// month chunking with same day time range
|
||||
f("2022-01-03T11:11:11Z", "2022-01-03T12:12:12Z", StepMonth, []testTimeRange{
|
||||
{
|
||||
"2022-01-03T11:11:11Z",
|
||||
"2022-01-03T12:12:12Z",
|
||||
},
|
||||
})
|
||||
|
||||
// month chunking with one month and two days range
|
||||
f("2022-01-03T11:11:11Z", "2022-02-03T00:00:00Z", StepMonth, []testTimeRange{
|
||||
{
|
||||
"2022-02-01T00:00:00Z",
|
||||
"2022-02-03T00:00:00Z",
|
||||
},
|
||||
{
|
||||
"2022-01-03T11:11:11Z",
|
||||
"2022-01-31T23:59:59.999999999Z",
|
||||
},
|
||||
})
|
||||
|
||||
// week chunking with not full week
|
||||
f("2023-07-30T00:00:00Z", "2023-08-05T23:59:59.999999999Z", StepWeek, []testTimeRange{
|
||||
{
|
||||
"2023-07-30T00:00:00Z",
|
||||
"2023-08-05T23:59:59.999999999Z",
|
||||
},
|
||||
})
|
||||
|
||||
// week chunking with start of the week and end of the week
|
||||
f("2023-07-30T00:00:00Z", "2023-08-06T00:00:00Z", StepWeek, []testTimeRange{
|
||||
{
|
||||
"2023-07-30T00:00:00Z",
|
||||
"2023-08-06T00:00:00Z",
|
||||
},
|
||||
})
|
||||
|
||||
// week chunking with next one day week
|
||||
f("2023-07-30T00:00:00Z", "2023-08-07T01:12:00Z", StepWeek, []testTimeRange{
|
||||
{
|
||||
"2023-08-06T00:00:00Z",
|
||||
"2023-08-07T01:12:00Z",
|
||||
},
|
||||
{
|
||||
"2023-07-30T00:00:00Z",
|
||||
"2023-08-06T00:00:00Z",
|
||||
},
|
||||
})
|
||||
|
||||
// week chunking with month and not full week representation
|
||||
f("2023-07-30T00:00:00Z", "2023-09-01T01:12:00Z", StepWeek, []testTimeRange{
|
||||
{
|
||||
"2023-08-27T00:00:00Z",
|
||||
"2023-09-01T01:12:00Z",
|
||||
},
|
||||
{
|
||||
"2023-08-20T00:00:00Z",
|
||||
"2023-08-27T00:00:00Z",
|
||||
},
|
||||
{
|
||||
"2023-08-13T00:00:00Z",
|
||||
"2023-08-20T00:00:00Z",
|
||||
},
|
||||
{
|
||||
"2023-08-06T00:00:00Z",
|
||||
"2023-08-13T00:00:00Z",
|
||||
},
|
||||
{
|
||||
"2023-07-30T00:00:00Z",
|
||||
"2023-08-06T00:00:00Z",
|
||||
},
|
||||
})
|
||||
}
@ -5,175 +5,87 @@ import (
|
|||
"time"
|
||||
)
|
||||
|
||||
func TestGetTime(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
s string
|
||||
want func() time.Time
|
||||
wantErr bool
|
||||
}{
|
||||
{
|
||||
name: "empty string",
|
||||
s: "",
|
||||
want: func() time.Time { return time.Time{} },
|
||||
wantErr: true,
|
||||
},
|
||||
{
|
||||
name: "only year",
|
||||
s: "2019",
|
||||
want: func() time.Time {
|
||||
t := time.Date(2019, 1, 1, 0, 0, 0, 0, time.UTC)
|
||||
return t
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "year and month",
|
||||
s: "2019-01",
|
||||
want: func() time.Time {
|
||||
t := time.Date(2019, 1, 1, 0, 0, 0, 0, time.UTC)
|
||||
return t
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "year and not first month",
|
||||
s: "2019-02",
|
||||
want: func() time.Time {
|
||||
t := time.Date(2019, 2, 1, 0, 0, 0, 0, time.UTC)
|
||||
return t
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "year, month and day",
|
||||
s: "2019-02-01",
|
||||
want: func() time.Time {
|
||||
t := time.Date(2019, 2, 1, 0, 0, 0, 0, time.UTC)
|
||||
return t
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "year, month and not first day",
|
||||
s: "2019-02-10",
|
||||
want: func() time.Time {
|
||||
t := time.Date(2019, 2, 10, 0, 0, 0, 0, time.UTC)
|
||||
return t
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "year, month, day and time",
|
||||
s: "2019-02-02T00",
|
||||
want: func() time.Time {
|
||||
t := time.Date(2019, 2, 2, 0, 0, 0, 0, time.UTC)
|
||||
return t
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "year, month, day and one hour time",
|
||||
s: "2019-02-02T01",
|
||||
want: func() time.Time {
|
||||
t := time.Date(2019, 2, 2, 1, 0, 0, 0, time.UTC)
|
||||
return t
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "time with zero minutes",
|
||||
s: "2019-02-02T01:00",
|
||||
want: func() time.Time {
|
||||
t := time.Date(2019, 2, 2, 1, 0, 0, 0, time.UTC)
|
||||
return t
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "time with one minute",
|
||||
s: "2019-02-02T01:01",
|
||||
want: func() time.Time {
|
||||
t := time.Date(2019, 2, 2, 1, 1, 0, 0, time.UTC)
|
||||
return t
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "time with zero seconds",
|
||||
s: "2019-02-02T01:01:00",
|
||||
want: func() time.Time {
|
||||
t := time.Date(2019, 2, 2, 1, 1, 0, 0, time.UTC)
|
||||
return t
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "timezone with one second",
|
||||
s: "2019-02-02T01:01:01",
|
||||
want: func() time.Time {
|
||||
t := time.Date(2019, 2, 2, 1, 1, 1, 0, time.UTC)
|
||||
return t
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "time with two second and timezone",
|
||||
s: "2019-07-07T20:01:02Z",
|
||||
want: func() time.Time {
|
||||
t := time.Date(2019, 7, 7, 20, 1, 02, 0, time.UTC)
|
||||
return t
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "time with seconds and timezone",
|
||||
s: "2019-07-07T20:47:40+03:00",
|
||||
want: func() time.Time {
|
||||
l, _ := time.LoadLocation("Europe/Kiev")
|
||||
t := time.Date(2019, 7, 7, 20, 47, 40, 0, l)
|
||||
return t
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "negative time",
|
||||
s: "-292273086-05-16T16:47:06Z",
|
||||
want: func() time.Time { return time.Time{} },
|
||||
wantErr: true,
|
||||
},
|
||||
{
|
||||
name: "float timestamp representation",
|
||||
s: "1562529662.324",
|
||||
want: func() time.Time {
|
||||
t := time.Date(2019, 7, 7, 20, 01, 02, 324e6, time.UTC)
|
||||
return t
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "negative timestamp",
|
||||
s: "-9223372036.855",
|
||||
want: func() time.Time {
|
||||
return time.Date(1970, 01, 01, 00, 00, 00, 00, time.UTC)
|
||||
},
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "big timestamp",
|
||||
s: "1223372036855",
|
||||
want: func() time.Time {
|
||||
t := time.Date(2008, 10, 7, 9, 33, 56, 855e6, time.UTC)
|
||||
return t
|
||||
},
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "duration time",
|
||||
s: "1h5m",
|
||||
want: func() time.Time {
|
||||
t := time.Now().Add(-1 * time.Hour).Add(-5 * time.Minute)
|
||||
return t
|
||||
},
|
||||
},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
got, err := ParseTime(tt.s)
|
||||
if (err != nil) != tt.wantErr {
|
||||
t.Errorf("ParseTime() error = %v, wantErr %v", err, tt.wantErr)
|
||||
return
|
||||
}
|
||||
w := tt.want()
|
||||
if got.Unix() != w.Unix() {
|
||||
t.Errorf("ParseTime() got = %v, want %v", got, w)
|
||||
}
|
||||
})
|
||||
func TestGetTime_Failure(t *testing.T) {
|
||||
f := func(s string) {
|
||||
t.Helper()
|
||||
|
||||
_, err := ParseTime(s)
|
||||
if err == nil {
|
||||
t.Fatalf("expecting non-nil error")
|
||||
}
|
||||
}
|
||||
|
||||
// empty string
|
||||
f("")
|
||||
|
||||
// negative time
|
||||
f("-292273086-05-16T16:47:06Z")
|
||||
}
|
||||
|
||||
func TestGetTime_Success(t *testing.T) {
|
||||
f := func(s string, resultExpected time.Time) {
|
||||
t.Helper()
|
||||
|
||||
result, err := ParseTime(s)
|
||||
if err != nil {
|
||||
t.Fatalf("ParseTime() error: %s", err)
|
||||
}
|
||||
if result.Unix() != resultExpected.Unix() {
|
||||
t.Fatalf("unexpected result; got %s; want %s", result, resultExpected)
|
||||
}
|
||||
}
|
||||
|
||||
// only year
|
||||
f("2019", time.Date(2019, 1, 1, 0, 0, 0, 0, time.UTC))
|
||||
|
||||
// year and month
|
||||
f("2019-01", time.Date(2019, 1, 1, 0, 0, 0, 0, time.UTC))
|
||||
|
||||
// year and not first month
|
||||
f("2019-02", time.Date(2019, 2, 1, 0, 0, 0, 0, time.UTC))
|
||||
|
||||
// year, month and day
|
||||
f("2019-02-01", time.Date(2019, 2, 1, 0, 0, 0, 0, time.UTC))
|
||||
|
||||
// year, month and not first day
|
||||
f("2019-02-10", time.Date(2019, 2, 10, 0, 0, 0, 0, time.UTC))
|
||||
|
||||
// year, month, day and time
|
||||
f("2019-02-02T00", time.Date(2019, 2, 2, 0, 0, 0, 0, time.UTC))
|
||||
|
||||
// year, month, day and one hour time
|
||||
f("2019-02-02T01", time.Date(2019, 2, 2, 1, 0, 0, 0, time.UTC))
|
||||
|
||||
// time with zero minutes
|
||||
f("2019-02-02T01:00", time.Date(2019, 2, 2, 1, 0, 0, 0, time.UTC))
|
||||
|
||||
// time with one minute
|
||||
f("2019-02-02T01:01", time.Date(2019, 2, 2, 1, 1, 0, 0, time.UTC))
|
||||
|
||||
// time with zero seconds
|
||||
f("2019-02-02T01:01:00", time.Date(2019, 2, 2, 1, 1, 0, 0, time.UTC))
|
||||
|
||||
// timezone with one second
|
||||
f("2019-02-02T01:01:01", time.Date(2019, 2, 2, 1, 1, 1, 0, time.UTC))
|
||||
|
||||
// time with two second and timezone
|
||||
f("2019-07-07T20:01:02Z", time.Date(2019, 7, 7, 20, 1, 02, 0, time.UTC))
|
||||
|
||||
// time with seconds and timezone
|
||||
f("2019-07-07T20:47:40+03:00", func() time.Time {
|
||||
l, _ := time.LoadLocation("Europe/Kiev")
|
||||
return time.Date(2019, 7, 7, 20, 47, 40, 0, l)
|
||||
}())
|
||||
|
||||
// float timestamp representation
|
||||
f("1562529662.324", time.Date(2019, 7, 7, 20, 01, 02, 324e6, time.UTC))
|
||||
|
||||
// negative timestamp
|
||||
f("-9223372036.855", time.Date(1970, 01, 01, 00, 00, 00, 00, time.UTC))
|
||||
|
||||
// big timestamp
|
||||
f("1223372036855", time.Date(2008, 10, 7, 9, 33, 56, 855e6, time.UTC))
|
||||
|
||||
// duration time
|
||||
f("1h5m", time.Now().Add(-1*time.Hour).Add(-5*time.Minute))
|
||||
}
@ -7,83 +7,68 @@ import (
|
|||
"testing"
|
||||
)
|
||||
|
||||
func TestTimeSeries_Write(t *testing.T) {
|
||||
var testCases = []struct {
|
||||
name string
|
||||
ts *TimeSeries
|
||||
exp string
|
||||
}{
|
||||
{
|
||||
name: "one datapoint",
|
||||
ts: &TimeSeries{
|
||||
Name: "foo",
|
||||
LabelPairs: []LabelPair{
|
||||
{
|
||||
Name: "key",
|
||||
Value: "val",
|
||||
},
|
||||
},
|
||||
Timestamps: []int64{1577877162200},
|
||||
Values: []float64{1},
|
||||
},
|
||||
exp: `{"metric":{"__name__":"foo","key":"val"},"timestamps":[1577877162200],"values":[1]}`,
|
||||
},
|
||||
{
|
||||
name: "multiple samples",
|
||||
ts: &TimeSeries{
|
||||
Name: "foo",
|
||||
LabelPairs: []LabelPair{
|
||||
{
|
||||
Name: "key",
|
||||
Value: "val",
|
||||
},
|
||||
},
|
||||
Timestamps: []int64{1577877162200, 15778771622400, 15778771622600},
|
||||
Values: []float64{1, 1.6263, 32.123},
|
||||
},
|
||||
exp: `{"metric":{"__name__":"foo","key":"val"},"timestamps":[1577877162200,15778771622400,15778771622600],"values":[1,1.6263,32.123]}`,
|
||||
},
|
||||
{
|
||||
name: "no samples",
|
||||
ts: &TimeSeries{
|
||||
Name: "foo",
|
||||
LabelPairs: []LabelPair{
|
||||
{
|
||||
Name: "key",
|
||||
Value: "val",
|
||||
},
|
||||
},
|
||||
},
|
||||
exp: ``,
|
||||
},
|
||||
{
|
||||
name: "inf values",
|
||||
ts: &TimeSeries{
|
||||
Name: "foo",
|
||||
LabelPairs: []LabelPair{
|
||||
{
|
||||
Name: "key",
|
||||
Value: "val",
|
||||
},
|
||||
},
|
||||
Timestamps: []int64{1577877162200, 1577877162200, 1577877162200},
|
||||
Values: []float64{0, math.Inf(-1), math.Inf(1)},
|
||||
},
|
||||
exp: `{"metric":{"__name__":"foo","key":"val"},"timestamps":[1577877162200,1577877162200,1577877162200],"values":[0,-Inf,+Inf]}`,
|
||||
},
|
||||
func TestTimeSeriesWrite(t *testing.T) {
|
||||
f := func(ts *TimeSeries, resultExpected string) {
|
||||
t.Helper()
|
||||
|
||||
var b bytes.Buffer
|
||||
_, err := ts.write(&b)
|
||||
if err != nil {
|
||||
t.Fatalf("error in TimeSeries.write: %s", err)
|
||||
}
|
||||
result := strings.TrimSpace(b.String())
|
||||
if result != resultExpected {
|
||||
t.Fatalf("unexpected result\ngot\n%v\nwant\n%v", result, resultExpected)
|
||||
}
|
||||
}
|
||||
|
||||
for _, tc := range testCases {
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
b := &bytes.Buffer{}
|
||||
_, err := tc.ts.write(b)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
got := strings.TrimSpace(b.String())
|
||||
if got != tc.exp {
|
||||
t.Fatalf("\ngot: %q\nwant: %q", got, tc.exp)
|
||||
}
|
||||
})
|
||||
}
|
||||
// one datapoint
|
||||
f(&TimeSeries{
|
||||
Name: "foo",
|
||||
LabelPairs: []LabelPair{
|
||||
{
|
||||
Name: "key",
|
||||
Value: "val",
|
||||
},
|
||||
},
|
||||
Timestamps: []int64{1577877162200},
|
||||
Values: []float64{1},
|
||||
}, `{"metric":{"__name__":"foo","key":"val"},"timestamps":[1577877162200],"values":[1]}`)
|
||||
|
||||
// multiple samples
|
||||
f(&TimeSeries{
|
||||
Name: "foo",
|
||||
LabelPairs: []LabelPair{
|
||||
{
|
||||
Name: "key",
|
||||
Value: "val",
|
||||
},
|
||||
},
|
||||
Timestamps: []int64{1577877162200, 15778771622400, 15778771622600},
|
||||
Values: []float64{1, 1.6263, 32.123},
|
||||
}, `{"metric":{"__name__":"foo","key":"val"},"timestamps":[1577877162200,15778771622400,15778771622600],"values":[1,1.6263,32.123]}`)
|
||||
|
||||
// no samples
|
||||
f(&TimeSeries{
|
||||
Name: "foo",
|
||||
LabelPairs: []LabelPair{
|
||||
{
|
||||
Name: "key",
|
||||
Value: "val",
|
||||
},
|
||||
},
|
||||
}, ``)
|
||||
|
||||
// inf values
|
||||
f(&TimeSeries{
|
||||
Name: "foo",
|
||||
LabelPairs: []LabelPair{
|
||||
{
|
||||
Name: "key",
|
||||
Value: "val",
|
||||
},
|
||||
},
|
||||
Timestamps: []int64{1577877162200, 1577877162200, 1577877162200},
|
||||
Values: []float64{0, math.Inf(-1), math.Inf(1)},
|
||||
}, `{"metric":{"__name__":"foo","key":"val"},"timestamps":[1577877162200,1577877162200,1577877162200],"values":[0,-Inf,+Inf]}`)
|
||||
}
@ -2,68 +2,42 @@ package vm
|
|||
|
||||
import "testing"
|
||||
|
||||
func TestAddExtraLabelsToImportPath(t *testing.T) {
|
||||
type args struct {
|
||||
path string
|
||||
extraLabels []string
|
||||
}
|
||||
tests := []struct {
|
||||
name string
|
||||
args args
|
||||
want string
|
||||
wantErr bool
|
||||
}{
|
||||
{
|
||||
name: "ok w/o extra labels",
|
||||
args: args{
|
||||
path: "/api/v1/import",
|
||||
},
|
||||
want: "/api/v1/import",
|
||||
},
|
||||
{
|
||||
name: "ok one extra label",
|
||||
args: args{
|
||||
path: "/api/v1/import",
|
||||
extraLabels: []string{"instance=host-1"},
|
||||
},
|
||||
want: "/api/v1/import?extra_label=instance=host-1",
|
||||
},
|
||||
{
|
||||
name: "ok two extra labels",
|
||||
args: args{
|
||||
path: "/api/v1/import",
|
||||
extraLabels: []string{"instance=host-2", "job=vmagent"},
|
||||
},
|
||||
want: "/api/v1/import?extra_label=instance=host-2&extra_label=job=vmagent",
|
||||
},
|
||||
{
|
||||
name: "ok two extra with exist param",
|
||||
args: args{
|
||||
path: "/api/v1/import?timeout=50",
|
||||
extraLabels: []string{"instance=host-2", "job=vmagent"},
|
||||
},
|
||||
want: "/api/v1/import?timeout=50&extra_label=instance=host-2&extra_label=job=vmagent",
|
||||
},
|
||||
{
|
||||
name: "bad incorrect format for extra label",
|
||||
args: args{
|
||||
path: "/api/v1/import",
|
||||
extraLabels: []string{"label=value", "bad_label_wo_value"},
|
||||
},
|
||||
want: "/api/v1/import",
|
||||
wantErr: true,
|
||||
},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
got, err := AddExtraLabelsToImportPath(tt.args.path, tt.args.extraLabels)
|
||||
if (err != nil) != tt.wantErr {
|
||||
t.Errorf("AddExtraLabelsToImportPath() error = %v, wantErr %v", err, tt.wantErr)
|
||||
return
|
||||
}
|
||||
if got != tt.want {
|
||||
t.Errorf("AddExtraLabelsToImportPath() got = %v, want %v", got, tt.want)
|
||||
}
|
||||
})
|
||||
func TestAddExtraLabelsToImportPath_Failure(t *testing.T) {
|
||||
f := func(path string, extraLabels []string) {
|
||||
t.Helper()
|
||||
|
||||
_, err := AddExtraLabelsToImportPath(path, extraLabels)
|
||||
if err == nil {
|
||||
t.Fatalf("expecting non-nil error")
|
||||
}
|
||||
}
|
||||
|
||||
// bad incorrect format for extra label
|
||||
f("/api/v1/import", []string{"label=value", "bad_label_wo_value"})
|
||||
}
|
||||
|
||||
func TestAddExtraLabelsToImportPath_Success(t *testing.T) {
|
||||
f := func(path string, extraLabels []string, resultExpected string) {
|
||||
t.Helper()
|
||||
|
||||
result, err := AddExtraLabelsToImportPath(path, extraLabels)
|
||||
if err != nil {
|
||||
t.Fatalf("AddExtraLabelsToImportPath() error: %s", err)
|
||||
}
|
||||
if result != resultExpected {
|
||||
t.Fatalf("unexpected result; got %q; want %q", result, resultExpected)
|
||||
}
|
||||
}
|
||||
|
||||
// ok w/o extra labels
|
||||
f("/api/v1/import", nil, "/api/v1/import")
|
||||
|
||||
// ok one extra label
|
||||
f("/api/v1/import", []string{"instance=host-1"}, "/api/v1/import?extra_label=instance=host-1")
|
||||
|
||||
// ok two extra labels
|
||||
f("/api/v1/import", []string{"instance=host-2", "job=vmagent"}, "/api/v1/import?extra_label=instance=host-2&extra_label=job=vmagent")
|
||||
|
||||
// ok two extra with exist param
|
||||
f("/api/v1/import?timeout=50", []string{"instance=host-2", "job=vmagent"}, "/api/v1/import?timeout=50&extra_label=instance=host-2&extra_label=job=vmagent")
|
||||
}
@ -13,7 +13,6 @@ import (
|
|||
|
||||
"github.com/VictoriaMetrics/VictoriaMetrics/app/vmctl/backoff"
|
||||
"github.com/VictoriaMetrics/VictoriaMetrics/app/vmctl/native"
|
||||
"github.com/VictoriaMetrics/VictoriaMetrics/app/vmctl/stepper"
|
||||
remote_read_integration "github.com/VictoriaMetrics/VictoriaMetrics/app/vmctl/testdata/servers_integration_test"
|
||||
"github.com/VictoriaMetrics/VictoriaMetrics/app/vmctl/vm"
|
||||
"github.com/VictoriaMetrics/VictoriaMetrics/app/vmselect/promql"
|
||||
|
@ -27,7 +26,82 @@ const (
|
|||
retentionPeriod = "100y"
|
||||
)
|
||||
|
||||
func Test_vmNativeProcessor_run(t *testing.T) {
|
||||
func TestVMNativeProcessorRun(t *testing.T) {
|
||||
f := func(startStr, endStr string, numOfSeries, numOfSamples int, resultExpected []vm.TimeSeries) {
|
||||
t.Helper()
|
||||
|
||||
src := remote_read_integration.NewRemoteWriteServer(t)
|
||||
dst := remote_read_integration.NewRemoteWriteServer(t)
|
||||
|
||||
defer func() {
|
||||
src.Close()
|
||||
dst.Close()
|
||||
}()
|
||||
|
||||
start, err := time.Parse(time.RFC3339, startStr)
|
||||
if err != nil {
|
||||
t.Fatalf("cannot parse start time: %s", err)
|
||||
}
|
||||
|
||||
end, err := time.Parse(time.RFC3339, endStr)
|
||||
if err != nil {
|
||||
t.Fatalf("cannot parse end time: %s", err)
|
||||
}
|
||||
|
||||
matchName := "__name__"
|
||||
matchValue := ".*"
|
||||
filter := native.Filter{
|
||||
Match: fmt.Sprintf("{%s=~%q}", matchName, matchValue),
|
||||
TimeStart: startStr,
|
||||
TimeEnd: endStr,
|
||||
}
|
||||
|
||||
rws := remote_read_integration.GenerateVNSeries(start.Unix(), end.Unix(), int64(numOfSeries), int64(numOfSamples))
|
||||
|
||||
src.Series(rws)
|
||||
dst.ExpectedSeries(resultExpected)
|
||||
|
||||
if err := fillStorage(rws); err != nil {
|
||||
t.Fatalf("cannot add series to storage: %s", err)
|
||||
}
|
||||
|
||||
srcClient := &native.Client{
|
||||
AuthCfg: nil,
|
||||
Addr: src.URL(),
|
||||
ExtraLabels: []string{},
|
||||
HTTPClient: &http.Client{Transport: &http.Transport{DisableKeepAlives: false}},
|
||||
}
|
||||
dstClient := &native.Client{
|
||||
AuthCfg: nil,
|
||||
Addr: dst.URL(),
|
||||
ExtraLabels: []string{},
|
||||
HTTPClient: &http.Client{Transport: &http.Transport{DisableKeepAlives: false}},
|
||||
}
|
||||
|
||||
isSilent = true
|
||||
defer func() { isSilent = false }()
|
||||
|
||||
p := &vmNativeProcessor{
|
||||
filter: filter,
|
||||
dst: dstClient,
|
||||
src: srcClient,
|
||||
backoff: backoff.New(),
|
||||
cc: 1,
|
||||
isNative: true,
|
||||
}
|
||||
|
||||
ctx := context.Background()
|
||||
if err := p.run(ctx); err != nil {
|
||||
t.Fatalf("run() error: %s", err)
|
||||
}
|
||||
deleted, err := deleteSeries(matchName, matchValue)
|
||||
if err != nil {
|
||||
t.Fatalf("cannot delete series: %s", err)
|
||||
}
|
||||
if deleted != numOfSeries {
|
||||
t.Fatalf("unexpected number of deleted series; got %d; want %d", deleted, numOfSeries)
|
||||
}
|
||||
}
|
||||
|
||||
processFlags()
|
||||
vmstorage.Init(promql.ResetRollupResultCacheIfNeeded)
|
||||
|
@@ -42,214 +116,78 @@ func Test_vmNativeProcessor_run(t *testing.T) {
|
|||
defer func() {
|
||||
barpool.Disable(false)
|
||||
}()
|
||||
defer func() { isSilent = false }()
|
||||
|
||||
type fields struct {
|
||||
filter native.Filter
|
||||
dst *native.Client
|
||||
src *native.Client
|
||||
backoff *backoff.Backoff
|
||||
s *stats
|
||||
rateLimit int64
|
||||
interCluster bool
|
||||
cc int
|
||||
matchName string
|
||||
matchValue string
|
||||
}
|
||||
type args struct {
|
||||
ctx context.Context
|
||||
silent bool
|
||||
}
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
fields fields
|
||||
args args
|
||||
vmSeries func(start, end, numOfSeries, numOfSamples int64) []vm.TimeSeries
|
||||
expectedSeries []vm.TimeSeries
|
||||
start string
|
||||
end string
|
||||
numOfSamples int64
|
||||
numOfSeries int64
|
||||
chunk string
|
||||
wantErr bool
|
||||
}{
|
||||
// step minute on minute time range
|
||||
start := "2022-11-25T11:23:05+02:00"
|
||||
end := "2022-11-27T11:24:05+02:00"
|
||||
numOfSeries := 3
|
||||
numOfSamples := 2
|
||||
resultExpected := []vm.TimeSeries{
|
||||
{
|
||||
name: "step minute on minute time range",
|
||||
start: "2022-11-25T11:23:05+02:00",
|
||||
end: "2022-11-27T11:24:05+02:00",
|
||||
numOfSamples: 2,
|
||||
numOfSeries: 3,
|
||||
chunk: stepper.StepMinute,
|
||||
fields: fields{
|
||||
filter: native.Filter{},
|
||||
backoff: backoff.New(),
|
||||
rateLimit: 0,
|
||||
interCluster: false,
|
||||
cc: 1,
|
||||
matchName: "__name__",
|
||||
matchValue: ".*",
|
||||
},
|
||||
args: args{
|
||||
ctx: context.Background(),
|
||||
silent: true,
|
||||
},
|
||||
vmSeries: remote_read_integration.GenerateVNSeries,
|
||||
expectedSeries: []vm.TimeSeries{
|
||||
{
|
||||
Name: "vm_metric_1",
|
||||
LabelPairs: []vm.LabelPair{{Name: "job", Value: "0"}},
|
||||
Timestamps: []int64{1669368185000, 1669454615000},
|
||||
Values: []float64{0, 0},
|
||||
},
|
||||
{
|
||||
Name: "vm_metric_1",
|
||||
LabelPairs: []vm.LabelPair{{Name: "job", Value: "1"}},
|
||||
Timestamps: []int64{1669368185000, 1669454615000},
|
||||
Values: []float64{100, 100},
|
||||
},
|
||||
{
|
||||
Name: "vm_metric_1",
|
||||
LabelPairs: []vm.LabelPair{{Name: "job", Value: "2"}},
|
||||
Timestamps: []int64{1669368185000, 1669454615000},
|
||||
Values: []float64{200, 200},
|
||||
},
|
||||
},
|
||||
wantErr: false,
|
||||
Name: "vm_metric_1",
|
||||
LabelPairs: []vm.LabelPair{{Name: "job", Value: "0"}},
|
||||
Timestamps: []int64{1669368185000, 1669454615000},
|
||||
Values: []float64{0, 0},
|
||||
},
|
||||
{
|
||||
name: "step month on month time range",
|
||||
start: "2022-09-26T11:23:05+02:00",
|
||||
end: "2022-11-26T11:24:05+02:00",
|
||||
numOfSamples: 2,
|
||||
numOfSeries: 3,
|
||||
chunk: stepper.StepMonth,
|
||||
fields: fields{
|
||||
filter: native.Filter{},
|
||||
backoff: backoff.New(),
|
||||
rateLimit: 0,
|
||||
interCluster: false,
|
||||
cc: 1,
|
||||
matchName: "__name__",
|
||||
matchValue: ".*",
|
||||
},
|
||||
args: args{
|
||||
ctx: context.Background(),
|
||||
silent: true,
|
||||
},
|
||||
vmSeries: remote_read_integration.GenerateVNSeries,
|
||||
expectedSeries: []vm.TimeSeries{
|
||||
{
|
||||
Name: "vm_metric_1",
|
||||
LabelPairs: []vm.LabelPair{{Name: "job", Value: "0"}},
|
||||
Timestamps: []int64{1664184185000},
|
||||
Values: []float64{0},
|
||||
},
|
||||
{
|
||||
Name: "vm_metric_1",
|
||||
LabelPairs: []vm.LabelPair{{Name: "job", Value: "0"}},
|
||||
Timestamps: []int64{1666819415000},
|
||||
Values: []float64{0},
|
||||
},
|
||||
{
|
||||
Name: "vm_metric_1",
|
||||
LabelPairs: []vm.LabelPair{{Name: "job", Value: "1"}},
|
||||
Timestamps: []int64{1664184185000},
|
||||
Values: []float64{100},
|
||||
},
|
||||
{
|
||||
Name: "vm_metric_1",
|
||||
LabelPairs: []vm.LabelPair{{Name: "job", Value: "1"}},
|
||||
Timestamps: []int64{1666819415000},
|
||||
Values: []float64{100},
|
||||
},
|
||||
{
|
||||
Name: "vm_metric_1",
|
||||
LabelPairs: []vm.LabelPair{{Name: "job", Value: "2"}},
|
||||
Timestamps: []int64{1664184185000},
|
||||
Values: []float64{200},
|
||||
},
|
||||
{
|
||||
Name: "vm_metric_1",
|
||||
LabelPairs: []vm.LabelPair{{Name: "job", Value: "2"}},
|
||||
Timestamps: []int64{1666819415000},
|
||||
Values: []float64{200},
|
||||
},
|
||||
},
|
||||
wantErr: false,
|
||||
Name: "vm_metric_1",
|
||||
LabelPairs: []vm.LabelPair{{Name: "job", Value: "1"}},
|
||||
Timestamps: []int64{1669368185000, 1669454615000},
|
||||
Values: []float64{100, 100},
|
||||
},
|
||||
{
|
||||
Name: "vm_metric_1",
|
||||
LabelPairs: []vm.LabelPair{{Name: "job", Value: "2"}},
|
||||
Timestamps: []int64{1669368185000, 1669454615000},
|
||||
Values: []float64{200, 200},
|
||||
},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
src := remote_read_integration.NewRemoteWriteServer(t)
|
||||
dst := remote_read_integration.NewRemoteWriteServer(t)
|
||||
f(start, end, numOfSeries, numOfSamples, resultExpected)
|
||||
|
||||
defer func() {
|
||||
src.Close()
|
||||
dst.Close()
|
||||
}()
|
||||
|
||||
start, err := time.Parse(time.RFC3339, tt.start)
|
||||
if err != nil {
|
||||
t.Fatalf("Error parse start time: %s", err)
|
||||
}
|
||||
|
||||
end, err := time.Parse(time.RFC3339, tt.end)
|
||||
if err != nil {
|
||||
t.Fatalf("Error parse end time: %s", err)
|
||||
}
|
||||
|
||||
tt.fields.filter.Match = fmt.Sprintf("{%s=~%q}", tt.fields.matchName, tt.fields.matchValue)
|
||||
tt.fields.filter.TimeStart = tt.start
|
||||
tt.fields.filter.TimeEnd = tt.end
|
||||
|
||||
rws := tt.vmSeries(start.Unix(), end.Unix(), tt.numOfSeries, tt.numOfSamples)
|
||||
|
||||
src.Series(rws)
|
||||
dst.ExpectedSeries(tt.expectedSeries)
|
||||
|
||||
if err := fillStorage(rws); err != nil {
|
||||
t.Fatalf("error add series to storage: %s", err)
|
||||
}
|
||||
|
||||
tt.fields.src = &native.Client{
|
||||
AuthCfg: nil,
|
||||
Addr: src.URL(),
|
||||
ExtraLabels: []string{},
|
||||
HTTPClient: &http.Client{Transport: &http.Transport{DisableKeepAlives: false}},
|
||||
}
|
||||
tt.fields.dst = &native.Client{
|
||||
AuthCfg: nil,
|
||||
Addr: dst.URL(),
|
||||
ExtraLabels: []string{},
|
||||
HTTPClient: &http.Client{Transport: &http.Transport{DisableKeepAlives: false}},
|
||||
}
|
||||
|
||||
isSilent = tt.args.silent
|
||||
p := &vmNativeProcessor{
|
||||
filter: tt.fields.filter,
|
||||
dst: tt.fields.dst,
|
||||
src: tt.fields.src,
|
||||
backoff: tt.fields.backoff,
|
||||
s: tt.fields.s,
|
||||
rateLimit: tt.fields.rateLimit,
|
||||
interCluster: tt.fields.interCluster,
|
||||
cc: tt.fields.cc,
|
||||
isNative: true,
|
||||
}
|
||||
|
||||
if err := p.run(tt.args.ctx); (err != nil) != tt.wantErr {
|
||||
t.Errorf("run() error = %v, wantErr %v", err, tt.wantErr)
|
||||
}
|
||||
deleted, err := deleteSeries(tt.fields.matchName, tt.fields.matchValue)
|
||||
if err != nil {
|
||||
t.Fatalf("error delete series: %s", err)
|
||||
}
|
||||
if int64(deleted) != tt.numOfSeries {
|
||||
t.Fatalf("expected deleted series %d; got deleted series %d", tt.numOfSeries, deleted)
|
||||
}
|
||||
})
|
||||
// step month on month time range
|
||||
start = "2022-09-26T11:23:05+02:00"
|
||||
end = "2022-11-26T11:24:05+02:00"
|
||||
numOfSeries = 3
|
||||
numOfSamples = 2
|
||||
resultExpected = []vm.TimeSeries{
|
||||
{
|
||||
Name: "vm_metric_1",
|
||||
LabelPairs: []vm.LabelPair{{Name: "job", Value: "0"}},
|
||||
Timestamps: []int64{1664184185000},
|
||||
Values: []float64{0},
|
||||
},
|
||||
{
|
||||
Name: "vm_metric_1",
|
||||
LabelPairs: []vm.LabelPair{{Name: "job", Value: "0"}},
|
||||
Timestamps: []int64{1666819415000},
|
||||
Values: []float64{0},
|
||||
},
|
||||
{
|
||||
Name: "vm_metric_1",
|
||||
LabelPairs: []vm.LabelPair{{Name: "job", Value: "1"}},
|
||||
Timestamps: []int64{1664184185000},
|
||||
Values: []float64{100},
|
||||
},
|
||||
{
|
||||
Name: "vm_metric_1",
|
||||
LabelPairs: []vm.LabelPair{{Name: "job", Value: "1"}},
|
||||
Timestamps: []int64{1666819415000},
|
||||
Values: []float64{100},
|
||||
},
|
||||
{
|
||||
Name: "vm_metric_1",
|
||||
LabelPairs: []vm.LabelPair{{Name: "job", Value: "2"}},
|
||||
Timestamps: []int64{1664184185000},
|
||||
Values: []float64{200},
|
||||
},
|
||||
{
|
||||
Name: "vm_metric_1",
|
||||
LabelPairs: []vm.LabelPair{{Name: "job", Value: "2"}},
|
||||
Timestamps: []int64{1666819415000},
|
||||
Values: []float64{200},
|
||||
},
|
||||
}
|
||||
f(start, end, numOfSeries, numOfSamples, resultExpected)
|
||||
}
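The expected series in this test come from remote_read_integration.GenerateVNSeries, whose implementation is not part of this diff. Inferred from the values above, the generated data has roughly the shape sketched below; this is an assumption, not the real helper: numOfSeries series named vm_metric_1 with job="0".."N-1", each carrying numOfSamples evenly spaced samples between start and end (Unix seconds), with values of 100*seriesIndex.

// Sketch only; relies on the vm and fmt packages already imported by this test file.
func generateVNSeriesSketch(start, end, numOfSeries, numOfSamples int64) []vm.TimeSeries {
	var tss []vm.TimeSeries
	step := (end - start) / numOfSamples
	for i := int64(0); i < numOfSeries; i++ {
		ts := vm.TimeSeries{
			Name:       "vm_metric_1",
			LabelPairs: []vm.LabelPair{{Name: "job", Value: fmt.Sprintf("%d", i)}},
		}
		for j := int64(0); j < numOfSamples; j++ {
			// Timestamps are in milliseconds, matching resultExpected above.
			ts.Timestamps = append(ts.Timestamps, (start+j*step)*1000)
			ts.Values = append(ts.Values, float64(i*100))
		}
		tss = append(tss, ts)
	}
	return tss
}

With the minute-range start and end used in the first case, this sketch yields the timestamps 1669368185000 and 1669454615000 that appear in resultExpected.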
func processFlags() {
|
||||
|
@@ -311,95 +249,57 @@ func deleteSeries(name, value string) (int, error) {
|
|||
return vmstorage.DeleteSeries(nil, []*storage.TagFilters{tfs})
|
||||
}
|
||||
|
||||
func Test_buildMatchWithFilter(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
filter string
|
||||
metricName string
|
||||
want string
|
||||
wantErr bool
|
||||
}{
|
||||
{
|
||||
name: "parsed metric with label",
|
||||
filter: `{__name__="http_request_count_total",cluster="kube1"}`,
|
||||
metricName: "http_request_count_total",
|
||||
want: `{cluster="kube1",__name__="http_request_count_total"}`,
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "metric name with label",
|
||||
filter: `http_request_count_total{cluster="kube1"}`,
|
||||
metricName: "http_request_count_total",
|
||||
want: `{cluster="kube1",__name__="http_request_count_total"}`,
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "parsed metric with regexp value",
|
||||
filter: `{__name__="http_request_count_total",cluster=~"kube.*"}`,
|
||||
metricName: "http_request_count_total",
|
||||
want: `{cluster=~"kube.*",__name__="http_request_count_total"}`,
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "only label with regexp",
|
||||
filter: `{cluster=~".*"}`,
|
||||
metricName: "http_request_count_total",
|
||||
want: `{cluster=~".*",__name__="http_request_count_total"}`,
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "many labels in filter with regexp",
|
||||
filter: `{cluster=~".*",job!=""}`,
|
||||
metricName: "http_request_count_total",
|
||||
want: `{cluster=~".*",job!="",__name__="http_request_count_total"}`,
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "match with error",
|
||||
filter: `{cluster~=".*"}`,
|
||||
metricName: "http_request_count_total",
|
||||
want: ``,
|
||||
wantErr: true,
|
||||
},
|
||||
{
|
||||
name: "all names",
|
||||
filter: `{__name__!=""}`,
|
||||
metricName: "http_request_count_total",
|
||||
want: `{__name__="http_request_count_total"}`,
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "with many underscores labels",
|
||||
filter: `{__name__!="", __meta__!=""}`,
|
||||
metricName: "http_request_count_total",
|
||||
want: `{__meta__!="",__name__="http_request_count_total"}`,
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "metric name has regexp",
|
||||
filter: `{__name__=~".*"}`,
|
||||
metricName: "http_request_count_total",
|
||||
want: `{__name__="http_request_count_total"}`,
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "metric name has negative regexp",
|
||||
filter: `{__name__!~".*"}`,
|
||||
metricName: "http_request_count_total",
|
||||
want: `{__name__="http_request_count_total"}`,
|
||||
wantErr: false,
|
||||
},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
got, err := buildMatchWithFilter(tt.filter, tt.metricName)
|
||||
if (err != nil) != tt.wantErr {
|
||||
t.Errorf("buildMatchWithFilter() error = %v, wantErr %v", err, tt.wantErr)
|
||||
return
|
||||
}
|
||||
if got != tt.want {
|
||||
t.Errorf("buildMatchWithFilter() got = %v, want %v", got, tt.want)
|
||||
}
|
||||
})
|
||||
func TestBuildMatchWithFilter_Failure(t *testing.T) {
|
||||
f := func(filter, metricName string) {
|
||||
t.Helper()
|
||||
|
||||
_, err := buildMatchWithFilter(filter, metricName)
|
||||
if err == nil {
|
||||
t.Fatalf("expecting non-nil error")
|
||||
}
|
||||
}
|
||||
|
||||
// match with error
|
||||
f(`{cluster~=".*"}`, "http_request_count_total")
|
||||
}
|
||||
|
||||
func TestBuildMatchWithFilter_Success(t *testing.T) {
|
||||
f := func(filter, metricName, resultExpected string) {
|
||||
t.Helper()
|
||||
|
||||
result, err := buildMatchWithFilter(filter, metricName)
|
||||
if err != nil {
|
||||
t.Fatalf("buildMatchWithFilter() error: %s", err)
|
||||
}
|
||||
if result != resultExpected {
|
||||
t.Fatalf("unexpected result\ngot\n%s\nwant\n%s", result, resultExpected)
|
||||
}
|
||||
}
|
||||
|
||||
// parsed metric with label
|
||||
f(`{__name__="http_request_count_total",cluster="kube1"}`, "http_request_count_total", `{cluster="kube1",__name__="http_request_count_total"}`)
|
||||
|
||||
// metric name with label
|
||||
f(`http_request_count_total{cluster="kube1"}`, "http_request_count_total", `{cluster="kube1",__name__="http_request_count_total"}`)
|
||||
|
||||
// parsed metric with regexp value
|
||||
f(`{__name__="http_request_count_total",cluster=~"kube.*"}`, "http_request_count_total", `{cluster=~"kube.*",__name__="http_request_count_total"}`)
|
||||
|
||||
// only label with regexp
|
||||
f(`{cluster=~".*"}`, "http_request_count_total", `{cluster=~".*",__name__="http_request_count_total"}`)
|
||||
|
||||
// many labels in filter with regexp
|
||||
f(`{cluster=~".*",job!=""}`, "http_request_count_total", `{cluster=~".*",job!="",__name__="http_request_count_total"}`)
|
||||
|
||||
// all names
|
||||
f(`{__name__!=""}`, "http_request_count_total", `{__name__="http_request_count_total"}`)
|
||||
|
||||
// with many underscores labels
|
||||
f(`{__name__!="", __meta__!=""}`, "http_request_count_total", `{__meta__!="",__name__="http_request_count_total"}`)
|
||||
|
||||
// metric name has regexp
|
||||
f(`{__name__=~".*"}`, "http_request_count_total", `{__name__="http_request_count_total"}`)
|
||||
|
||||
// metric name has negative regexp
|
||||
f(`{__name__!~".*"}`, "http_request_count_total", `{__name__="http_request_count_total"}`)
|
||||
}
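All of the rewritten tests in this commit follow the same f-test shape described in the linked article: a local f closure takes the inputs and the expected result, marks itself with t.Helper(), and fails fast with t.Fatalf, so each case is a plain function call instead of a table entry. A minimal standalone sketch of the pattern (the sum function and its cases are illustrative only):

package sketch

import "testing"

// sum is a stand-in function under test, used only to illustrate the pattern.
func sum(a, b int) int { return a + b }

func TestSum(t *testing.T) {
	// f replaces the []struct{...} table; each case below is a direct call.
	f := func(a, b, resultExpected int) {
		t.Helper()

		result := sum(a, b)
		if result != resultExpected {
			t.Fatalf("unexpected result; got %d; want %d", result, resultExpected)
		}
	}

	// zero values
	f(0, 0, 0)

	// mixed signs
	f(-1, 3, 2)
}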