lib/logstorage: allow specifying _time filter offset without time range
This is useful when building graphs on time ranges in the past.
(cherry picked from commit a98fb495c6)
parent a4ea3b87d7
commit f82cfa16bf

4 changed files with 41 additions and 8 deletions
@@ -16,6 +16,7 @@ according to [these docs](https://docs.victoriametrics.com/victorialogs/quicksta
 ## tip
 
 * FEATURE: [`join` pipe](https://docs.victoriametrics.com/victorialogs/logsql/#join-pipe): add an ability to add prefix to all the log field names from the joined query, by using `| join by (<by_fields>) (<query>) prefix "some_prefix"` syntax.
+* FEATURE: [`_time` filter](https://docs.victoriametrics.com/victorialogs/logsql/#time-filter): allow specifying offset without time range. For example, `_time:offset 1d` matches all the logs until `now-1d` in the [`_time` field](https://docs.victoriametrics.com/victorialogs/keyconcepts/#time-field). This is useful when building graphs for time ranges with some offset in the past.
 
 ## [v0.41.0](https://github.com/VictoriaMetrics/VictoriaMetrics/releases/tag/v0.41.0-victorialogs)
@@ -316,6 +316,7 @@ For example, `_time:2023-10-20` matches all the logs for `2023-10-20` day accord
 It is possible to specify generic offset for the selected time range by appending `offset` after the `_time` filter. Examples:
 
+- `_time:offset 1h` matches logs until `now-1h`.
 - `_time:5m offset 1h` matches logs on the time range `(now-1h5m, now-1h]`.
 - `_time:2023-07Z offset 5h30m` matches logs on July, 2023 by UTC with offset 5h30m.
 - `_time:[2023-02-01Z, 2023-03-01Z) offset 1w` matches logs the week before the time range `[2023-02-01Z, 2023-03-01Z)` by UTC.
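A minimal Go sketch of the timestamp arithmetic behind the two offset forms documented above (the variable names and the use of `time.Now` are illustrative only; the actual filter operates on the parser's nanosecond `minTimestamp`/`maxTimestamp` bounds, as the parser change below shows):

```go
package main

import (
	"fmt"
	"math"
	"time"
)

func main() {
	now := time.Now().UnixNano() // stands in for the query's "current" timestamp

	// `_time:offset 1h` — no explicit range, so the lower bound stays open
	// and only the upper bound is shifted back by the offset.
	offset := int64(time.Hour)
	minTs := int64(math.MinInt64)
	maxTs := now - offset
	fmt.Println("offset-only:", minTs, maxTs) // matches everything until now-1h

	// `_time:5m offset 1h` — a 5-minute range whose bounds are both shifted
	// back by 1h, i.e. (now-1h5m, now-1h].
	minTs = now - int64(5*time.Minute) - offset
	maxTs = now - offset
	fmt.Println("range+offset:", minTs, maxTs)
}
```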
@@ -2070,6 +2070,20 @@ func getWeekRangeArg(lex *lexer) (time.Weekday, string, error) {
 }
 
 func parseFilterTimeRange(lex *lexer) (*filterTime, error) {
+	if lex.isKeyword("offset") {
+		ft := &filterTime{
+			minTimestamp: math.MinInt64,
+			maxTimestamp: lex.currentTimestamp,
+		}
+		offset, offsetStr, err := parseTimeOffset(lex)
+		if err != nil {
+			return nil, fmt.Errorf("cannot parse offset for _time filter []: %w", err)
+		}
+		ft.maxTimestamp -= offset
+		ft.stringRepr = offsetStr
+		return ft, nil
+	}
+
 	ft, err := parseFilterTime(lex)
 	if err != nil {
 		return nil, err

@@ -2077,20 +2091,33 @@ func parseFilterTimeRange(lex *lexer) (*filterTime, error) {
 	if !lex.isKeyword("offset") {
 		return ft, nil
 	}
+
+	offset, offsetStr, err := parseTimeOffset(lex)
+	if err != nil {
+		return nil, fmt.Errorf("cannot parse offset for _time filter [%s]: %w", ft, err)
+	}
+	ft.minTimestamp -= offset
+	ft.maxTimestamp -= offset
+	ft.stringRepr += " " + offsetStr
+	return ft, nil
+}
+
+func parseTimeOffset(lex *lexer) (int64, string, error) {
+	if !lex.isKeyword("offset") {
+		return 0, "", fmt.Errorf("unexpected token %q; want 'offset'", lex.token)
+	}
 	lex.nextToken()
+
 	s, err := getCompoundToken(lex)
 	if err != nil {
-		return nil, fmt.Errorf("cannot parse offset in _time filter: %w", err)
+		return 0, "", err
 	}
 	d, ok := tryParseDuration(s)
 	if !ok {
-		return nil, fmt.Errorf("cannot parse offset %q for _time filter %s", s, ft)
+		return 0, "", fmt.Errorf("cannot parse duration [%s]", s)
 	}
 	offset := int64(d)
-	ft.minTimestamp -= offset
-	ft.maxTimestamp -= offset
-	ft.stringRepr += " offset " + s
-	return ft, nil
+	return offset, "offset " + s, nil
 }
 
 func parseFilterTime(lex *lexer) (*filterTime, error) {
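A minimal, self-contained sketch of the contract the new `parseTimeOffset` helper follows: consume the `offset` keyword plus the following duration token, and return the offset in nanoseconds together with its canonical string form. The token-slice input and `time.ParseDuration` are stand-ins for the sketch only; the real helper reads from the package lexer and uses `tryParseDuration`, which also understands units such as `d` and `w`.

```go
package main

import (
	"fmt"
	"strings"
	"time"
)

// parseTimeOffsetSketch mimics the contract of the new parseTimeOffset helper:
// it expects the "offset" keyword followed by a duration token and returns the
// offset in nanoseconds plus its canonical string representation.
func parseTimeOffsetSketch(tokens []string) (int64, string, error) {
	if len(tokens) == 0 || !strings.EqualFold(tokens[0], "offset") {
		return 0, "", fmt.Errorf("unexpected token; want 'offset'")
	}
	if len(tokens) < 2 {
		return 0, "", fmt.Errorf("missing duration after 'offset'")
	}
	s := tokens[1]
	// time.ParseDuration keeps this sketch self-contained; the real parser's
	// duration syntax is broader (e.g. "1d", "-1.5d", "1w").
	d, err := time.ParseDuration(s)
	if err != nil {
		return 0, "", fmt.Errorf("cannot parse duration [%s]: %w", s, err)
	}
	return int64(d), "offset " + s, nil
}

func main() {
	offset, repr, err := parseTimeOffsetSketch([]string{"offset", "1h30m"})
	fmt.Println(offset, repr, err) // 5400000000000 offset 1h30m <nil>
}
```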
@@ -761,7 +761,9 @@ func TestParseQuerySuccess(t *testing.T) {
 	f(`_time:[2023-01-05, 2023-01-06) OFFset 5m`, `_time:[2023-01-05,2023-01-06) offset 5m`)
 	f(`_time:(2023-01-05, 2023-01-06] OFFset 5m`, `_time:(2023-01-05,2023-01-06] offset 5m`)
 	f(`_time:(2023-01-05, 2023-01-06) OFFset 5m`, `_time:(2023-01-05,2023-01-06) offset 5m`)
-	f(`_time:1h offset 5m`, `_time:1h offset 5m`)
+	f(`_time:1h offset 5.3m`, `_time:1h offset 5.3m`)
+	f(`_time:offset 1d`, `_time:offset 1d`)
+	f(`_time:offset -1.5d`, `_time:offset -1.5d`)
 	f(`_time:1h "offSet"`, `_time:1h "offSet"`) // "offset" is a search word, since it is quoted
 	f(`_time:1h (Offset)`, `_time:1h "Offset"`) // "offset" is a search word, since it is in parens
 	f(`_time:1h "and"`, `_time:1h "and"`) // "and" is a search word, since it is quoted

@@ -1356,6 +1358,8 @@ func TestParseQueryFailure(t *testing.T) {
 	f("_time:234foo")
 	f("_time:5m offset")
 	f("_time:10m offset foobar")
+	f("_time:offset")
+	f("_time:offset foobar")
 
 	// invalid day_range filters
 	f("_time:day_range")

@@ -1373,7 +1377,7 @@ func TestParseQueryFailure(t *testing.T) {
 	f("_time:week_range[Mon,")
 	f("_time:week_range[Mon,bar")
 	f("_time:week_range[Mon,Fri")
-	f("_time:week_range[Mon,Fri] offset")
+	f("_time:week_range[Mon,Fri] offset foobar")
 
 	// long query with error
 	f(`very long query with error aaa ffdfd fdfdfd fdfd:( ffdfdfdfdfd`)