Aliaksandr Valialkin 2024-05-18 22:09:52 +02:00
parent 13534e7bf6
commit 94d4f34cbf
GPG key ID: 52C003EE2BCDB9EB
4 changed files with 52 additions and 20 deletions

View file

@@ -562,19 +562,27 @@ func parseGenericFilter(lex *lexer, fieldName string) (filter, error) {
 	case lex.isKeyword(",", ")", "[", "]"):
 		return nil, fmt.Errorf("unexpected token %q", lex.token)
 	}
-	phrase := getCompoundPhrase(lex, fieldName != "")
+	phrase, err := getCompoundPhrase(lex, fieldName != "")
+	if err != nil {
+		return nil, err
+	}
 	return parseFilterForPhrase(lex, phrase, fieldName)
 }
 
-func getCompoundPhrase(lex *lexer, allowColon bool) string {
+func getCompoundPhrase(lex *lexer, allowColon bool) (string, error) {
+	stopTokens := []string{"*", ",", "(", ")", "[", "]", "|", ""}
+	if lex.isKeyword(stopTokens...) {
+		return "", fmt.Errorf("compound phrase cannot start with '%s'", lex.token)
+	}
+
 	phrase := lex.token
 	rawPhrase := lex.rawToken
 	lex.nextToken()
 	suffix := getCompoundSuffix(lex, allowColon)
 	if suffix == "" {
-		return phrase
+		return phrase, nil
 	}
-	return rawPhrase + suffix
+	return rawPhrase + suffix, nil
 }
 
 func getCompoundSuffix(lex *lexer, allowColon bool) string {
@@ -590,19 +598,24 @@ func getCompoundSuffix(lex *lexer, allowColon bool) string {
 	return s
 }
 
-func getCompoundToken(lex *lexer) string {
+func getCompoundToken(lex *lexer) (string, error) {
+	stopTokens := []string{",", "(", ")", "[", "]", "|", ""}
+	if lex.isKeyword(stopTokens...) {
+		return "", fmt.Errorf("compound token cannot start with '%s'", lex.token)
+	}
+
 	s := lex.token
 	rawS := lex.rawToken
 	lex.nextToken()
 	suffix := ""
-	for !lex.isSkippedSpace && !lex.isKeyword(",", "(", ")", "[", "]", "|", "") {
+	for !lex.isSkippedSpace && !lex.isKeyword(stopTokens...) {
 		s += lex.token
 		lex.nextToken()
 	}
 	if suffix == "" {
-		return s
+		return s, nil
 	}
-	return rawS + suffix
+	return rawS + suffix, nil
 }
 
 func getCompoundFuncArg(lex *lexer) string {
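
The two hunks above carry the core change: getCompoundPhrase and getCompoundToken now reject stop tokens up front and return an error instead of silently yielding an empty string. A minimal standalone sketch of that pattern, with a toy lexer type that is only an illustration and not the lexer from this file:

package main

import "fmt"

// toyLexer is a simplified stand-in for the parser's lexer; it exists only
// for this sketch.
type toyLexer struct {
	tokens []string
	pos    int
}

func (lx *toyLexer) token() string {
	if lx.pos < len(lx.tokens) {
		return lx.tokens[lx.pos]
	}
	return "" // end of input is modeled as an empty token, as in the diff above
}

// readCompoundToken mirrors the new getCompoundToken contract: starting at a
// stop token (or at end of input) is a parse error, not an empty result.
func readCompoundToken(lx *toyLexer) (string, error) {
	for _, stop := range []string{",", "(", ")", "[", "]", "|", ""} {
		if lx.token() == stop {
			return "", fmt.Errorf("compound token cannot start with '%s'", lx.token())
		}
	}
	s := lx.token()
	lx.pos++
	return s, nil
}

func main() {
	if _, err := readCompoundToken(&toyLexer{tokens: []string{"|"}}); err != nil {
		fmt.Println(err) // compound token cannot start with '|'
	}
	if s, err := readCompoundToken(&toyLexer{tokens: []string{"offset"}}); err == nil {
		fmt.Println(s) // offset
	}
}
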
@@ -1081,7 +1094,10 @@ func parseFilterRange(lex *lexer, fieldName string) (filter, error) {
 }
 
 func parseFloat64(lex *lexer) (float64, string, error) {
-	s := getCompoundToken(lex)
+	s, err := getCompoundToken(lex)
+	if err != nil {
+		return 0, "", fmt.Errorf("cannot parse float64: %w", err)
+	}
 	f, err := strconv.ParseFloat(s, 64)
 	if err == nil {
 		return f, s, nil
@@ -1168,13 +1184,14 @@ func parseFilterTimeWithOffset(lex *lexer) (*filterTime, error) {
 	if !lex.isKeyword("offset") {
 		return ft, nil
 	}
-	if !lex.mustNextToken() {
-		return nil, fmt.Errorf("missing offset for _time filter %s", ft)
+	lex.nextToken()
+	s, err := getCompoundToken(lex)
+	if err != nil {
+		return nil, fmt.Errorf("cannot parse offset in _time filter: %w", err)
 	}
-	s := getCompoundToken(lex)
 	d, ok := tryParseDuration(s)
 	if !ok {
-		return nil, fmt.Errorf("cannot parse offset %q for _time filter %s: %w", s, ft, err)
+		return nil, fmt.Errorf("cannot parse offset %q for _time filter %s", s, ft)
 	}
 	offset := int64(d)
 	ft.minTimestamp -= offset
@@ -1191,7 +1208,10 @@ func parseFilterTime(lex *lexer) (*filterTime, error) {
 	case lex.isKeyword("("):
 		startTimeInclude = false
 	default:
-		s := getCompoundToken(lex)
+		s, err := getCompoundToken(lex)
+		if err != nil {
+			return nil, fmt.Errorf("cannot parse _time filter: %w", err)
+		}
 		sLower := strings.ToLower(s)
 		if sLower == "now" || startsWithYear(s) {
 			// Parse '_time:YYYY-MM-DD', which transforms to '_time:[YYYY-MM-DD, YYYY-MM-DD+1)'
@@ -1343,7 +1363,10 @@ func parseFilterStream(lex *lexer) (*filterStream, error) {
 }
 
 func parseTime(lex *lexer) (int64, string, error) {
-	s := getCompoundToken(lex)
+	s, err := getCompoundToken(lex)
+	if err != nil {
+		return 0, "", err
+	}
 	t, err := promutils.ParseTimeAt(s, float64(lex.currentTimestamp)/1e9)
 	if err != nil {
 		return 0, "", err

View file

@@ -1215,6 +1215,11 @@ func TestParseQueryFailure(t *testing.T) {
 	f(`foo | fields bar,,`)
 
 	// invalid field_names
+	f(`foo | field_names`)
+	f(`foo | field_names |`)
+	f(`foo | field_names (`)
+	f(`foo | field_names )`)
+	f(`foo | field_names ,`)
 	f(`foo | field_names ()`)
 	f(`foo | field_names (x)`)
 	f(`foo | field_names (x,y)`)
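
The added cases cover result names that start with a stop token. Assuming the package's exported ParseQuery(s string) (*Query, error) entry point (presumably what the f helper wraps in this test), the new failures would surface like this:

package main

import (
	"fmt"

	"github.com/VictoriaMetrics/VictoriaMetrics/lib/logstorage"
)

func main() {
	// Each of these queries is expected to fail: the token after `field_names`
	// is a stop token, so no result name can be parsed.
	for _, q := range []string{
		"foo | field_names |",
		"foo | field_names (",
	} {
		if _, err := logstorage.ParseQuery(q); err != nil {
			fmt.Printf("%s: %s\n", q, err)
		}
	}
}
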

View file

@@ -152,7 +152,10 @@ func parsePipeFieldNames(lex *lexer) (*pipeFieldNames, error) {
 	if lex.isKeyword("as") {
 		lex.nextToken()
 	}
-	resultName := getCompoundPhrase(lex, false)
+	resultName, err := parseFieldName(lex)
+	if err != nil {
+		return nil, fmt.Errorf("cannot parse result name for 'field_names': %w", err)
+	}
 	pf := &pipeFieldNames{
 		resultName: resultName,
 	}
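
With %w wrapping added at each layer, a bad result name now produces a message that spells out the whole path (pipe → field name → compound token). A small sketch of that chain, built from the literal messages in this diff; the composed output shown in the comments is an expectation, not taken from test output:

package main

import (
	"errors"
	"fmt"
)

func main() {
	// Innermost error, as produced by the new getCompoundToken.
	base := fmt.Errorf("compound token cannot start with '%s'", "|")

	// Each parsing layer wraps it with %w, mirroring the hunks above.
	mid := fmt.Errorf("cannot parse field name: %w", base)
	top := fmt.Errorf("cannot parse result name for 'field_names': %w", mid)

	fmt.Println(top)
	// cannot parse result name for 'field_names': cannot parse field name:
	// compound token cannot start with '|'

	// %w keeps the chain inspectable with the standard errors helpers.
	fmt.Println(errors.Is(top, base)) // true
}
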

View file

@@ -721,10 +721,11 @@ func parseByStatsFields(lex *lexer) ([]*byStatsField, error) {
 		lex.nextToken()
 		return bfs, nil
 	}
-	fieldName, err := parseFieldName(lex)
+	fieldName, err := getCompoundPhrase(lex, false)
 	if err != nil {
 		return nil, fmt.Errorf("cannot parse field name: %w", err)
 	}
+	fieldName = getCanonicalColumnName(fieldName)
 	bf := &byStatsField{
 		name: fieldName,
 	}
@@ -898,10 +899,10 @@ func parseFieldNamesInParens(lex *lexer) ([]string, error) {
 }
 
 func parseFieldName(lex *lexer) (string, error) {
-	if lex.isKeyword(",", "(", ")", "[", "]", "|", ":", "") {
-		return "", fmt.Errorf("unexpected token: %q", lex.token)
+	fieldName, err := getCompoundToken(lex)
+	if err != nil {
+		return "", fmt.Errorf("cannot parse field name: %w", err)
 	}
-	fieldName := getCompoundPhrase(lex, false)
 	fieldName = getCanonicalColumnName(fieldName)
 	return fieldName, nil
 }