Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Fix a crash when parsing nested function calls (see added test case). #28

Open
wants to merge 1 commit into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 8 additions & 0 deletions sqlfmt/format_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -865,4 +865,12 @@ FROM xxx`,
LOCK table
IN xxx`,
},
{
src: `select true from m where t < date_trunc('DAY', to_timestamp('2022-01-01'))`,
want: `
SELECT
true
FROM m
WHERE t < DATE_TRUNC('DAY', TO_TIMESTAMP('2022-01-01'))`,
},
}
2 changes: 2 additions & 0 deletions sqlfmt/lexer/token.go
Original file line number Diff line number Diff line change
Expand Up @@ -81,6 +81,7 @@ const (
OVERLAPS
NATURAL
CROSS
TIME
ZONE
NULLS
LAST
Expand All @@ -93,6 +94,7 @@ const (
)

// TokenType is an alias type that represents a kind of token
//go:generate stringer -type=TokenType
type TokenType int

// Token is a token struct
Expand Down
84 changes: 35 additions & 49 deletions sqlfmt/lexer/tokenizer.go
Original file line number Diff line number Diff line change
Expand Up @@ -315,16 +315,24 @@ func (t *Tokenizer) append(v string) {
}

func (t *Tokenizer) isSQLKeyWord(v string) (TokenType, bool) {
if ttype, ok := sqlKeywordMap[v]; ok {
return ttype, ok
} else if ttype, ok := typeWithParenMap[v]; ok {
if r, _, err := t.r.ReadRune(); err == nil && string(r) == StartParenthesis {
t.unread()
return ttype, ok
if r, _, err := t.r.ReadRune(); err == nil && string(r) == StartParenthesis {
t.unread()

if ttype, ok := typeWithParenMap[v]; ok {
return ttype, true
} else {
// Assume everything else is a function (either from the standards, or vendor-specific)
return FUNCTION, true
}
} else {
t.unread()
return IDENT, ok
}

// Keywords will be formatted in capital cases
if ttype, ok := sqlKeywordMap[v]; ok {
return ttype, ok
}

return IDENT, false
}

Expand Down Expand Up @@ -376,6 +384,7 @@ var sqlKeywordMap = map[string]TokenType{
"FILTER": FILTER,
"WITHIN": WITHIN,
"COLLATE": COLLATE,
"INTERVAL": INTERVAL,
"INTERSECT": INTERSECT,
"EXCEPT": EXCEPT,
"OFFSET": OFFSET,
Expand All @@ -386,6 +395,7 @@ var sqlKeywordMap = map[string]TokenType{
"OVERLAPS": OVERLAPS,
"NATURAL": NATURAL,
"CROSS": CROSS,
"TIME": TIME,
"ZONE": ZONE,
"NULLS": NULLS,
"LAST": LAST,
Expand All @@ -395,46 +405,22 @@ var sqlKeywordMap = map[string]TokenType{
}

var typeWithParenMap = map[string]TokenType{
"SUM": FUNCTION,
"AVG": FUNCTION,
"MAX": FUNCTION,
"MIN": FUNCTION,
"COUNT": FUNCTION,
"COALESCE": FUNCTION,
"EXTRACT": FUNCTION,
"OVERLAY": FUNCTION,
"POSITION": FUNCTION,
"CAST": FUNCTION,
"SUBSTRING": FUNCTION,
"TRIM": FUNCTION,
"XMLELEMENT": FUNCTION,
"XMLFOREST": FUNCTION,
"XMLCONCAT": FUNCTION,
"RANDOM": FUNCTION,
"DATE_PART": FUNCTION,
"DATE_TRUNC": FUNCTION,
"ARRAY_AGG": FUNCTION,
"PERCENTILE_DISC": FUNCTION,
"GREATEST": FUNCTION,
"LEAST": FUNCTION,
"OVER": FUNCTION,
"ROW_NUMBER": FUNCTION,
"BIG": TYPE,
"BIGSERIAL": TYPE,
"BOOLEAN": TYPE,
"CHAR": TYPE,
"BIT": TYPE,
"TEXT": TYPE,
"INTEGER": TYPE,
"NUMERIC": TYPE,
"DECIMAL": TYPE,
"DEC": TYPE,
"FLOAT": TYPE,
"CUSTOMTYPE": TYPE,
"VARCHAR": TYPE,
"VARBIT": TYPE,
"TIMESTAMP": TYPE,
"TIME": TYPE,
"SECOND": TYPE,
"INTERVAL": TYPE,
"BIG": TYPE,
"BIGSERIAL": TYPE,
"BOOLEAN": TYPE,
"CHAR": TYPE,
"BIT": TYPE,
"TEXT": TYPE,
"INTEGER": TYPE,
"NUMERIC": TYPE,
"DECIMAL": TYPE,
"DEC": TYPE,
"FLOAT": TYPE,
"CUSTOMTYPE": TYPE,
"VARCHAR": TYPE,
"VARBIT": TYPE,
"TIMESTAMP": TYPE,
"TIME": TYPE,
"SECOND": TYPE,
"INTERVAL": TYPE,
}
2 changes: 1 addition & 1 deletion sqlfmt/lexer/tokenizer_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ func TestGetTokens(t *testing.T) {
{Type: COMMA, Value: ","},
{Type: IDENT, Value: "age"},
{Type: COMMA, Value: ","},
{Type: IDENT, Value: "SUM"},
{Type: IDENT, Value: "sum"},
{Type: COMMA, Value: ","},
{Type: FUNCTION, Value: "SUM"},
{Type: STARTPARENTHESIS, Value: "("},
Expand Down