@@ -154,7 +154,7 @@ impl<'a> Parser<'a> {
                 expecting_statement_delimiter = false;
             }
 
-            if parser.peek_token().token == Token::EOF {
+            if parser.peek_token() == Token::EOF {
                 break;
             }
             if expecting_statement_delimiter {
@@ -515,12 +515,12 @@ impl<'a> Parser<'a> {
             Keyword::INTERVAL => self.parse_interval(),
             Keyword::LISTAGG => self.parse_listagg_expr(),
             // Treat ARRAY[1,2,3] as an array [1,2,3], otherwise try as subquery or a function call
-            Keyword::ARRAY if self.peek_token().token == Token::LBracket => {
+            Keyword::ARRAY if self.peek_token() == Token::LBracket => {
                 self.expect_token(&Token::LBracket)?;
                 self.parse_array_expr(true)
             }
             Keyword::ARRAY
-                if self.peek_token().token == Token::LParen
+                if self.peek_token() == Token::LParen
                     && !dialect_of!(self is ClickHouseDialect) =>
             {
                 self.expect_token(&Token::LParen)?;
@@ -1574,30 +1574,30 @@ impl<'a> Parser<'a> {
                 // Can only happen if `get_next_precedence` got out of sync with this function
                 _ => parser_err!(format!("No infix parser for token {:?}", tok.token)),
             }
-        } else if Token::DoubleColon == tok.token {
+        } else if Token::DoubleColon == tok {
             self.parse_pg_cast(expr)
-        } else if Token::ExclamationMark == tok.token {
+        } else if Token::ExclamationMark == tok {
             // PostgreSQL factorial operation
             Ok(Expr::UnaryOp {
                 op: UnaryOperator::PGPostfixFactorial,
                 expr: Box::new(expr),
             })
-        } else if Token::LBracket == tok.token {
+        } else if Token::LBracket == tok {
             if dialect_of!(self is PostgreSqlDialect | GenericDialect) {
                 // parse index
                 return self.parse_array_index(expr);
             }
             self.parse_map_access(expr)
-        } else if Token::Colon == tok.token {
+        } else if Token::Colon == tok {
             Ok(Expr::JsonAccess {
                 left: Box::new(expr),
                 operator: JsonOperator::Colon,
                 right: Box::new(Expr::Value(self.parse_value()?)),
             })
-        } else if Token::Arrow == tok.token
-            || Token::LongArrow == tok.token
-            || Token::HashArrow == tok.token
-            || Token::HashLongArrow == tok.token
+        } else if Token::Arrow == tok
+            || Token::LongArrow == tok
+            || Token::HashArrow == tok
+            || Token::HashLongArrow == tok
         {
             let operator = match tok.token {
                 Token::Arrow => JsonOperator::Arrow,
@@ -1962,7 +1962,7 @@ impl<'a> Parser<'a> {
     /// Consume the next token if it matches the expected token, otherwise return false
     #[must_use]
     pub fn consume_token(&mut self, expected: &Token) -> bool {
-        if self.peek_token().token == *expected {
+        if self.peek_token() == *expected {
             self.next_token();
             true
         } else {
@@ -2135,14 +2135,14 @@ impl<'a> Parser<'a> {
             table_flag = Some(self.parse_object_name()?);
             if self.parse_keyword(Keyword::TABLE) {
                 let table_name = self.parse_object_name()?;
-                if self.peek_token().token != Token::EOF {
+                if self.peek_token() != Token::EOF {
                     if let Token::Word(word) = self.peek_token().token {
                         if word.keyword == Keyword::OPTIONS {
                             options = self.parse_options(Keyword::OPTIONS)?
                         }
                     };
 
-                    if self.peek_token().token != Token::EOF {
+                    if self.peek_token() != Token::EOF {
                         let (a, q) = self.parse_as_query()?;
                         has_as = a;
                         query = Some(q);
@@ -2165,7 +2165,7 @@ impl<'a> Parser<'a> {
                 })
             }
         } else {
-            if self.peek_token().token == Token::EOF {
+            if self.peek_token() == Token::EOF {
                 self.prev_token();
             }
             self.expected("a `TABLE` keyword", self.peek_token())
@@ -3792,7 +3792,7 @@ impl<'a> Parser<'a> {
         let next_token = self.next_token();
         match next_token.token {
             Token::Word(Word { value, keyword, .. }) if keyword == Keyword::NoKeyword => {
-                if self.peek_token().token == Token::LParen {
+                if self.peek_token() == Token::LParen {
                     return self.parse_function(ObjectName(vec![Ident::new(value)]));
                 }
                 Ok(Expr::Value(Value::SingleQuotedString(value)))
@@ -5849,8 +5849,7 @@ impl<'a> Parser<'a> {
     pub fn parse_merge_clauses(&mut self) -> Result<Vec<MergeClause>, ParserError> {
         let mut clauses: Vec<MergeClause> = vec![];
         loop {
-            if self.peek_token().token == Token::EOF || self.peek_token().token == Token::SemiColon
-            {
+            if self.peek_token() == Token::EOF || self.peek_token() == Token::SemiColon {
                 break;
             }
             self.expect_keyword(Keyword::WHEN)?;
@@ -6069,19 +6068,19 @@ mod tests {
     fn test_prev_index() {
         let sql = "SELECT version";
         all_dialects().run_parser_method(sql, |parser| {
-            assert_eq!(parser.peek_token().token, Token::make_keyword("SELECT"));
-            assert_eq!(parser.next_token().token, Token::make_keyword("SELECT"));
+            assert_eq!(parser.peek_token(), Token::make_keyword("SELECT"));
+            assert_eq!(parser.next_token(), Token::make_keyword("SELECT"));
             parser.prev_token();
-            assert_eq!(parser.next_token().token, Token::make_keyword("SELECT"));
-            assert_eq!(parser.next_token().token, Token::make_word("version", None));
+            assert_eq!(parser.next_token(), Token::make_keyword("SELECT"));
+            assert_eq!(parser.next_token(), Token::make_word("version", None));
             parser.prev_token();
-            assert_eq!(parser.peek_token().token, Token::make_word("version", None));
-            assert_eq!(parser.next_token().token, Token::make_word("version", None));
-            assert_eq!(parser.peek_token().token, Token::EOF);
+            assert_eq!(parser.peek_token(), Token::make_word("version", None));
+            assert_eq!(parser.next_token(), Token::make_word("version", None));
+            assert_eq!(parser.peek_token(), Token::EOF);
             parser.prev_token();
-            assert_eq!(parser.next_token().token, Token::make_word("version", None));
-            assert_eq!(parser.next_token().token, Token::EOF);
-            assert_eq!(parser.next_token().token, Token::EOF);
+            assert_eq!(parser.next_token(), Token::make_word("version", None));
+            assert_eq!(parser.next_token(), Token::EOF);
+            assert_eq!(parser.next_token(), Token::EOF);
             parser.prev_token();
         });
     }
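
Note: every hunk above drops a `.token` field access when comparing the result of `peek_token()` or `next_token()` against a plain `Token`. The diff itself does not show why that compiles; presumably the wrapper type those methods return implements `PartialEq<Token>` in both directions. The sketch below illustrates that assumed mechanism with simplified stand-in types (`Token`, `Location`, and `TokenWithLocation` here are hypothetical, minimal versions, not the crate's actual definitions).

// Minimal, self-contained sketch of the comparison pattern these hunks rely on.
// The PartialEq impls are the assumed mechanism that lets call sites write
// `parser.peek_token() == Token::EOF` or `Token::DoubleColon == tok` without `.token`.

#[allow(dead_code)]
#[derive(Debug, Clone, PartialEq, Eq)]
enum Token {
    EOF,
    SemiColon,
    Word(String),
}

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct Location {
    line: u64,
    column: u64,
}

#[derive(Debug, Clone, PartialEq, Eq)]
struct TokenWithLocation {
    token: Token,
    location: Location,
}

// Compare a located token directly against a plain `Token`, ignoring the location...
impl PartialEq<Token> for TokenWithLocation {
    fn eq(&self, other: &Token) -> bool {
        &self.token == other
    }
}

// ...and the reverse direction, used by expressions like `Token::DoubleColon == tok`.
impl PartialEq<TokenWithLocation> for Token {
    fn eq(&self, other: &TokenWithLocation) -> bool {
        self == &other.token
    }
}

fn main() {
    let peeked = TokenWithLocation {
        token: Token::EOF,
        location: Location { line: 1, column: 1 },
    };
    // Both orderings compile and compare only the token, matching the call sites above.
    assert!(peeked == Token::EOF);
    assert!(Token::EOF == peeked);
    println!("location-agnostic token comparison works");
}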