Skip to content

Commit 414f261

Browse files
committed
Add PartialEq
1 parent 25a1701 commit 414f261

File tree

2 files changed

+33
-28
lines changed

2 files changed

+33
-28
lines changed

src/parser.rs

+27-28
Original file line numberDiff line numberDiff line change
@@ -154,7 +154,7 @@ impl<'a> Parser<'a> {
154154
expecting_statement_delimiter = false;
155155
}
156156

157-
if parser.peek_token().token == Token::EOF {
157+
if parser.peek_token() == Token::EOF {
158158
break;
159159
}
160160
if expecting_statement_delimiter {
@@ -515,12 +515,12 @@ impl<'a> Parser<'a> {
515515
Keyword::INTERVAL => self.parse_interval(),
516516
Keyword::LISTAGG => self.parse_listagg_expr(),
517517
// Treat ARRAY[1,2,3] as an array [1,2,3], otherwise try as subquery or a function call
518-
Keyword::ARRAY if self.peek_token().token == Token::LBracket => {
518+
Keyword::ARRAY if self.peek_token() == Token::LBracket => {
519519
self.expect_token(&Token::LBracket)?;
520520
self.parse_array_expr(true)
521521
}
522522
Keyword::ARRAY
523-
if self.peek_token().token == Token::LParen
523+
if self.peek_token() == Token::LParen
524524
&& !dialect_of!(self is ClickHouseDialect) =>
525525
{
526526
self.expect_token(&Token::LParen)?;
@@ -1574,30 +1574,30 @@ impl<'a> Parser<'a> {
15741574
// Can only happen if `get_next_precedence` got out of sync with this function
15751575
_ => parser_err!(format!("No infix parser for token {:?}", tok.token)),
15761576
}
1577-
} else if Token::DoubleColon == tok.token {
1577+
} else if Token::DoubleColon == tok {
15781578
self.parse_pg_cast(expr)
1579-
} else if Token::ExclamationMark == tok.token {
1579+
} else if Token::ExclamationMark == tok {
15801580
// PostgreSQL factorial operation
15811581
Ok(Expr::UnaryOp {
15821582
op: UnaryOperator::PGPostfixFactorial,
15831583
expr: Box::new(expr),
15841584
})
1585-
} else if Token::LBracket == tok.token {
1585+
} else if Token::LBracket == tok {
15861586
if dialect_of!(self is PostgreSqlDialect | GenericDialect) {
15871587
// parse index
15881588
return self.parse_array_index(expr);
15891589
}
15901590
self.parse_map_access(expr)
1591-
} else if Token::Colon == tok.token {
1591+
} else if Token::Colon == tok {
15921592
Ok(Expr::JsonAccess {
15931593
left: Box::new(expr),
15941594
operator: JsonOperator::Colon,
15951595
right: Box::new(Expr::Value(self.parse_value()?)),
15961596
})
1597-
} else if Token::Arrow == tok.token
1598-
|| Token::LongArrow == tok.token
1599-
|| Token::HashArrow == tok.token
1600-
|| Token::HashLongArrow == tok.token
1597+
} else if Token::Arrow == tok
1598+
|| Token::LongArrow == tok
1599+
|| Token::HashArrow == tok
1600+
|| Token::HashLongArrow == tok
16011601
{
16021602
let operator = match tok.token {
16031603
Token::Arrow => JsonOperator::Arrow,
@@ -1962,7 +1962,7 @@ impl<'a> Parser<'a> {
19621962
/// Consume the next token if it matches the expected token, otherwise return false
19631963
#[must_use]
19641964
pub fn consume_token(&mut self, expected: &Token) -> bool {
1965-
if self.peek_token().token == *expected {
1965+
if self.peek_token() == *expected {
19661966
self.next_token();
19671967
true
19681968
} else {
@@ -2135,14 +2135,14 @@ impl<'a> Parser<'a> {
21352135
table_flag = Some(self.parse_object_name()?);
21362136
if self.parse_keyword(Keyword::TABLE) {
21372137
let table_name = self.parse_object_name()?;
2138-
if self.peek_token().token != Token::EOF {
2138+
if self.peek_token() != Token::EOF {
21392139
if let Token::Word(word) = self.peek_token().token {
21402140
if word.keyword == Keyword::OPTIONS {
21412141
options = self.parse_options(Keyword::OPTIONS)?
21422142
}
21432143
};
21442144

2145-
if self.peek_token().token != Token::EOF {
2145+
if self.peek_token() != Token::EOF {
21462146
let (a, q) = self.parse_as_query()?;
21472147
has_as = a;
21482148
query = Some(q);
@@ -2165,7 +2165,7 @@ impl<'a> Parser<'a> {
21652165
})
21662166
}
21672167
} else {
2168-
if self.peek_token().token == Token::EOF {
2168+
if self.peek_token() == Token::EOF {
21692169
self.prev_token();
21702170
}
21712171
self.expected("a `TABLE` keyword", self.peek_token())
@@ -3792,7 +3792,7 @@ impl<'a> Parser<'a> {
37923792
let next_token = self.next_token();
37933793
match next_token.token {
37943794
Token::Word(Word { value, keyword, .. }) if keyword == Keyword::NoKeyword => {
3795-
if self.peek_token().token == Token::LParen {
3795+
if self.peek_token() == Token::LParen {
37963796
return self.parse_function(ObjectName(vec![Ident::new(value)]));
37973797
}
37983798
Ok(Expr::Value(Value::SingleQuotedString(value)))
@@ -5849,8 +5849,7 @@ impl<'a> Parser<'a> {
58495849
pub fn parse_merge_clauses(&mut self) -> Result<Vec<MergeClause>, ParserError> {
58505850
let mut clauses: Vec<MergeClause> = vec![];
58515851
loop {
5852-
if self.peek_token().token == Token::EOF || self.peek_token().token == Token::SemiColon
5853-
{
5852+
if self.peek_token() == Token::EOF || self.peek_token() == Token::SemiColon {
58545853
break;
58555854
}
58565855
self.expect_keyword(Keyword::WHEN)?;
@@ -6069,19 +6068,19 @@ mod tests {
60696068
fn test_prev_index() {
60706069
let sql = "SELECT version";
60716070
all_dialects().run_parser_method(sql, |parser| {
6072-
assert_eq!(parser.peek_token().token, Token::make_keyword("SELECT"));
6073-
assert_eq!(parser.next_token().token, Token::make_keyword("SELECT"));
6071+
assert_eq!(parser.peek_token(), Token::make_keyword("SELECT"));
6072+
assert_eq!(parser.next_token(), Token::make_keyword("SELECT"));
60746073
parser.prev_token();
6075-
assert_eq!(parser.next_token().token, Token::make_keyword("SELECT"));
6076-
assert_eq!(parser.next_token().token, Token::make_word("version", None));
6074+
assert_eq!(parser.next_token(), Token::make_keyword("SELECT"));
6075+
assert_eq!(parser.next_token(), Token::make_word("version", None));
60776076
parser.prev_token();
6078-
assert_eq!(parser.peek_token().token, Token::make_word("version", None));
6079-
assert_eq!(parser.next_token().token, Token::make_word("version", None));
6080-
assert_eq!(parser.peek_token().token, Token::EOF);
6077+
assert_eq!(parser.peek_token(), Token::make_word("version", None));
6078+
assert_eq!(parser.next_token(), Token::make_word("version", None));
6079+
assert_eq!(parser.peek_token(), Token::EOF);
60816080
parser.prev_token();
6082-
assert_eq!(parser.next_token().token, Token::make_word("version", None));
6083-
assert_eq!(parser.next_token().token, Token::EOF);
6084-
assert_eq!(parser.next_token().token, Token::EOF);
6081+
assert_eq!(parser.next_token(), Token::make_word("version", None));
6082+
assert_eq!(parser.next_token(), Token::EOF);
6083+
assert_eq!(parser.next_token(), Token::EOF);
60856084
parser.prev_token();
60866085
});
60876086
}

src/tokenizer.rs

+6
Original file line numberDiff line numberDiff line change
@@ -335,6 +335,12 @@ impl PartialEq<Token> for TokenWithLocation {
335335
}
336336
}
337337

338+
/// Symmetric counterpart to `PartialEq<Token> for TokenWithLocation`:
/// lets comparisons be written in either order (`token == token_with_location`
/// as well as `token_with_location == token`).
///
/// Equality is decided solely by the wrapped [`Token`]; the location
/// carried by `TokenWithLocation` is deliberately ignored.
impl PartialEq<TokenWithLocation> for Token {
    fn eq(&self, other: &TokenWithLocation) -> bool {
        // Compare against the inner token; relies on `PartialEq` for `Token`.
        other.token == *self
    }
}
343+
338344
impl fmt::Display for TokenWithLocation {
339345
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
340346
self.token.fmt(f)

0 commit comments

Comments
 (0)