diff --git a/README.md b/README.md
index 1195cc941..454ea6c29 100644
--- a/README.md
+++ b/README.md
@@ -93,7 +93,7 @@ $ cargo run --features json_example --example cli FILENAME.sql [--dialectname]
## Users
This parser is currently being used by the [DataFusion] query engine,
-[LocustDB], [Ballista], [GlueSQL], and [Opteryx].
+[LocustDB], [Ballista], [GlueSQL], [Opteryx], and [JumpWire].
If your project is using sqlparser-rs feel free to make a PR to add it
to this list.
@@ -179,6 +179,7 @@ licensed as above, without any additional terms or conditions.
[Ballista]: https://github.com/apache/arrow-ballista
[GlueSQL]: https://github.com/gluesql/gluesql
[Opteryx]: https://github.com/mabel-dev/opteryx
+[JumpWire]: https://github.com/extragoodlabs/jumpwire
[Pratt Parser]: https://tdop.github.io/
[sql-2016-grammar]: https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html
[sql-standard]: https://en.wikipedia.org/wiki/ISO/IEC_9075
diff --git a/src/ast/mod.rs b/src/ast/mod.rs
index b9d73f82a..87f7ebb37 100644
--- a/src/ast/mod.rs
+++ b/src/ast/mod.rs
@@ -155,7 +155,7 @@ impl fmt::Display for Ident {
let escaped = value::escape_quoted_string(&self.value, q);
write!(f, "{q}{escaped}{q}")
}
- Some(q) if q == '[' => write!(f, "[{}]", self.value),
+ Some('[') => write!(f, "[{}]", self.value),
None => f.write_str(&self.value),
_ => panic!("unexpected quote style"),
}
@@ -579,7 +579,7 @@ pub enum Expr {
///
/// Syntax:
/// ```sql
- /// MARCH (<col>, <col>, ...) AGAINST (<expr> [<search modifier>])
+ /// MATCH (<col>, <col>, ...) AGAINST (<expr> [<search modifier>])
///
/// = CompoundIdentifier
/// = String literal
@@ -1307,6 +1307,10 @@ pub enum Statement {
selection: Option<Expr>,
/// RETURNING
returning: Option<Vec<SelectItem>>,
+ /// ORDER BY (MySQL)
+ order_by: Vec<OrderByExpr>,
+ /// LIMIT (MySQL)
+ limit: Option<Expr>,
},
/// CREATE VIEW
CreateView {
@@ -1318,6 +1322,12 @@ pub enum Statement {
query: Box<Query>,
with_options: Vec<SqlOption>,
cluster_by: Vec<Ident>,
+ /// if true, has RedShift [`WITH NO SCHEMA BINDING`] clause
+ with_no_schema_binding: bool,
+ /// if true, has SQLite `IF NOT EXISTS` clause
+ if_not_exists: bool,
+ /// if true, has SQLite `TEMP` or `TEMPORARY` clause
+ temporary: bool,
},
/// CREATE TABLE
CreateTable {
@@ -1435,6 +1445,16 @@ pub enum Statement {
name: Ident,
operation: AlterRoleOperation,
},
+ /// ATTACH DATABASE 'path/to/file' AS alias
+ /// (SQLite-specific)
+ AttachDatabase {
+ /// The name to bind to the newly attached database
+ schema_name: Ident,
+ /// An expression that indicates the path to the database file
+ database_file_name: Expr,
+ /// true if the syntax is 'ATTACH DATABASE', false if it's just 'ATTACH'
+ database: bool,
+ },
/// DROP
Drop {
/// The type of the object to drop: TABLE, VIEW, etc.
@@ -1975,6 +1995,14 @@ impl fmt::Display for Statement {
}
Ok(())
}
+ Statement::AttachDatabase {
+ schema_name,
+ database_file_name,
+ database,
+ } => {
+ let keyword = if *database { "DATABASE " } else { "" };
+ write!(f, "ATTACH {keyword}{database_file_name} AS {schema_name}")
+ }
Statement::Analyze {
table_name,
partitions,
@@ -2129,6 +2157,8 @@ impl fmt::Display for Statement {
using,
selection,
returning,
+ order_by,
+ limit,
} => {
write!(f, "DELETE ")?;
if !tables.is_empty() {
@@ -2144,6 +2174,12 @@ impl fmt::Display for Statement {
if let Some(returning) = returning {
write!(f, " RETURNING {}", display_comma_separated(returning))?;
}
+ if !order_by.is_empty() {
+ write!(f, " ORDER BY {}", display_comma_separated(order_by))?;
+ }
+ if let Some(limit) = limit {
+ write!(f, " LIMIT {limit}")?;
+ }
Ok(())
}
Statement::Close { cursor } => {
@@ -2247,13 +2283,18 @@ impl fmt::Display for Statement {
materialized,
with_options,
cluster_by,
+ with_no_schema_binding,
+ if_not_exists,
+ temporary,
} => {
write!(
f,
- "CREATE {or_replace}{materialized}VIEW {name}",
+ "CREATE {or_replace}{materialized}{temporary}VIEW {if_not_exists}{name}",
or_replace = if *or_replace { "OR REPLACE " } else { "" },
materialized = if *materialized { "MATERIALIZED " } else { "" },
- name = name
+ name = name,
+ temporary = if *temporary { "TEMPORARY " } else { "" },
+ if_not_exists = if *if_not_exists { "IF NOT EXISTS " } else { "" }
)?;
if !with_options.is_empty() {
write!(f, " WITH ({})", display_comma_separated(with_options))?;
@@ -2264,7 +2305,11 @@ impl fmt::Display for Statement {
if !cluster_by.is_empty() {
write!(f, " CLUSTER BY ({})", display_comma_separated(cluster_by))?;
}
- write!(f, " AS {query}")
+ write!(f, " AS {query}")?;
+ if *with_no_schema_binding {
+ write!(f, " WITH NO SCHEMA BINDING")?;
+ }
+ Ok(())
}
Statement::CreateTable {
name,
diff --git a/src/ast/query.rs b/src/ast/query.rs
index af35c37a3..88b0931de 100644
--- a/src/ast/query.rs
+++ b/src/ast/query.rs
@@ -35,6 +35,10 @@ pub struct Query {
pub order_by: Vec<OrderByExpr>,
/// `LIMIT { <N> | ALL }`
pub limit: Option<Expr>,
+
+ /// `LIMIT { <limit> BY { <expr>,<expr>,... } }`
+ pub limit_by: Vec<Expr>,
+
/// `OFFSET <N> [ { ROW | ROWS } ]`
pub offset: Option<Offset>,
/// `FETCH { FIRST | NEXT } <N> [ PERCENT ] { ROW | ROWS } | { ONLY | WITH TIES }`
@@ -58,6 +62,9 @@ impl fmt::Display for Query {
if let Some(ref offset) = self.offset {
write!(f, " {offset}")?;
}
+ if !self.limit_by.is_empty() {
+ write!(f, " BY {}", display_separated(&self.limit_by, ", "))?;
+ }
if let Some(ref fetch) = self.fetch {
write!(f, " {fetch}")?;
}
@@ -713,13 +720,28 @@ pub enum TableFactor {
/// For example `FROM monthly_sales PIVOT(sum(amount) FOR MONTH IN ('JAN', 'FEB'))`
/// See
Pivot {
- #[cfg_attr(feature = "visitor", visit(with = "visit_relation"))]
- name: ObjectName,
- table_alias: Option<TableAlias>,
+ #[cfg_attr(feature = "visitor", visit(with = "visit_table_factor"))]
+ table: Box<TableFactor>,
aggregate_function: Expr, // Function expression
value_column: Vec<Ident>,
pivot_values: Vec<Value>,
- pivot_alias: Option<TableAlias>,
+ alias: Option<TableAlias>,
+ },
+ /// An UNPIVOT operation on a table.
+ ///
+ /// Syntax:
+ /// ```sql
+ /// table UNPIVOT(value FOR name IN (column1, [ column2, ... ])) [ alias ]
+ /// ```
+ ///
+ /// See .
+ Unpivot {
+ #[cfg_attr(feature = "visitor", visit(with = "visit_table_factor"))]
+ table: Box<TableFactor>,
+ value: Ident,
+ name: Ident,
+ columns: Vec<Ident>,
+ alias: Option<TableAlias>,
},
}
@@ -803,32 +825,42 @@ impl fmt::Display for TableFactor {
Ok(())
}
TableFactor::Pivot {
- name,
- table_alias,
+ table,
aggregate_function,
value_column,
pivot_values,
- pivot_alias,
+ alias,
} => {
- write!(f, "{}", name)?;
- if table_alias.is_some() {
- write!(f, " AS {}", table_alias.as_ref().unwrap())?;
- }
write!(
f,
- " PIVOT({} FOR {} IN (",
+ "{} PIVOT({} FOR {} IN ({}))",
+ table,
aggregate_function,
- Expr::CompoundIdentifier(value_column.to_vec())
+ Expr::CompoundIdentifier(value_column.to_vec()),
+ display_comma_separated(pivot_values)
)?;
- for value in pivot_values {
- write!(f, "{}", value)?;
- if !value.eq(pivot_values.last().unwrap()) {
- write!(f, ", ")?;
- }
+ if alias.is_some() {
+ write!(f, " AS {}", alias.as_ref().unwrap())?;
}
- write!(f, "))")?;
- if pivot_alias.is_some() {
- write!(f, " AS {}", pivot_alias.as_ref().unwrap())?;
+ Ok(())
+ }
+ TableFactor::Unpivot {
+ table,
+ value,
+ name,
+ columns,
+ alias,
+ } => {
+ write!(
+ f,
+ "{} UNPIVOT({} FOR {} IN ({}))",
+ table,
+ value,
+ name,
+ display_comma_separated(columns)
+ )?;
+ if alias.is_some() {
+ write!(f, " AS {}", alias.as_ref().unwrap())?;
}
Ok(())
}
diff --git a/src/ast/value.rs b/src/ast/value.rs
index 9c18a325c..e6f139256 100644
--- a/src/ast/value.rs
+++ b/src/ast/value.rs
@@ -117,6 +117,8 @@ pub enum DateTimeField {
Month,
Week,
Day,
+ DayOfWeek,
+ DayOfYear,
Date,
Hour,
Minute,
@@ -127,6 +129,7 @@ pub enum DateTimeField {
Doy,
Epoch,
Isodow,
+ IsoWeek,
Isoyear,
Julian,
Microsecond,
@@ -138,6 +141,7 @@ pub enum DateTimeField {
Nanosecond,
Nanoseconds,
Quarter,
+ Time,
Timezone,
TimezoneHour,
TimezoneMinute,
@@ -151,6 +155,8 @@ impl fmt::Display for DateTimeField {
DateTimeField::Month => "MONTH",
DateTimeField::Week => "WEEK",
DateTimeField::Day => "DAY",
+ DateTimeField::DayOfWeek => "DAYOFWEEK",
+ DateTimeField::DayOfYear => "DAYOFYEAR",
DateTimeField::Date => "DATE",
DateTimeField::Hour => "HOUR",
DateTimeField::Minute => "MINUTE",
@@ -162,6 +168,7 @@ impl fmt::Display for DateTimeField {
DateTimeField::Epoch => "EPOCH",
DateTimeField::Isodow => "ISODOW",
DateTimeField::Isoyear => "ISOYEAR",
+ DateTimeField::IsoWeek => "ISOWEEK",
DateTimeField::Julian => "JULIAN",
DateTimeField::Microsecond => "MICROSECOND",
DateTimeField::Microseconds => "MICROSECONDS",
@@ -172,6 +179,7 @@ impl fmt::Display for DateTimeField {
DateTimeField::Nanosecond => "NANOSECOND",
DateTimeField::Nanoseconds => "NANOSECONDS",
DateTimeField::Quarter => "QUARTER",
+ DateTimeField::Time => "TIME",
DateTimeField::Timezone => "TIMEZONE",
DateTimeField::TimezoneHour => "TIMEZONE_HOUR",
DateTimeField::TimezoneMinute => "TIMEZONE_MINUTE",
diff --git a/src/ast/visitor.rs b/src/ast/visitor.rs
index bb7c19678..09cb20a0c 100644
--- a/src/ast/visitor.rs
+++ b/src/ast/visitor.rs
@@ -490,7 +490,7 @@ where
///
/// This demonstrates how to effectively replace an expression with another more complicated one
/// that references the original. This example avoids unnecessary allocations by using the
-/// [`std::mem`](std::mem) family of functions.
+/// [`std::mem`] family of functions.
///
/// ```
/// # use sqlparser::parser::Parser;
diff --git a/src/keywords.rs b/src/keywords.rs
index ad0526ccd..e1bbf44ae 100644
--- a/src/keywords.rs
+++ b/src/keywords.rs
@@ -95,6 +95,7 @@ define_keywords!(
ASYMMETRIC,
AT,
ATOMIC,
+ ATTACH,
AUTHORIZATION,
AUTOINCREMENT,
AUTO_INCREMENT,
@@ -109,6 +110,7 @@ define_keywords!(
BIGINT,
BIGNUMERIC,
BINARY,
+ BINDING,
BLOB,
BLOOMFILTER,
BOOL,
@@ -195,6 +197,8 @@ define_keywords!(
DATE,
DATETIME,
DAY,
+ DAYOFWEEK,
+ DAYOFYEAR,
DEALLOCATE,
DEC,
DECADE,
@@ -333,6 +337,7 @@ define_keywords!(
IS,
ISODOW,
ISOLATION,
+ ISOWEEK,
ISOYEAR,
JAR,
JOIN,
@@ -631,6 +636,7 @@ define_keywords!(
UNKNOWN,
UNLOGGED,
UNNEST,
+ UNPIVOT,
UNSIGNED,
UNTIL,
UPDATE,
@@ -689,6 +695,7 @@ pub const RESERVED_FOR_TABLE_ALIAS: &[Keyword] = &[
Keyword::HAVING,
Keyword::ORDER,
Keyword::PIVOT,
+ Keyword::UNPIVOT,
Keyword::TOP,
Keyword::LATERAL,
Keyword::VIEW,
diff --git a/src/parser/mod.rs b/src/parser/mod.rs
index f18c7e1a1..95f1f8edc 100644
--- a/src/parser/mod.rs
+++ b/src/parser/mod.rs
@@ -456,6 +456,7 @@ impl<'a> Parser<'a> {
Ok(Statement::Query(Box::new(self.parse_query()?)))
}
Keyword::TRUNCATE => Ok(self.parse_truncate()?),
+ Keyword::ATTACH => Ok(self.parse_attach_database()?),
Keyword::MSCK => Ok(self.parse_msck()?),
Keyword::CREATE => Ok(self.parse_create()?),
Keyword::CACHE => Ok(self.parse_cache_table()?),
@@ -543,6 +544,18 @@ impl<'a> Parser<'a> {
})
}
+ pub fn parse_attach_database(&mut self) -> Result<Statement, ParserError> {
+ let database = self.parse_keyword(Keyword::DATABASE);
+ let database_file_name = self.parse_expr()?;
+ self.expect_keyword(Keyword::AS)?;
+ let schema_name = self.parse_identifier()?;
+ Ok(Statement::AttachDatabase {
+ database,
+ schema_name,
+ database_file_name,
+ })
+ }
+
pub fn parse_analyze(&mut self) -> Result {
self.expect_keyword(Keyword::TABLE)?;
let table_name = self.parse_object_name()?;
@@ -1509,6 +1522,8 @@ impl<'a> Parser<'a> {
Keyword::MONTH => Ok(DateTimeField::Month),
Keyword::WEEK => Ok(DateTimeField::Week),
Keyword::DAY => Ok(DateTimeField::Day),
+ Keyword::DAYOFWEEK => Ok(DateTimeField::DayOfWeek),
+ Keyword::DAYOFYEAR => Ok(DateTimeField::DayOfYear),
Keyword::DATE => Ok(DateTimeField::Date),
Keyword::HOUR => Ok(DateTimeField::Hour),
Keyword::MINUTE => Ok(DateTimeField::Minute),
@@ -1520,6 +1535,7 @@ impl<'a> Parser<'a> {
Keyword::EPOCH => Ok(DateTimeField::Epoch),
Keyword::ISODOW => Ok(DateTimeField::Isodow),
Keyword::ISOYEAR => Ok(DateTimeField::Isoyear),
+ Keyword::ISOWEEK => Ok(DateTimeField::IsoWeek),
Keyword::JULIAN => Ok(DateTimeField::Julian),
Keyword::MICROSECOND => Ok(DateTimeField::Microsecond),
Keyword::MICROSECONDS => Ok(DateTimeField::Microseconds),
@@ -1530,6 +1546,7 @@ impl<'a> Parser<'a> {
Keyword::NANOSECOND => Ok(DateTimeField::Nanosecond),
Keyword::NANOSECONDS => Ok(DateTimeField::Nanoseconds),
Keyword::QUARTER => Ok(DateTimeField::Quarter),
+ Keyword::TIME => Ok(DateTimeField::Time),
Keyword::TIMEZONE => Ok(DateTimeField::Timezone),
Keyword::TIMEZONE_HOUR => Ok(DateTimeField::TimezoneHour),
Keyword::TIMEZONE_MINUTE => Ok(DateTimeField::TimezoneMinute),
@@ -2475,7 +2492,7 @@ impl<'a> Parser<'a> {
self.parse_create_table(or_replace, temporary, global, transient)
} else if self.parse_keyword(Keyword::MATERIALIZED) || self.parse_keyword(Keyword::VIEW) {
self.prev_token();
- self.parse_create_view(or_replace)
+ self.parse_create_view(or_replace, temporary)
} else if self.parse_keyword(Keyword::EXTERNAL) {
self.parse_create_external_table(or_replace)
} else if self.parse_keyword(Keyword::FUNCTION) {
@@ -2952,9 +2969,15 @@ impl<'a> Parser<'a> {
}
}
- pub fn parse_create_view(&mut self, or_replace: bool) -> Result<Statement, ParserError> {
+ pub fn parse_create_view(
+ &mut self,
+ or_replace: bool,
+ temporary: bool,
+ ) -> Result<Statement, ParserError> {
let materialized = self.parse_keyword(Keyword::MATERIALIZED);
self.expect_keyword(Keyword::VIEW)?;
+ let if_not_exists = dialect_of!(self is SQLiteDialect|GenericDialect)
+ && self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
// Many dialects support `OR ALTER` right after `CREATE`, but we don't (yet).
// ANSI SQL and Postgres support RECURSIVE here, but we don't support it either.
let name = self.parse_object_name()?;
@@ -2971,6 +2994,15 @@ impl<'a> Parser<'a> {
self.expect_keyword(Keyword::AS)?;
let query = Box::new(self.parse_query()?);
// Optional `WITH [ CASCADED | LOCAL ] CHECK OPTION` is widely supported here.
+
+ let with_no_schema_binding = dialect_of!(self is RedshiftSqlDialect | GenericDialect)
+ && self.parse_keywords(&[
+ Keyword::WITH,
+ Keyword::NO,
+ Keyword::SCHEMA,
+ Keyword::BINDING,
+ ]);
+
Ok(Statement::CreateView {
name,
columns,
@@ -2979,6 +3011,9 @@ impl<'a> Parser<'a> {
or_replace,
with_options,
cluster_by,
+ with_no_schema_binding,
+ if_not_exists,
+ temporary,
})
}
@@ -4688,7 +4723,11 @@ impl<'a> Parser<'a> {
pub fn parse_literal_string(&mut self) -> Result<String, ParserError> {
let next_token = self.next_token();
match next_token.token {
- Token::Word(Word { value, keyword, .. }) if keyword == Keyword::NoKeyword => Ok(value),
+ Token::Word(Word {
+ value,
+ keyword: Keyword::NoKeyword,
+ ..
+ }) => Ok(value),
Token::SingleQuotedString(s) => Ok(s),
Token::DoubleQuotedString(s) => Ok(s),
Token::EscapedStringLiteral(s) if dialect_of!(self is PostgreSqlDialect | GenericDialect) => {
@@ -5038,6 +5077,27 @@ impl<'a> Parser<'a> {
break;
}
}
+
+ // BigQuery accepts any number of quoted identifiers of a table name.
+ // https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#quoted_identifiers
+ if dialect_of!(self is BigQueryDialect)
+ && idents.iter().any(|ident| ident.value.contains('.'))
+ {
+ idents = idents
+ .into_iter()
+ .flat_map(|ident| {
+ ident
+ .value
+ .split('.')
+ .map(|value| Ident {
+ value: value.into(),
+ quote_style: ident.quote_style,
+ })
+ .collect::<Vec<_>>()
+ })
+ .collect()
+ }
+
Ok(ObjectName(idents))
}
@@ -5303,12 +5363,21 @@ impl<'a> Parser<'a> {
} else {
None
};
-
let returning = if self.parse_keyword(Keyword::RETURNING) {
Some(self.parse_comma_separated(Parser::parse_select_item)?)
} else {
None
};
+ let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
+ self.parse_comma_separated(Parser::parse_order_by_expr)?
+ } else {
+ vec![]
+ };
+ let limit = if self.parse_keyword(Keyword::LIMIT) {
+ self.parse_limit()?
+ } else {
+ None
+ };
Ok(Statement::Delete {
tables,
@@ -5316,6 +5385,8 @@ impl<'a> Parser<'a> {
using,
selection,
returning,
+ order_by,
+ limit,
})
}
@@ -5396,6 +5467,7 @@ impl<'a> Parser<'a> {
with,
body: Box::new(SetExpr::Insert(insert)),
limit: None,
+ limit_by: vec![],
order_by: vec![],
offset: None,
fetch: None,
@@ -5407,6 +5479,7 @@ impl<'a> Parser<'a> {
with,
body: Box::new(SetExpr::Update(update)),
limit: None,
+ limit_by: vec![],
order_by: vec![],
offset: None,
fetch: None,
@@ -5433,7 +5506,7 @@ impl<'a> Parser<'a> {
offset = Some(self.parse_offset()?)
}
- if dialect_of!(self is GenericDialect | MySqlDialect)
+ if dialect_of!(self is GenericDialect | MySqlDialect | ClickHouseDialect)
&& limit.is_some()
&& offset.is_none()
&& self.consume_token(&Token::Comma)
@@ -5448,6 +5521,14 @@ impl<'a> Parser<'a> {
}
}
+ let limit_by = if dialect_of!(self is ClickHouseDialect | GenericDialect)
+ && self.parse_keyword(Keyword::BY)
+ {
+ self.parse_comma_separated(Parser::parse_expr)?
+ } else {
+ vec![]
+ };
+
let fetch = if self.parse_keyword(Keyword::FETCH) {
Some(self.parse_fetch()?)
} else {
@@ -5464,6 +5545,7 @@ impl<'a> Parser<'a> {
body,
order_by,
limit,
+ limit_by,
offset,
fetch,
locks,
@@ -5797,8 +5879,8 @@ impl<'a> Parser<'a> {
self.expect_token(&Token::Colon)?;
} else if self.parse_keyword(Keyword::ROLE) {
let context_modifier = match modifier {
- Some(keyword) if keyword == Keyword::LOCAL => ContextModifier::Local,
- Some(keyword) if keyword == Keyword::SESSION => ContextModifier::Session,
+ Some(Keyword::LOCAL) => ContextModifier::Local,
+ Some(Keyword::SESSION) => ContextModifier::Session,
_ => ContextModifier::None,
};
@@ -6230,9 +6312,8 @@ impl<'a> Parser<'a> {
| TableFactor::Table { alias, .. }
| TableFactor::UNNEST { alias, .. }
| TableFactor::TableFunction { alias, .. }
- | TableFactor::Pivot {
- pivot_alias: alias, ..
- }
+ | TableFactor::Pivot { alias, .. }
+ | TableFactor::Unpivot { alias, .. }
| TableFactor::NestedJoin { alias, .. } => {
// but not `FROM (mytable AS alias1) AS alias2`.
if let Some(inner_alias) = alias {
@@ -6311,11 +6392,6 @@ impl<'a> Parser<'a> {
let alias = self.parse_optional_table_alias(keywords::RESERVED_FOR_TABLE_ALIAS)?;
- // Pivot
- if self.parse_keyword(Keyword::PIVOT) {
- return self.parse_pivot_table_factor(name, alias);
- }
-
// MSSQL-specific table hints:
let mut with_hints = vec![];
if self.parse_keyword(Keyword::WITH) {
@@ -6327,14 +6403,25 @@ impl<'a> Parser<'a> {
self.prev_token();
}
};
- Ok(TableFactor::Table {
+
+ let mut table = TableFactor::Table {
name,
alias,
args,
with_hints,
version,
partitions,
- })
+ };
+
+ while let Some(kw) = self.parse_one_of_keywords(&[Keyword::PIVOT, Keyword::UNPIVOT]) {
+ table = match kw {
+ Keyword::PIVOT => self.parse_pivot_table_factor(table)?,
+ Keyword::UNPIVOT => self.parse_unpivot_table_factor(table)?,
+ _ => unreachable!(),
+ }
+ }
+
+ Ok(table)
}
}
@@ -6371,8 +6458,7 @@ impl<'a> Parser<'a> {
pub fn parse_pivot_table_factor(
&mut self,
- name: ObjectName,
- table_alias: Option<TableAlias>,
+ table: TableFactor,
) -> Result<TableFactor, ParserError> {
self.expect_token(&Token::LParen)?;
let function_name = match self.next_token().token {
@@ -6389,12 +6475,32 @@ impl<'a> Parser<'a> {
self.expect_token(&Token::RParen)?;
let alias = self.parse_optional_table_alias(keywords::RESERVED_FOR_TABLE_ALIAS)?;
Ok(TableFactor::Pivot {
- name,
- table_alias,
+ table: Box::new(table),
aggregate_function: function,
value_column,
pivot_values,
- pivot_alias: alias,
+ alias,
+ })
+ }
+
+ pub fn parse_unpivot_table_factor(
+ &mut self,
+ table: TableFactor,
+ ) -> Result<TableFactor, ParserError> {
+ self.expect_token(&Token::LParen)?;
+ let value = self.parse_identifier()?;
+ self.expect_keyword(Keyword::FOR)?;
+ let name = self.parse_identifier()?;
+ self.expect_keyword(Keyword::IN)?;
+ let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
+ self.expect_token(&Token::RParen)?;
+ let alias = self.parse_optional_table_alias(keywords::RESERVED_FOR_TABLE_ALIAS)?;
+ Ok(TableFactor::Unpivot {
+ table: Box::new(table),
+ value,
+ name,
+ columns,
+ alias,
})
}
@@ -6817,7 +6923,7 @@ impl<'a> Parser<'a> {
}
}
- /// Parse an [`WildcardAdditionalOptions`](WildcardAdditionalOptions) information for wildcard select items.
+ /// Parse an [`WildcardAdditionalOptions`] information for wildcard select items.
///
/// If it is not possible to parse it, will return an option.
pub fn parse_wildcard_additional_options(
diff --git a/src/test_utils.rs b/src/test_utils.rs
index b81cd5f4e..f0c5e425a 100644
--- a/src/test_utils.rs
+++ b/src/test_utils.rs
@@ -31,6 +31,9 @@ use crate::parser::{Parser, ParserError};
use crate::tokenizer::Tokenizer;
use crate::{ast::*, parser::ParserOptions};
+#[cfg(test)]
+use pretty_assertions::assert_eq;
+
/// Tests use the methods on this struct to invoke the parser on one or
/// multiple dialects.
pub struct TestedDialects {
@@ -159,6 +162,24 @@ impl TestedDialects {
}
}
+ /// Ensures that `sql` parses as a single [`Select`], and that additionally:
+ ///
+ /// 1. parsing `sql` results in the same [`Statement`] as parsing
+ /// `canonical`.
+ ///
+ /// 2. re-serializing the result of parsing `sql` produces the same
+ /// `canonical` sql string
+ pub fn verified_only_select_with_canonical(&self, query: &str, canonical: &str) -> Select {
+ let q = match self.one_statement_parses_to(query, canonical) {
+ Statement::Query(query) => *query,
+ _ => panic!("Expected Query"),
+ };
+ match *q.body {
+ SetExpr::Select(s) => *s,
+ _ => panic!("Expected SetExpr::Select"),
+ }
+ }
+
/// Ensures that `sql` parses as an [`Expr`], and that
/// re-serializing the parse result produces the same `sql`
/// string (is not modified after a serialization round-trip).
diff --git a/src/tokenizer.rs b/src/tokenizer.rs
index 175b5d3b1..067aa5a84 100644
--- a/src/tokenizer.rs
+++ b/src/tokenizer.rs
@@ -1368,7 +1368,7 @@ fn peeking_take_while(chars: &mut State, mut predicate: impl FnMut(char) -> bool
#[cfg(test)]
mod tests {
use super::*;
- use crate::dialect::{GenericDialect, MsSqlDialect};
+ use crate::dialect::{ClickHouseDialect, GenericDialect, MsSqlDialect};
#[test]
fn tokenizer_error_impl() {
@@ -1414,6 +1414,28 @@ mod tests {
compare(expected, tokens);
}
+ #[test]
+ fn tokenize_clickhouse_double_equal() {
+ let sql = String::from("SELECT foo=='1'");
+ let dialect = ClickHouseDialect {};
+ let mut tokenizer = Tokenizer::new(&dialect, &sql);
+ let tokens = tokenizer.tokenize().unwrap();
+
+ let expected = vec![
+ Token::make_keyword("SELECT"),
+ Token::Whitespace(Whitespace::Space),
+ Token::Word(Word {
+ value: "foo".to_string(),
+ quote_style: None,
+ keyword: Keyword::NoKeyword,
+ }),
+ Token::DoubleEq,
+ Token::SingleQuotedString("1".to_string()),
+ ];
+
+ compare(expected, tokens);
+ }
+
#[test]
fn tokenize_select_exponent() {
let sql = String::from("SELECT 1e10, 1e-10, 1e+10, 1ea, 1e-10a, 1e-10-10");
diff --git a/tests/sqlparser_bigquery.rs b/tests/sqlparser_bigquery.rs
index 90f721a25..7a9a8d1c4 100644
--- a/tests/sqlparser_bigquery.rs
+++ b/tests/sqlparser_bigquery.rs
@@ -13,6 +13,8 @@
#[macro_use]
mod test_utils;
+use std::ops::Deref;
+
use sqlparser::ast::*;
use sqlparser::dialect::{BigQueryDialect, GenericDialect};
use sqlparser::parser::ParserError;
@@ -85,9 +87,24 @@ fn parse_raw_literal() {
#[test]
fn parse_table_identifiers() {
- fn test_table_ident(ident: &str, expected: Vec<Ident>) {
+ /// Parses a table identifier ident and verifies that re-serializing the
+ /// parsed identifier produces the original ident string.
+ ///
+ /// In some cases, re-serializing the result of the parsed ident is not
+ /// expected to produce the original ident string. canonical is provided
+ /// instead as the canonical representation of the identifier for comparison.
+ /// For example, re-serializing the result of ident `foo.bar` produces
+ /// the equivalent canonical representation `foo`.`bar`
+ fn test_table_ident(ident: &str, canonical: Option<&str>, expected: Vec<Ident>) {
let sql = format!("SELECT 1 FROM {ident}");
- let select = bigquery().verified_only_select(&sql);
+ let canonical = canonical.map(|ident| format!("SELECT 1 FROM {ident}"));
+
+ let select = if let Some(canonical) = canonical {
+ bigquery().verified_only_select_with_canonical(&sql, canonical.deref())
+ } else {
+ bigquery().verified_only_select(&sql)
+ };
+
assert_eq!(
select.from,
vec![TableWithJoins {
@@ -103,26 +120,30 @@ fn parse_table_identifiers() {
},]
);
}
+
fn test_table_ident_err(ident: &str) {
let sql = format!("SELECT 1 FROM {ident}");
assert!(bigquery().parse_sql_statements(&sql).is_err());
}
- test_table_ident("da-sh-es", vec![Ident::new("da-sh-es")]);
+ test_table_ident("da-sh-es", None, vec![Ident::new("da-sh-es")]);
- test_table_ident("`spa ce`", vec![Ident::with_quote('`', "spa ce")]);
+ test_table_ident("`spa ce`", None, vec![Ident::with_quote('`', "spa ce")]);
test_table_ident(
"`!@#$%^&*()-=_+`",
+ None,
vec![Ident::with_quote('`', "!@#$%^&*()-=_+")],
);
test_table_ident(
"_5abc.dataField",
+ None,
vec![Ident::new("_5abc"), Ident::new("dataField")],
);
test_table_ident(
"`5abc`.dataField",
+ None,
vec![Ident::with_quote('`', "5abc"), Ident::new("dataField")],
);
@@ -130,6 +151,7 @@ fn parse_table_identifiers() {
test_table_ident(
"abc5.dataField",
+ None,
vec![Ident::new("abc5"), Ident::new("dataField")],
);
@@ -137,13 +159,76 @@ fn parse_table_identifiers() {
test_table_ident(
"`GROUP`.dataField",
+ None,
vec![Ident::with_quote('`', "GROUP"), Ident::new("dataField")],
);
// TODO: this should be error
// test_table_ident_err("GROUP.dataField");
- test_table_ident("abc5.GROUP", vec![Ident::new("abc5"), Ident::new("GROUP")]);
+ test_table_ident(
+ "abc5.GROUP",
+ None,
+ vec![Ident::new("abc5"), Ident::new("GROUP")],
+ );
+
+ test_table_ident(
+ "`foo.bar.baz`",
+ Some("`foo`.`bar`.`baz`"),
+ vec![
+ Ident::with_quote('`', "foo"),
+ Ident::with_quote('`', "bar"),
+ Ident::with_quote('`', "baz"),
+ ],
+ );
+
+ test_table_ident(
+ "`foo.bar`.`baz`",
+ Some("`foo`.`bar`.`baz`"),
+ vec![
+ Ident::with_quote('`', "foo"),
+ Ident::with_quote('`', "bar"),
+ Ident::with_quote('`', "baz"),
+ ],
+ );
+
+ test_table_ident(
+ "`foo`.`bar.baz`",
+ Some("`foo`.`bar`.`baz`"),
+ vec![
+ Ident::with_quote('`', "foo"),
+ Ident::with_quote('`', "bar"),
+ Ident::with_quote('`', "baz"),
+ ],
+ );
+
+ test_table_ident(
+ "`foo`.`bar`.`baz`",
+ Some("`foo`.`bar`.`baz`"),
+ vec![
+ Ident::with_quote('`', "foo"),
+ Ident::with_quote('`', "bar"),
+ Ident::with_quote('`', "baz"),
+ ],
+ );
+
+ test_table_ident(
+ "`5abc.dataField`",
+ Some("`5abc`.`dataField`"),
+ vec![
+ Ident::with_quote('`', "5abc"),
+ Ident::with_quote('`', "dataField"),
+ ],
+ );
+
+ test_table_ident(
+ "`_5abc.da-sh-es`",
+ Some("`_5abc`.`da-sh-es`"),
+ vec![
+ Ident::with_quote('`', "_5abc"),
+ Ident::with_quote('`', "da-sh-es"),
+ ],
+ );
}
#[test]
diff --git a/tests/sqlparser_clickhouse.rs b/tests/sqlparser_clickhouse.rs
index a14598b3d..9efe4a368 100644
--- a/tests/sqlparser_clickhouse.rs
+++ b/tests/sqlparser_clickhouse.rs
@@ -25,6 +25,7 @@ use sqlparser::ast::TableFactor::Table;
use sqlparser::ast::*;
use sqlparser::dialect::ClickHouseDialect;
+use sqlparser::dialect::GenericDialect;
#[test]
fn parse_map_access_expr() {
@@ -336,9 +337,34 @@ fn parse_create_table() {
);
}
+#[test]
+fn parse_double_equal() {
+ clickhouse().one_statement_parses_to(
+ r#"SELECT foo FROM bar WHERE buz == 'buz'"#,
+ r#"SELECT foo FROM bar WHERE buz = 'buz'"#,
+ );
+}
+
+#[test]
+fn parse_limit_by() {
+ clickhouse_and_generic().verified_stmt(
+ r#"SELECT * FROM default.last_asset_runs_mv ORDER BY created_at DESC LIMIT 1 BY asset"#,
+ );
+ clickhouse_and_generic().verified_stmt(
+ r#"SELECT * FROM default.last_asset_runs_mv ORDER BY created_at DESC LIMIT 1 BY asset, toStartOfDay(created_at)"#,
+ );
+}
+
fn clickhouse() -> TestedDialects {
TestedDialects {
dialects: vec![Box::new(ClickHouseDialect {})],
options: None,
}
}
+
+fn clickhouse_and_generic() -> TestedDialects {
+ TestedDialects {
+ dialects: vec![Box::new(ClickHouseDialect {}), Box::new(GenericDialect {})],
+ options: None,
+ }
+}
diff --git a/tests/sqlparser_common.rs b/tests/sqlparser_common.rs
index aaf0df007..1511aa76e 100644
--- a/tests/sqlparser_common.rs
+++ b/tests/sqlparser_common.rs
@@ -20,7 +20,7 @@
use matches::assert_matches;
use sqlparser::ast::SelectItem::UnnamedExpr;
-use sqlparser::ast::TableFactor::Pivot;
+use sqlparser::ast::TableFactor::{Pivot, Unpivot};
use sqlparser::ast::*;
use sqlparser::dialect::{
AnsiDialect, BigQueryDialect, ClickHouseDialect, DuckDbDialect, GenericDialect, HiveDialect,
@@ -261,6 +261,7 @@ fn parse_update_set_from() {
}))),
order_by: vec![],
limit: None,
+ limit_by: vec![],
offset: None,
fetch: None,
locks: vec![],
@@ -525,6 +526,7 @@ fn parse_where_delete_statement() {
using,
selection,
returning,
+ ..
} => {
assert_eq!(
TableFactor::Table {
@@ -565,6 +567,7 @@ fn parse_where_delete_with_alias_statement() {
using,
selection,
returning,
+ ..
} => {
assert_eq!(
TableFactor::Table {
@@ -2069,6 +2072,8 @@ fn parse_extract() {
verified_stmt("SELECT EXTRACT(MONTH FROM d)");
verified_stmt("SELECT EXTRACT(WEEK FROM d)");
verified_stmt("SELECT EXTRACT(DAY FROM d)");
+ verified_stmt("SELECT EXTRACT(DAYOFWEEK FROM d)");
+ verified_stmt("SELECT EXTRACT(DAYOFYEAR FROM d)");
verified_stmt("SELECT EXTRACT(DATE FROM d)");
verified_stmt("SELECT EXTRACT(HOUR FROM d)");
verified_stmt("SELECT EXTRACT(MINUTE FROM d)");
@@ -2082,6 +2087,7 @@ fn parse_extract() {
verified_stmt("SELECT EXTRACT(DOY FROM d)");
verified_stmt("SELECT EXTRACT(EPOCH FROM d)");
verified_stmt("SELECT EXTRACT(ISODOW FROM d)");
+ verified_stmt("SELECT EXTRACT(ISOWEEK FROM d)");
verified_stmt("SELECT EXTRACT(ISOYEAR FROM d)");
verified_stmt("SELECT EXTRACT(JULIAN FROM d)");
verified_stmt("SELECT EXTRACT(MICROSECOND FROM d)");
@@ -2094,6 +2100,7 @@ fn parse_extract() {
verified_stmt("SELECT EXTRACT(TIMEZONE FROM d)");
verified_stmt("SELECT EXTRACT(TIMEZONE_HOUR FROM d)");
verified_stmt("SELECT EXTRACT(TIMEZONE_MINUTE FROM d)");
+ verified_stmt("SELECT EXTRACT(TIME FROM d)");
let res = parse_sql_statements("SELECT EXTRACT(JIFFY FROM d)");
assert_eq!(
@@ -2656,6 +2663,7 @@ fn parse_create_table_as_table() {
}))),
order_by: vec![],
limit: None,
+ limit_by: vec![],
offset: None,
fetch: None,
locks: vec![],
@@ -2679,6 +2687,7 @@ fn parse_create_table_as_table() {
}))),
order_by: vec![],
limit: None,
+ limit_by: vec![],
offset: None,
fetch: None,
locks: vec![],
@@ -3970,6 +3979,7 @@ fn parse_interval_and_or_xor() {
}))),
order_by: vec![],
limit: None,
+ limit_by: vec![],
offset: None,
fetch: None,
locks: vec![],
@@ -5334,6 +5344,9 @@ fn parse_create_view() {
materialized,
with_options,
cluster_by,
+ with_no_schema_binding: late_binding,
+ if_not_exists,
+ temporary,
} => {
assert_eq!("myschema.myview", name.to_string());
assert_eq!(Vec::<Ident>::new(), columns);
@@ -5342,6 +5355,9 @@ fn parse_create_view() {
assert!(!or_replace);
assert_eq!(with_options, vec![]);
assert_eq!(cluster_by, vec![]);
+ assert!(!late_binding);
+ assert!(!if_not_exists);
+ assert!(!temporary);
}
_ => unreachable!(),
}
@@ -5382,6 +5398,9 @@ fn parse_create_view_with_columns() {
query,
materialized,
cluster_by,
+ with_no_schema_binding: late_binding,
+ if_not_exists,
+ temporary,
} => {
assert_eq!("v", name.to_string());
assert_eq!(columns, vec![Ident::new("has"), Ident::new("cols")]);
@@ -5390,6 +5409,40 @@ fn parse_create_view_with_columns() {
assert!(!materialized);
assert!(!or_replace);
assert_eq!(cluster_by, vec![]);
+ assert!(!late_binding);
+ assert!(!if_not_exists);
+ assert!(!temporary);
+ }
+ _ => unreachable!(),
+ }
+}
+
+#[test]
+fn parse_create_view_temporary() {
+ let sql = "CREATE TEMPORARY VIEW myschema.myview AS SELECT foo FROM bar";
+ match verified_stmt(sql) {
+ Statement::CreateView {
+ name,
+ columns,
+ query,
+ or_replace,
+ materialized,
+ with_options,
+ cluster_by,
+ with_no_schema_binding: late_binding,
+ if_not_exists,
+ temporary,
+ } => {
+ assert_eq!("myschema.myview", name.to_string());
+ assert_eq!(Vec::<Ident>::new(), columns);
+ assert_eq!("SELECT foo FROM bar", query.to_string());
+ assert!(!materialized);
+ assert!(!or_replace);
+ assert_eq!(with_options, vec![]);
+ assert_eq!(cluster_by, vec![]);
+ assert!(!late_binding);
+ assert!(!if_not_exists);
+ assert!(temporary);
}
_ => unreachable!(),
}
@@ -5407,6 +5460,9 @@ fn parse_create_or_replace_view() {
query,
materialized,
cluster_by,
+ with_no_schema_binding: late_binding,
+ if_not_exists,
+ temporary,
} => {
assert_eq!("v", name.to_string());
assert_eq!(columns, vec![]);
@@ -5415,6 +5471,9 @@ fn parse_create_or_replace_view() {
assert!(!materialized);
assert!(or_replace);
assert_eq!(cluster_by, vec![]);
+ assert!(!late_binding);
+ assert!(!if_not_exists);
+ assert!(!temporary);
}
_ => unreachable!(),
}
@@ -5436,6 +5495,9 @@ fn parse_create_or_replace_materialized_view() {
query,
materialized,
cluster_by,
+ with_no_schema_binding: late_binding,
+ if_not_exists,
+ temporary,
} => {
assert_eq!("v", name.to_string());
assert_eq!(columns, vec![]);
@@ -5444,6 +5506,9 @@ fn parse_create_or_replace_materialized_view() {
assert!(materialized);
assert!(or_replace);
assert_eq!(cluster_by, vec![]);
+ assert!(!late_binding);
+ assert!(!if_not_exists);
+ assert!(!temporary);
}
_ => unreachable!(),
}
@@ -5461,6 +5526,9 @@ fn parse_create_materialized_view() {
materialized,
with_options,
cluster_by,
+ with_no_schema_binding: late_binding,
+ if_not_exists,
+ temporary,
} => {
assert_eq!("myschema.myview", name.to_string());
assert_eq!(Vec::::new(), columns);
@@ -5469,6 +5537,9 @@ fn parse_create_materialized_view() {
assert_eq!(with_options, vec![]);
assert!(!or_replace);
assert_eq!(cluster_by, vec![]);
+ assert!(!late_binding);
+ assert!(!if_not_exists);
+ assert!(!temporary);
}
_ => unreachable!(),
}
@@ -5486,6 +5557,9 @@ fn parse_create_materialized_view_with_cluster_by() {
materialized,
with_options,
cluster_by,
+ with_no_schema_binding: late_binding,
+ if_not_exists,
+ temporary,
} => {
assert_eq!("myschema.myview", name.to_string());
assert_eq!(Vec::::new(), columns);
@@ -5494,6 +5568,9 @@ fn parse_create_materialized_view_with_cluster_by() {
assert_eq!(with_options, vec![]);
assert!(!or_replace);
assert_eq!(cluster_by, vec![Ident::new("foo")]);
+ assert!(!late_binding);
+ assert!(!if_not_exists);
+ assert!(!temporary);
}
_ => unreachable!(),
}
@@ -6410,6 +6487,7 @@ fn parse_merge() {
}))),
order_by: vec![],
limit: None,
+ limit_by: vec![],
offset: None,
fetch: None,
locks: vec![],
@@ -6812,10 +6890,10 @@ fn parse_time_functions() {
// Validating Parenthesis
let sql_without_parens = format!("SELECT {}", func_name);
- let mut ast_without_parens = select_localtime_func_call_ast.clone();
+ let mut ast_without_parens = select_localtime_func_call_ast;
ast_without_parens.special = true;
assert_eq!(
- &Expr::Function(ast_without_parens.clone()),
+ &Expr::Function(ast_without_parens),
expr_from_projection(&verified_only_select(&sql_without_parens).projection[0])
);
}
@@ -7270,10 +7348,16 @@ fn parse_pivot_table() {
assert_eq!(
verified_only_select(sql).from[0].relation,
Pivot {
- name: ObjectName(vec![Ident::new("monthly_sales")]),
- table_alias: Some(TableAlias {
- name: Ident::new("a"),
- columns: vec![]
+ table: Box::new(TableFactor::Table {
+ name: ObjectName(vec![Ident::new("monthly_sales")]),
+ alias: Some(TableAlias {
+ name: Ident::new("a"),
+ columns: vec![]
+ }),
+ args: None,
+ with_hints: vec![],
+ version: None,
+ partitions: vec![],
}),
aggregate_function: Expr::Function(Function {
name: ObjectName(vec![Ident::new("SUM")]),
@@ -7292,7 +7376,7 @@ fn parse_pivot_table() {
Value::SingleQuotedString("MAR".to_string()),
Value::SingleQuotedString("APR".to_string()),
],
- pivot_alias: Some(TableAlias {
+ alias: Some(TableAlias {
name: Ident {
value: "p".to_string(),
quote_style: None
@@ -7303,17 +7387,15 @@ fn parse_pivot_table() {
);
assert_eq!(verified_stmt(sql).to_string(), sql);
+ // parsing should succeed with empty alias
let sql_without_table_alias = concat!(
"SELECT * FROM monthly_sales ",
"PIVOT(SUM(a.amount) FOR a.MONTH IN ('JAN', 'FEB', 'MAR', 'APR')) AS p (c, d) ",
"ORDER BY EMPID"
);
assert_matches!(
- verified_only_select(sql_without_table_alias).from[0].relation,
- Pivot {
- table_alias: None, // parsing should succeed with empty alias
- ..
- }
+ &verified_only_select(sql_without_table_alias).from[0].relation,
+ Pivot { table, .. } if matches!(&**table, TableFactor::Table { alias: None, .. })
);
assert_eq!(
verified_stmt(sql_without_table_alias).to_string(),
@@ -7321,6 +7403,135 @@ fn parse_pivot_table() {
);
}
+#[test]
+fn parse_unpivot_table() {
+ let sql = concat!(
+ "SELECT * FROM sales AS s ",
+ "UNPIVOT(quantity FOR quarter IN (Q1, Q2, Q3, Q4)) AS u (product, quarter, quantity)"
+ );
+
+ pretty_assertions::assert_eq!(
+ verified_only_select(sql).from[0].relation,
+ Unpivot {
+ table: Box::new(TableFactor::Table {
+ name: ObjectName(vec![Ident::new("sales")]),
+ alias: Some(TableAlias {
+ name: Ident::new("s"),
+ columns: vec![]
+ }),
+ args: None,
+ with_hints: vec![],
+ version: None,
+ partitions: vec![],
+ }),
+ value: Ident {
+ value: "quantity".to_string(),
+ quote_style: None
+ },
+
+ name: Ident {
+ value: "quarter".to_string(),
+ quote_style: None
+ },
+ columns: ["Q1", "Q2", "Q3", "Q4"]
+ .into_iter()
+ .map(Ident::new)
+ .collect(),
+ alias: Some(TableAlias {
+ name: Ident::new("u"),
+ columns: ["product", "quarter", "quantity"]
+ .into_iter()
+ .map(Ident::new)
+ .collect()
+ }),
+ }
+ );
+ assert_eq!(verified_stmt(sql).to_string(), sql);
+
+ let sql_without_aliases = concat!(
+ "SELECT * FROM sales ",
+ "UNPIVOT(quantity FOR quarter IN (Q1, Q2, Q3, Q4))"
+ );
+
+ assert_matches!(
+ &verified_only_select(sql_without_aliases).from[0].relation,
+ Unpivot {
+ table,
+ alias: None,
+ ..
+ } if matches!(&**table, TableFactor::Table { alias: None, .. })
+ );
+ assert_eq!(
+ verified_stmt(sql_without_aliases).to_string(),
+ sql_without_aliases
+ );
+}
+
+#[test]
+fn parse_pivot_unpivot_table() {
+ let sql = concat!(
+ "SELECT * FROM census AS c ",
+ "UNPIVOT(population FOR year IN (population_2000, population_2010)) AS u ",
+ "PIVOT(sum(population) FOR year IN ('population_2000', 'population_2010')) AS p"
+ );
+
+ pretty_assertions::assert_eq!(
+ verified_only_select(sql).from[0].relation,
+ Pivot {
+ table: Box::new(Unpivot {
+ table: Box::new(TableFactor::Table {
+ name: ObjectName(vec![Ident::new("census")]),
+ alias: Some(TableAlias {
+ name: Ident::new("c"),
+ columns: vec![]
+ }),
+ args: None,
+ with_hints: vec![],
+ version: None,
+ partitions: vec![],
+ }),
+ value: Ident {
+ value: "population".to_string(),
+ quote_style: None
+ },
+
+ name: Ident {
+ value: "year".to_string(),
+ quote_style: None
+ },
+ columns: ["population_2000", "population_2010"]
+ .into_iter()
+ .map(Ident::new)
+ .collect(),
+ alias: Some(TableAlias {
+ name: Ident::new("u"),
+ columns: vec![]
+ }),
+ }),
+ aggregate_function: Expr::Function(Function {
+ name: ObjectName(vec![Ident::new("sum")]),
+ args: (vec![FunctionArg::Unnamed(FunctionArgExpr::Expr(
+ Expr::Identifier(Ident::new("population"))
+ ))]),
+ over: None,
+ distinct: false,
+ special: false,
+ order_by: vec![],
+ }),
+ value_column: vec![Ident::new("year")],
+ pivot_values: vec![
+ Value::SingleQuotedString("population_2000".to_string()),
+ Value::SingleQuotedString("population_2010".to_string())
+ ],
+ alias: Some(TableAlias {
+ name: Ident::new("p"),
+ columns: vec![]
+ }),
+ }
+ );
+ assert_eq!(verified_stmt(sql).to_string(), sql);
+}
+
/// Makes a predicate that looks like ((user_id = $id) OR user_id = $2...)
fn make_where_clause(num: usize) -> String {
use std::fmt::Write;
diff --git a/tests/sqlparser_mssql.rs b/tests/sqlparser_mssql.rs
index 135e5d138..f9eb4d8fb 100644
--- a/tests/sqlparser_mssql.rs
+++ b/tests/sqlparser_mssql.rs
@@ -92,6 +92,7 @@ fn parse_create_procedure() {
body: vec![Statement::Query(Box::new(Query {
with: None,
limit: None,
+ limit_by: vec![],
offset: None,
fetch: None,
locks: vec![],
@@ -493,6 +494,7 @@ fn parse_substring_in_select() {
assert_eq!(
Box::new(Query {
with: None,
+
body: Box::new(SetExpr::Select(Box::new(Select {
distinct: Some(Distinct::Distinct),
top: None,
@@ -532,6 +534,7 @@ fn parse_substring_in_select() {
}))),
order_by: vec![],
limit: None,
+ limit_by: vec![],
offset: None,
fetch: None,
locks: vec![],
diff --git a/tests/sqlparser_mysql.rs b/tests/sqlparser_mysql.rs
index 80ef9f981..80b9dcfd8 100644
--- a/tests/sqlparser_mysql.rs
+++ b/tests/sqlparser_mysql.rs
@@ -562,6 +562,7 @@ fn parse_escaped_quote_identifiers_with_escape() {
}))),
order_by: vec![],
limit: None,
+ limit_by: vec![],
offset: None,
fetch: None,
locks: vec![],
@@ -604,6 +605,7 @@ fn parse_escaped_quote_identifiers_with_no_escape() {
}))),
order_by: vec![],
limit: None,
+ limit_by: vec![],
offset: None,
fetch: None,
locks: vec![],
@@ -643,6 +645,7 @@ fn parse_escaped_backticks_with_escape() {
}))),
order_by: vec![],
limit: None,
+ limit_by: vec![],
offset: None,
fetch: None,
locks: vec![],
@@ -682,6 +685,7 @@ fn parse_escaped_backticks_with_no_escape() {
}))),
order_by: vec![],
limit: None,
+ limit_by: vec![],
offset: None,
fetch: None,
locks: vec![],
@@ -956,6 +960,7 @@ fn parse_simple_insert() {
})),
order_by: vec![],
limit: None,
+ limit_by: vec![],
offset: None,
fetch: None,
locks: vec![],
@@ -991,6 +996,7 @@ fn parse_empty_row_insert() {
})),
order_by: vec![],
limit: None,
+ limit_by: vec![],
offset: None,
fetch: None,
locks: vec![],
@@ -1049,6 +1055,7 @@ fn parse_insert_with_on_duplicate_update() {
})),
order_by: vec![],
limit: None,
+ limit_by: vec![],
offset: None,
fetch: None,
locks: vec![],
@@ -1315,6 +1322,38 @@ fn parse_update_with_joins() {
}
}
+#[test]
+fn parse_delete_with_order_by() {
+ let sql = "DELETE FROM customers ORDER BY id DESC";
+ match mysql().verified_stmt(sql) {
+ Statement::Delete { order_by, .. } => {
+ assert_eq!(
+ vec![OrderByExpr {
+ expr: Expr::Identifier(Ident {
+ value: "id".to_owned(),
+ quote_style: None
+ }),
+ asc: Some(false),
+ nulls_first: None,
+ }],
+ order_by
+ );
+ }
+ _ => unreachable!(),
+ }
+}
+
+#[test]
+fn parse_delete_with_limit() {
+ let sql = "DELETE FROM customers LIMIT 100";
+ match mysql().verified_stmt(sql) {
+ Statement::Delete { limit, .. } => {
+ assert_eq!(Some(Expr::Value(number("100"))), limit);
+ }
+ _ => unreachable!(),
+ }
+}
+
#[test]
fn parse_alter_table_drop_primary_key() {
assert_matches!(
@@ -1396,6 +1435,7 @@ fn parse_substring_in_select() {
}))),
order_by: vec![],
limit: None,
+ limit_by: vec![],
offset: None,
fetch: None,
locks: vec![],
@@ -1676,6 +1716,7 @@ fn parse_hex_string_introducer() {
}))),
order_by: vec![],
limit: None,
+ limit_by: vec![],
offset: None,
fetch: None,
locks: vec![],
diff --git a/tests/sqlparser_postgres.rs b/tests/sqlparser_postgres.rs
index bb3857817..fe336bda7 100644
--- a/tests/sqlparser_postgres.rs
+++ b/tests/sqlparser_postgres.rs
@@ -1000,6 +1000,7 @@ fn parse_copy_to() {
}))),
order_by: vec![],
limit: None,
+ limit_by: vec![],
offset: None,
fetch: None,
locks: vec![],
@@ -2046,6 +2047,7 @@ fn parse_array_subquery_expr() {
}),
order_by: vec![],
limit: None,
+ limit_by: vec![],
offset: None,
fetch: None,
locks: vec![],
diff --git a/tests/sqlparser_redshift.rs b/tests/sqlparser_redshift.rs
index f17ca5841..5ae539b3c 100644
--- a/tests/sqlparser_redshift.rs
+++ b/tests/sqlparser_redshift.rs
@@ -16,6 +16,7 @@ mod test_utils;
use test_utils::*;
use sqlparser::ast::*;
+use sqlparser::dialect::GenericDialect;
use sqlparser::dialect::RedshiftSqlDialect;
#[test]
@@ -272,6 +273,13 @@ fn redshift() -> TestedDialects {
}
}
+fn redshift_and_generic() -> TestedDialects {
+ TestedDialects {
+ dialects: vec![Box::new(RedshiftSqlDialect {}), Box::new(GenericDialect {})],
+ options: None,
+ }
+}
+
#[test]
fn test_sharp() {
let sql = "SELECT #_of_values";
@@ -281,3 +289,9 @@ fn test_sharp() {
select.projection[0]
);
}
+
+#[test]
+fn test_create_view_with_no_schema_binding() {
+ redshift_and_generic()
+ .verified_stmt("CREATE VIEW myevent AS SELECT eventname FROM event WITH NO SCHEMA BINDING");
+}
diff --git a/tests/sqlparser_sqlite.rs b/tests/sqlparser_sqlite.rs
index fd7a22461..39a82cc8b 100644
--- a/tests/sqlparser_sqlite.rs
+++ b/tests/sqlparser_sqlite.rs
@@ -61,6 +61,37 @@ fn parse_create_virtual_table() {
sqlite_and_generic().verified_stmt(sql);
}
+#[test]
+fn parse_create_view_temporary_if_not_exists() {
+ let sql = "CREATE TEMPORARY VIEW IF NOT EXISTS myschema.myview AS SELECT foo FROM bar";
+ match sqlite_and_generic().verified_stmt(sql) {
+ Statement::CreateView {
+ name,
+ columns,
+ query,
+ or_replace,
+ materialized,
+ with_options,
+ cluster_by,
+ with_no_schema_binding: late_binding,
+ if_not_exists,
+ temporary,
+ } => {
+ assert_eq!("myschema.myview", name.to_string());
+ assert_eq!(Vec::::new(), columns);
+ assert_eq!("SELECT foo FROM bar", query.to_string());
+ assert!(!materialized);
+ assert!(!or_replace);
+ assert_eq!(with_options, vec![]);
+ assert_eq!(cluster_by, vec![]);
+ assert!(!late_binding);
+ assert!(if_not_exists);
+ assert!(temporary);
+ }
+ _ => unreachable!(),
+ }
+}
+
#[test]
fn double_equality_operator() {
// Sqlite supports this operator: https://www.sqlite.org/lang_expr.html#binaryops
@@ -259,6 +290,24 @@ fn parse_create_table_with_strict() {
}
}
+#[test]
+fn parse_attach_database() {
+ let sql = "ATTACH DATABASE 'test.db' AS test";
+ let verified_stmt = sqlite().verified_stmt(sql);
+ assert_eq!(sql, format!("{}", verified_stmt));
+ match verified_stmt {
+ Statement::AttachDatabase {
+ schema_name,
+ database_file_name: Expr::Value(Value::SingleQuotedString(literal_name)),
+ database: true,
+ } => {
+ assert_eq!(schema_name.value, "test");
+ assert_eq!(literal_name, "test.db");
+ }
+ _ => unreachable!(),
+ }
+}
+
fn sqlite() -> TestedDialects {
TestedDialects {
dialects: vec![Box::new(SQLiteDialect {})],