Fix bq array with datatype #6278

Closed · wants to merge 3 commits
27 changes: 16 additions & 11 deletions core/src/main/java/com/alibaba/druid/sql/ast/expr/SQLArrayExpr.java
@@ -15,6 +15,7 @@
*/
package com.alibaba.druid.sql.ast.expr;

import com.alibaba.druid.sql.ast.SQLDataType;
import com.alibaba.druid.sql.ast.SQLExpr;
import com.alibaba.druid.sql.ast.SQLExprImpl;
import com.alibaba.druid.sql.ast.SQLObject;
@@ -26,16 +27,28 @@

public class SQLArrayExpr extends SQLExprImpl implements SQLReplaceable {
private SQLExpr expr;
private SQLDataType dataType;
private List<SQLExpr> values = new ArrayList<SQLExpr>();

public SQLArrayExpr() {
}

public SQLDataType getDataType() {
return dataType;
}

public void setDataType(SQLDataType dataType) {
this.dataType = dataType;
}

public SQLArrayExpr clone() {
SQLArrayExpr x = new SQLArrayExpr();
if (expr != null) {
x.setExpr(expr.clone());
}
if (dataType != null) {
x.setDataType(dataType.clone());
}
for (SQLExpr value : values) {
SQLExpr value2 = value.clone();
value2.setParent(x);
@@ -76,17 +89,9 @@ public void setValues(List<SQLExpr> values) {
@Override
protected void accept0(SQLASTVisitor visitor) {
if (visitor.visit(this)) {
if (expr != null) {
this.expr.accept(visitor);
}

if (values != null) {
for (SQLExpr value : values) {
if (value != null) {
value.accept(visitor);
}
}
}
acceptChild(visitor, expr);
acceptChild(visitor, dataType);
acceptChild(visitor, values);
}
visitor.endVisit(this);
}
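The hunks above add an optional dataType field to SQLArrayExpr so the AST can carry the element type BigQuery allows between ARRAY and the bracketed value list (for example ARRAY<INT64>[1, 2, 3]); clone() now copies it, and accept0() visits expr, dataType, and values through acceptChild. Below is a minimal sketch of building such a node by hand; the class name is hypothetical and it assumes the usual Druid AST helpers (SQLDataTypeImpl, SQLIdentifierExpr, SQLIntegerExpr) behave as their names suggest.

import com.alibaba.druid.sql.ast.SQLDataTypeImpl;
import com.alibaba.druid.sql.ast.expr.SQLArrayExpr;
import com.alibaba.druid.sql.ast.expr.SQLIdentifierExpr;
import com.alibaba.druid.sql.ast.expr.SQLIntegerExpr;

public class TypedArrayAstSketch {
    public static void main(String[] args) {
        // ARRAY<INT64>[1, 2, 3] built directly on the AST (illustrative sketch).
        SQLArrayExpr array = new SQLArrayExpr();
        array.setExpr(new SQLIdentifierExpr("ARRAY"));
        array.setDataType(new SQLDataTypeImpl("INT64")); // field introduced by this PR
        array.getValues().add(new SQLIntegerExpr(1));
        array.getValues().add(new SQLIntegerExpr(2));
        array.getValues().add(new SQLIntegerExpr(3));

        // clone() now carries the element type across as well.
        SQLArrayExpr copy = array.clone();
        System.out.println(copy.getDataType().getName()); // INT64
    }
}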
@@ -74,10 +74,19 @@ public SQLCreateFunctionStatement parseCreateFunction() {
accept(Token.RPAREN);
} else {
lexer.nextIfIdentifier("R");
String script = lexer.stringVal();
if (script.startsWith("\"") && script.endsWith("\"")) {
script = script.substring(1, script.length() - 1);
}
createFunction.setWrappedSource(
lexer.stringVal()
script
);
accept(Token.LITERAL_TEXT_BLOCK);
if (lexer.token() == Token.LITERAL_TEXT_BLOCK || lexer.token() == Token.LITERAL_CHARS) {
lexer.nextToken();
} else {
setErrorEndPos(lexer.pos());
printError(lexer.token());
}
}
continue;
}
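This hunk in the BigQuery CREATE FUNCTION parser replaces the old setWrappedSource(lexer.stringVal()) call: the literal is captured into script, one surrounding pair of double quotes is stripped, and the parser then accepts either a LITERAL_TEXT_BLOCK or a LITERAL_CHARS token rather than requiring a text block. A standalone sketch of just the stripping step follows; the class and method names are illustrative, and the extra length guard is a defensive addition in the sketch, not part of the parser code.

public class WrappedSourceQuoteStrip {
    // Mirrors the stripping done before setWrappedSource(script) in the hunk above,
    // with a small length guard so a lone quote character cannot underflow.
    static String stripOuterDoubleQuotes(String script) {
        if (script.length() >= 2 && script.startsWith("\"") && script.endsWith("\"")) {
            return script.substring(1, script.length() - 1);
        }
        return script;
    }

    public static void main(String[] args) {
        String literal = "\"return (lengthOfEncodedPolyline(polyline1) <= 1)\"";
        System.out.println(stripOuterDoubleQuotes(literal));
        // prints: return (lengthOfEncodedPolyline(polyline1) <= 1)
    }
}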
37 changes: 29 additions & 8 deletions core/src/main/java/com/alibaba/druid/sql/parser/SQLExprParser.java
@@ -586,12 +586,21 @@ && dialectFeatureEnabled(SQLTimestampExpr)) {
} else if (hash_lower == FnvHash.Constants.TIMESTAMP && lexer.token == Token.LITERAL_ALIAS) {
sqlExpr = new SQLTimestampExpr(lexer.stringVal());
lexer.nextToken();
} else if (hash_lower == FnvHash.Constants.ARRAY && lexer.token == Token.LBRACKET) {
} else if (hash_lower == FnvHash.Constants.ARRAY && (lexer.token == Token.LBRACKET || lexer.token == Token.LT)) {
SQLArrayExpr array = new SQLArrayExpr();
array.setExpr(new SQLIdentifierExpr(ident));
accept(Token.LBRACKET);
this.exprList(array.getValues(), array);
accept(Token.RBRACKET);
if (lexer.nextIf(Token.LT)) {
SQLDataType sqlDataType = this.parseDataType();
array.setDataType(sqlDataType);
accept(Token.GT);
}

if (lexer.nextIf(Token.LBRACKET)) {
this.exprList(array.getValues(), array);
accept(Token.RBRACKET);
} else {
throw new ParserException("Syntax error. " + lexer.info());
}
sqlExpr = array;
} else {
sqlExpr = primaryIdentifierRest(hash_lower, ident);
@@ -1241,10 +1250,18 @@ && dialectFeatureEnabled(SQLTimestampExpr)) {
SQLArrayExpr array = new SQLArrayExpr();
array.setExpr(new SQLIdentifierExpr("ARRAY"));
lexer.nextToken();
if (lexer.nextIf(Token.LT)) {
SQLDataType sqlDataType = this.parseDataType();
array.setDataType(sqlDataType);
accept(Token.GT);
}
if (lexer.nextIf(Token.LBRACKET)) {
this.exprList(array.getValues(), array);
accept(Token.RBRACKET);
} else {
throw new ParserException("Syntax error. " + lexer.info());
}

accept(Token.LBRACKET);
this.exprList(array.getValues(), array);
accept(Token.RBRACKET);
sqlExpr = array;
break;
}
@@ -6404,7 +6421,11 @@ protected SQLStructExpr struct() {
SQLStructExpr structExpr = new SQLStructExpr();
accept(Token.LPAREN);
aliasedItems(structExpr.getItems(), structExpr);
accept(Token.RPAREN);
if (lexer.token == Token.GTGT) {
lexer.token = Token.RPAREN;
} else {
accept(Token.RPAREN);
}
return structExpr;
}
}
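In SQLExprParser, both ARRAY entry points (the identifier path and the Token.ARRAY path) now accept an optional <type> before the bracketed value list and throw a ParserException when the bracket is missing, while struct() tolerates a GTGT token that closes both the struct and an enclosing typed-array generic. The sketch below condenses the first case from the new fixture in bvt/parser/bigquery/0.txt and uses the same SQLUtils.parseStatements call as BigQueryResourceTest; the class name is hypothetical, and success is simply the absence of a ParserException.

import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.SQLUtils;
import com.alibaba.druid.sql.ast.SQLStatement;

import java.util.List;

public class TypedArrayParseSketch {
    public static void main(String[] args) {
        // Condensed from bvt/parser/bigquery/0.txt: a typed ARRAY<STRUCT<...>>[ ... ]
        // literal inside UNNEST([STRUCT(...)]), which previously failed to parse.
        String sql = "SELECT campaign.key AS campaign_key\n"
                + "FROM UNNEST([\n"
                + "  STRUCT(\n"
                + "    'trial_campaign' AS key,\n"
                + "    ARRAY<STRUCT<key STRING, start_date STRING>>[\n"
                + "      STRUCT('mission_1', '2024-11-17T17:00:00+00:00')\n"
                + "    ] AS missions\n"
                + "  )\n"
                + "]) AS campaign";

        List<SQLStatement> statements = SQLUtils.parseStatements(sql, DbType.bigquery);
        System.out.println("parsed " + statements.size() + " statement(s)");
    }
}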
@@ -4893,11 +4893,7 @@ public boolean visit(SQLUnnestTableSource x) {
final List<SQLName> columns = x.getColumns();
final String alias = x.getAlias();
if (alias != null) {
if (columns.size() > 0) {
print0(ucase ? " AS " : " as ");
} else {
print(' ');
}
print0(ucase ? " AS " : " as ");
print0(alias);
}

@@ -7511,6 +7507,12 @@ public boolean visit(SQLArrayExpr x) {
}
}

if (x.getDataType() != null) {
print('<');
x.getDataType().accept(this);
print('>');
}

boolean brace = arrayValue && (dbType == DbType.hive || dbType == DbType.spark || dbType == DbType.odps);

print(brace ? '(' : '[');
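The visitor hunks always print AS before an UNNEST alias and emit the array's element type between angle brackets ahead of the value list, which is why the expected output in 0.txt changes from ARRAY < FLOAT64 > [] to ARRAY<FLOAT64>[]. A round-trip sketch under the assumption that the SQLUtils.toSQLString(SQLObject, DbType) overload is available; the exact whitespace of the formatted string may differ from what is shown in the comments.

import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.SQLUtils;
import com.alibaba.druid.sql.ast.SQLStatement;

import java.util.List;

public class TypedArrayOutputSketch {
    public static void main(String[] args) {
        // Mirrors the Races fixture: an empty array with an explicit element type.
        String sql = "SELECT name, laps\n"
                + "FROM UNNEST([STRUCT('Nathan' AS name, ARRAY<FLOAT64>[] AS laps)]) AS t";

        List<SQLStatement> statements = SQLUtils.parseStatements(sql, DbType.bigquery);

        // With the visitor change the element type is printed back tightly as
        // ARRAY<FLOAT64>[] and the UNNEST alias keeps its AS keyword.
        System.out.println(SQLUtils.toSQLString(statements.get(0), DbType.bigquery));
    }
}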
@@ -4,6 +4,7 @@
import com.alibaba.druid.bvt.sql.SQLResourceTest;
import com.alibaba.druid.sql.SQLUtils;
import org.apache.commons.io.FileUtils;
import org.junit.Ignore;
import org.junit.Test;

import java.io.File;
@@ -20,25 +21,38 @@ protected File dir(String path) {
return new File(path);
}

@Ignore
@Test
public void dirTest() throws Exception {
// File dir = new File("/Users/wenshao/Downloads/goto_1894_sql");
// File dir = new File("/Users/wenshao/Downloads/BigQuery");
// File[] files = dir.listFiles();
// Arrays.sort(files, Comparator.comparing(e -> e.getName().toLowerCase()));
// for (File file : files) {
// if (file.getName().equals(".DS_Store")) {
// continue;
// }
//
// System.out.println(file.getAbsolutePath());
// String sql = FileUtils.readFileToString(file, StandardCharsets.UTF_8);
// try {
// SQLUtils.parseStatements(sql, DbType.bigquery);
// file.delete();
// } catch (Exception e) {
// e.printStackTrace();
// }
// }
// File dir = new File("/Users/wenshao/Downloads/goto_1894_sql");
File dir = new File("/Users/lingo/workspace/alibaba/druid/goto/all_etl_jobs/");
File[] files = dir.listFiles();
Arrays.sort(files, Comparator.comparing(e -> e.getName().toLowerCase()));
long total = files.length;
long success = 0L;
for (File file : files) {
if (file.getName().equals(".DS_Store")) {
continue;
}


String sql = FileUtils.readFileToString(file, StandardCharsets.UTF_8);
try {
SQLUtils.parseStatements(sql, DbType.bigquery);
success += 1;
} catch (Exception e) {
e.printStackTrace();
System.out.println(file.getAbsolutePath());
}
}
System.out.println("success: " + success + "/" + total);
}

@Ignore
@Test
public void fileTest() throws Exception {
File file = new File("/Users/lingo/workspace/alibaba/druid/goto/all_etl_jobs/gojek-mart-kafka-gofood_challenges_campaign.sql");
String sql = FileUtils.readFileToString(file, StandardCharsets.UTF_8);
SQLUtils.parseStatements(sql, DbType.bigquery);
}
}
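BigQueryResourceTest now marks both local-directory smoke tests with @Ignore, since they point at absolute paths on the author's machine; the regression coverage lives in the checked-in fixture below, where each case is an input statement, a short dashed separator, the expected formatted output, and a long dashed line between cases. The following self-contained sketch plays back one such pair; the class name is hypothetical, it assumes the SQLUtils.toSQLString(SQLObject, DbType) overload, and whether the strings match character for character depends on the formatting options SQLResourceTest actually applies.

import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.SQLUtils;
import com.alibaba.druid.sql.ast.SQLStatement;

import java.util.List;

public class ResourceCaseSketch {
    public static void main(String[] args) {
        // One input/expected pair in the style of bvt/parser/bigquery/0.txt.
        String input = "select cast(json_parse(tickets_fields_value) as array<varchar>)"
                + " as tickets_fields_value";
        String expected = "SELECT CAST(json_parse(tickets_fields_value) AS ARRAY<varchar>)"
                + " AS tickets_fields_value";

        List<SQLStatement> statements = SQLUtils.parseStatements(input, DbType.bigquery);
        String formatted = SQLUtils.toSQLString(statements.get(0), DbType.bigquery);

        System.out.println(formatted.equals(expected) ? "OK" : "MISMATCH:\n" + formatted);
    }
}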
43 changes: 39 additions & 4 deletions core/src/test/resources/bvt/parser/bigquery/0.txt
@@ -1,3 +1,38 @@
SELECT
campaign.key AS campaign_key,
campaign.start_date AS campaign_start_date,
campaign.end_date AS campaign_end_date,
FROM
UNNEST([
STRUCT(
'trial_campaign' AS key,
'2024-11-17T17:00:00+00:00' AS start_date,
'2024-12-01T16:59:59+00:00' AS end_date,
ARRAY<STRUCT<key STRING, start_date STRING, end_date STRING>>[
STRUCT('mission_1', '2024-11-17T17:00:00+00:00', '2024-11-24T16:59:59+00:00'),
STRUCT('mission_2', '2024-11-24T17:00:00+00:00', '2024-12-01T16:59:59+00:00')
] AS missions
)
]) AS campaign
--------------------
SELECT campaign.key AS campaign_key, campaign.start_date AS campaign_start_date, campaign.end_date AS campaign_end_date
FROM UNNEST([STRUCT('trial_campaign' AS key, '2024-11-17T17:00:00+00:00' AS start_date, '2024-12-01T16:59:59+00:00' AS end_date, ARRAY<STRUCT<key STRING, start_date STRING, end_date STRING>>[STRUCT('mission_1', '2024-11-17T17:00:00+00:00', '2024-11-24T16:59:59+00:00'),
STRUCT('mission_2', '2024-11-24T17:00:00+00:00', '2024-12-01T16:59:59+00:00')] AS missions)]) AS campaign
------------------------------------------------------------------------------------------------------------------------
CREATE TEMP FUNCTION invalidRoute(polyline1 STRING)
RETURNS BOOL
LANGUAGE js AS
"return (lengthOfEncodedPolyline(polyline1) <= 1)"
OPTIONS (library="gs://bi_cloud_storage_ees6see6/public/lib/polyutil_new.js");
--------------------
CREATE TEMPORARY FUNCTION invalidRoute (
polyline1 STRING
)
RETURNS BOOL
LANGUAGE js
OPTIONS (library = 'gs://bi_cloud_storage_ees6see6/public/lib/polyutil_new.js')
AS """return (lengthOfEncodedPolyline(polyline1) <= 1)""";
------------------------------------------------------------------------------------------------------------------------
create temporary function poly(polygon string)
returns string language js as """
var res = JSON.parse(polygon);
@@ -202,7 +237,7 @@ WITH Races AS (
STRUCT('Lewandowski' AS name, [25.0, 25.7, 26.3, 27.2] AS laps),
STRUCT('Kipketer' AS name, [23.2, 26.1, 27.3, 29.4] AS laps),
STRUCT('Berian' AS name, [23.7, 26.1, 27.0, 29.3] AS laps),
STRUCT('Nathan' AS name, ARRAY < FLOAT64 > [] AS laps),
STRUCT('Nathan' AS name, ARRAY<FLOAT64>[] AS laps),
STRUCT('David' AS name, NULL AS laps)] AS participants
)
SELECT name, sum(duration) AS finish_time
@@ -550,7 +585,7 @@ FROM UNNEST([2, 1, -2, 3, -2, 1, 2]) AS x;
--------------------
SELECT ARRAY_AGG(x LIMIT 5) AS array_agg
FROM UNNEST([2, 1, -2, 3, -2,
1, 2]) x;
1, 2]) AS x;
------------------------------------------------------------------------------------------------------------------------
ASSERT (
(SELECT COUNT(*) > 5 FROM UNNEST([1, 2, 3, 4, 5, 6]))
@@ -673,6 +708,6 @@ SELECT
FROM professors
GROUP BY item;
------------------------------------------------------------------------------------------------------------------------
select cast(json_parse(tickets_fields_value) as array(varchar)) as tickets_fields_value
select cast(json_parse(tickets_fields_value) as array<varchar>) as tickets_fields_value
--------------------
SELECT CAST(json_parse(tickets_fields_value) AS ARRAY(varchar)) AS tickets_fields_value
SELECT CAST(json_parse(tickets_fields_value) AS ARRAY<varchar>) AS tickets_fields_value