[SPARK-50313][SQL][TESTS][FOLLOWUP] Regenerate golden files for Java 21
### What changes were proposed in this pull request?
This PR uses the command

```
java -version
openjdk version "21.0.5" 2024-10-15 LTS
OpenJDK Runtime Environment Zulu21.38+21-CA (build 21.0.5+11-LTS)
OpenJDK 64-Bit Server VM Zulu21.38+21-CA (build 21.0.5+11-LTS, mixed mode, sharing)

SPARK_GENERATE_GOLDEN_FILES=1 build/sbt "sql/testOnly org.apache.spark.sql.SQLQueryTestSuite"
```

to regenerate golden files for Java 21.
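
For iterating on a single test, `SQLQueryTestSuite` also accepts a ScalaTest substring filter (`-z`, passed through sbt after `--`), so the whole suite does not have to be rerun. A minimal sketch, assuming the file regenerated here is the `try_aggregates.sql` test (the exact file names are not shown in this excerpt):

```
# Hypothetical targeted regeneration: run only the queries whose test
# name contains "try_aggregates" and rewrite their golden output.
SPARK_GENERATE_GOLDEN_FILES=1 build/sbt \
  "sql/testOnly org.apache.spark.sql.SQLQueryTestSuite -- -z try_aggregates"
```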

### Why are the changes needed?
The golden files need to be regenerated so that `SQLQueryTestSuite` passes when run on Java 21.

### Does this PR introduce _any_ user-facing change?
No

### How was this patch tested?
Pass GitHub Actions

### Was this patch authored or co-authored using generative AI tooling?
No

Closes apache#48886 from LuciferYang/SPARK-50313-FOLLOWUP.

Authored-by: yangjie01 <[email protected]>
Signed-off-by: yangjie01 <[email protected]>
LuciferYang committed Nov 19, 2024
1 parent b74aa8c commit 87a5b37
Showing 4 changed files with 1,214 additions and 46 deletions.
@@ -0,0 +1,299 @@
-- Automatically generated by SQLQueryTestSuite
-- !query
SELECT try_sum(col) FROM VALUES (5), (10), (15) AS tab(col)
-- !query schema
struct<try_sum(col):bigint>
-- !query output
30


-- !query
SELECT try_sum(col) FROM VALUES (5.0), (10.0), (15.0) AS tab(col)
-- !query schema
struct<try_sum(col):decimal(13,1)>
-- !query output
30.0


-- !query
SELECT try_sum(col) FROM VALUES (NULL), (10), (15) AS tab(col)
-- !query schema
struct<try_sum(col):bigint>
-- !query output
25


-- !query
SELECT try_sum(col) FROM VALUES (NULL), (NULL) AS tab(col)
-- !query schema
struct<try_sum(col):double>
-- !query output
NULL


-- !query
SELECT try_sum(col) FROM VALUES (9223372036854775807L), (1L) AS tab(col)
-- !query schema
struct<try_sum(col):bigint>
-- !query output
NULL


-- !query
SELECT try_sum(col) FROM VALUES (98765432109876543210987654321098765432BD), (98765432109876543210987654321098765432BD) AS tab(col)
-- !query schema
struct<try_sum(col):decimal(38,0)>
-- !query output
NULL


-- !query
SELECT try_sum(col) FROM VALUES (interval '1 months'), (interval '1 months') AS tab(col)
-- !query schema
struct<try_sum(col):interval month>
-- !query output
0-2


-- !query
SELECT try_sum(col) FROM VALUES (interval '2147483647 months'), (interval '1 months') AS tab(col)
-- !query schema
struct<try_sum(col):interval month>
-- !query output
NULL


-- !query
SELECT try_sum(col) FROM VALUES (interval '1 seconds'), (interval '1 seconds') AS tab(col)
-- !query schema
struct<try_sum(col):interval second>
-- !query output
0 00:00:02.000000000


-- !query
SELECT try_sum(col) FROM VALUES (interval '106751991 DAYS'), (interval '1 DAYS') AS tab(col)
-- !query schema
struct<try_sum(col):interval day>
-- !query output
NULL


-- !query
SELECT try_sum(col / 0) FROM VALUES (5), (10), (15) AS tab(col)
-- !query schema
struct<try_sum((col / 0)):double>
-- !query output
NULL


-- !query
SELECT try_sum(col / 0) FROM VALUES (5.0), (10.0), (15.0) AS tab(col)
-- !query schema
struct<try_sum((col / 0)):decimal(18,6)>
-- !query output
NULL


-- !query
SELECT try_sum(col / 0) FROM VALUES (NULL), (10), (15) AS tab(col)
-- !query schema
struct<try_sum((col / 0)):double>
-- !query output
NULL


-- !query
SELECT try_sum(col + 1L) FROM VALUES (9223372036854775807L), (1L) AS tab(col)
-- !query schema
struct<try_sum((col + 1)):bigint>
-- !query output
-9223372036854775806


-- !query
SELECT try_sum(col / 0) FROM VALUES (interval '1 months'), (interval '1 months') AS tab(col)
-- !query schema
struct<>
-- !query output
org.apache.spark.SparkArithmeticException
{
  "errorClass" : "INTERVAL_DIVIDED_BY_ZERO",
  "sqlState" : "22012",
  "queryContext" : [ {
    "objectType" : "",
    "objectName" : "",
    "startIndex" : 16,
    "stopIndex" : 22,
    "fragment" : "col / 0"
  } ]
}


-- !query
SELECT try_sum(col / 0) FROM VALUES (interval '1 seconds'), (interval '1 seconds') AS tab(col)
-- !query schema
struct<>
-- !query output
org.apache.spark.SparkArithmeticException
{
  "errorClass" : "INTERVAL_DIVIDED_BY_ZERO",
  "sqlState" : "22012",
  "queryContext" : [ {
    "objectType" : "",
    "objectName" : "",
    "startIndex" : 16,
    "stopIndex" : 22,
    "fragment" : "col / 0"
  } ]
}


-- !query
SELECT try_avg(col) FROM VALUES (5), (10), (15) AS tab(col)
-- !query schema
struct<try_avg(col):double>
-- !query output
10.0


-- !query
SELECT try_avg(col) FROM VALUES (5.0), (10.0), (15.0) AS tab(col)
-- !query schema
struct<try_avg(col):decimal(7,5)>
-- !query output
10.00000


-- !query
SELECT try_avg(col) FROM VALUES (NULL), (10), (15) AS tab(col)
-- !query schema
struct<try_avg(col):double>
-- !query output
12.5


-- !query
SELECT try_avg(col) FROM VALUES (NULL), (NULL) AS tab(col)
-- !query schema
struct<try_avg(col):double>
-- !query output
NULL


-- !query
SELECT try_avg(col) FROM VALUES (9223372036854775807L), (1L) AS tab(col)
-- !query schema
struct<try_avg(col):double>
-- !query output
4.611686018427388E18


-- !query
SELECT try_avg(col) FROM VALUES (98765432109876543210987654321098765432BD), (98765432109876543210987654321098765432BD) AS tab(col)
-- !query schema
struct<try_avg(col):decimal(38,4)>
-- !query output
NULL


-- !query
SELECT try_avg(col) FROM VALUES (interval '1 months'), (interval '1 months') AS tab(col)
-- !query schema
struct<try_avg(col):interval year to month>
-- !query output
0-1


-- !query
SELECT try_avg(col) FROM VALUES (interval '2147483647 months'), (interval '1 months') AS tab(col)
-- !query schema
struct<try_avg(col):interval year to month>
-- !query output
NULL


-- !query
SELECT try_avg(col) FROM VALUES (interval '1 seconds'), (interval '1 seconds') AS tab(col)
-- !query schema
struct<try_avg(col):interval day to second>
-- !query output
0 00:00:01.000000000


-- !query
SELECT try_avg(col) FROM VALUES (interval '106751991 DAYS'), (interval '1 DAYS') AS tab(col)
-- !query schema
struct<try_avg(col):interval day to second>
-- !query output
NULL


-- !query
SELECT try_avg(col / 0) FROM VALUES (5), (10), (15) AS tab(col)
-- !query schema
struct<try_avg((col / 0)):double>
-- !query output
NULL


-- !query
SELECT try_avg(col / 0) FROM VALUES (5.0), (10.0), (15.0) AS tab(col)
-- !query schema
struct<try_avg((col / 0)):decimal(12,10)>
-- !query output
NULL


-- !query
SELECT try_avg(col / 0) FROM VALUES (NULL), (10), (15) AS tab(col)
-- !query schema
struct<try_avg((col / 0)):double>
-- !query output
NULL


-- !query
SELECT try_avg(col + 1L) FROM VALUES (9223372036854775807L), (1L) AS tab(col)
-- !query schema
struct<try_avg((col + 1)):double>
-- !query output
-4.611686018427388E18


-- !query
SELECT try_avg(col / 0) FROM VALUES (interval '1 months'), (interval '1 months') AS tab(col)
-- !query schema
struct<>
-- !query output
org.apache.spark.SparkArithmeticException
{
  "errorClass" : "INTERVAL_DIVIDED_BY_ZERO",
  "sqlState" : "22012",
  "queryContext" : [ {
    "objectType" : "",
    "objectName" : "",
    "startIndex" : 16,
    "stopIndex" : 22,
    "fragment" : "col / 0"
  } ]
}


-- !query
SELECT try_avg(col / 0) FROM VALUES (interval '1 seconds'), (interval '1 seconds') AS tab(col)
-- !query schema
struct<>
-- !query output
org.apache.spark.SparkArithmeticException
{
  "errorClass" : "INTERVAL_DIVIDED_BY_ZERO",
  "sqlState" : "22012",
  "queryContext" : [ {
    "objectType" : "",
    "objectName" : "",
    "startIndex" : 16,
    "stopIndex" : 22,
    "fragment" : "col / 0"
  } ]
}
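
Taken together, the regenerated outputs encode the `try_*` contract: overflow and all-NULL inputs yield `NULL`, while an error raised while evaluating the argument expression itself (here, dividing an interval by zero) still propagates as an exception. A single result can be spot-checked against a local build; a sketch assuming a built checkout with the `spark-sql` launcher available:

```
# Hypothetical spot-check of the bigint-overflow case recorded above:
# try_sum returns NULL on long overflow, matching the golden output.
./bin/spark-sql -e "SELECT try_sum(col) FROM VALUES (9223372036854775807L), (1L) AS tab(col)"
# expected output: NULL
```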