diff --git a/.github/workflows/build_test.yml b/.github/workflows/build_test.yml
index 6f1f69d72..8785e907a 100644
--- a/.github/workflows/build_test.yml
+++ b/.github/workflows/build_test.yml
@@ -8,9 +8,9 @@ jobs:
     steps:
       - name: Check out repository code
         uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # pin@v3.5.2
-      - uses: prefix-dev/setup-pixi@v0.5.1
+      - uses: prefix-dev/setup-pixi@v0.8.1
         with:
-          pixi-version: v0.13.0
+          pixi-version: v0.27.1
           cache: true
       - name: Cache
         uses: actions/cache@v3
@@ -39,9 +39,9 @@ jobs:
     steps:
       - name: Check out repository code
         uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # pin@v3.5.2
-      - uses: prefix-dev/setup-pixi@v0.5.1
+      - uses: prefix-dev/setup-pixi@v0.8.1
         with:
-          pixi-version: v0.13.0
+          pixi-version: v0.27.1
       - name: Cache
         uses: actions/cache@v3
         with:
@@ -68,9 +68,9 @@ jobs:
     steps:
       - name: Check out repository code
         uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # pin@v3.5.2
-      - uses: prefix-dev/setup-pixi@v0.5.1
+      - uses: prefix-dev/setup-pixi@v0.8.1
         with:
-          pixi-version: v0.13.0
+          pixi-version: v0.27.1
       - name: Cache
         uses: actions/cache@v3
         with:
@@ -156,9 +156,9 @@ jobs:
     steps:
       - name: Check out repository code
         uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # pin@v3.5.2
-      - uses: prefix-dev/setup-pixi@v0.5.1
+      - uses: prefix-dev/setup-pixi@v0.8.1
         with:
-          pixi-version: v0.13.0
+          pixi-version: v0.27.1
       - name: Cache
         uses: actions/cache@v3
         with:
@@ -183,9 +183,9 @@ jobs:
     steps:
       - name: Check out repository code
         uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # pin@v3.5.2
-      - uses: prefix-dev/setup-pixi@v0.5.1
+      - uses: prefix-dev/setup-pixi@v0.8.1
         with:
-          pixi-version: v0.13.0
+          pixi-version: v0.27.1
       - name: Cache
         uses: actions/cache@v3
         with:
@@ -210,9 +210,9 @@ jobs:
     steps:
       - name: Check out repository code
         uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # pin@v3.5.2
-      - uses: prefix-dev/setup-pixi@v0.5.1
+      - uses: prefix-dev/setup-pixi@v0.8.1
         with:
-          pixi-version: v0.13.0
+          pixi-version: v0.27.1
       - name: Cache
         uses: actions/cache@v3
         with:
@@ -238,9 +238,9 @@ jobs:
     steps:
       - name: Check out repository code
         uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # pin@v3.5.2
-      - uses: prefix-dev/setup-pixi@v0.5.1
+      - uses: prefix-dev/setup-pixi@v0.8.1
         with:
-          pixi-version: v0.13.0
+          pixi-version: v0.27.1
       - name: Cache
         uses: actions/cache@v3
         with:
@@ -265,9 +265,9 @@ jobs:
     steps:
       - name: Check out repository code
         uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # pin@v3.5.2
-      - uses: prefix-dev/setup-pixi@v0.5.1
+      - uses: prefix-dev/setup-pixi@v0.8.1
         with:
-          pixi-version: v0.13.0
+          pixi-version: v0.27.1
       - name: Cache
         uses: actions/cache@v3
         with:
@@ -319,7 +319,7 @@ jobs:
           ls -la
           python -m pip install vegafusion-*.whl
           python -m pip install vegafusion_python_embed-*manylinux_2_17_x86_64*.whl
-          python -m pip install pytest vega-datasets polars-lts-cpu duckdb==0.9.2 "vl-convert-python>=1.0.1rc1" scikit-image pandas==2.0
+          python -m pip install pytest vega-datasets polars-lts-cpu "duckdb>=1.0" "vl-convert-python>=1.0.1rc1" scikit-image "pandas>=2.2"
       - name: Test lazy imports
         working-directory: python/vegafusion/
         run: python checks/check_lazy_imports.py
@@ -353,7 +353,7 @@ jobs:
           ls -la
           python -m pip install vegafusion-*.whl
           python -m pip install vegafusion_python_embed-*macosx_10_*_x86_64.whl
-          python -m pip install pytest vega-datasets polars-lts-cpu duckdb==0.9.2 vl-convert-python scikit-image pandas==2.0
+          python -m pip install pytest 
vega-datasets polars-lts-cpu "duckdb>=1.0" vl-convert-python scikit-image "pandas>=2.2" python -m pip install pyarrow==10.0 altair==5.1.2 - name: Test vegafusion working-directory: python/vegafusion/ @@ -389,7 +389,7 @@ jobs: python -m pip install $vegafusion python -m pip install $vegafusion_python_embed - python -m pip install pytest vega-datasets polars[timezone] duckdb==0.9.2 vl-convert-python scikit-image + python -m pip install pytest vega-datasets polars[timezone] "duckdb>=1.0" vl-convert-python scikit-image - name: Test vegafusion working-directory: python/vegafusion/ run: pytest @@ -403,9 +403,9 @@ jobs: steps: - name: Check out repository code uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # pin@v3.5.2 - - uses: prefix-dev/setup-pixi@v0.5.1 + - uses: prefix-dev/setup-pixi@v0.8.1 with: - pixi-version: v0.13.0 + pixi-version: v0.27.1 - name: Cache uses: actions/cache@v3 with: @@ -455,9 +455,9 @@ jobs: steps: - name: Check out repository code uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # pin@v3.5.2 - - uses: prefix-dev/setup-pixi@v0.5.1 + - uses: prefix-dev/setup-pixi@v0.8.1 with: - pixi-version: v0.13.0 + pixi-version: v0.27.1 - name: Cache uses: actions/cache@v3 with: @@ -564,9 +564,9 @@ jobs: steps: - name: Check out repository code uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # pin@v3.5.2 - - uses: prefix-dev/setup-pixi@v0.5.1 + - uses: prefix-dev/setup-pixi@v0.8.1 with: - pixi-version: v0.13.0 + pixi-version: v0.27.1 - name: Cache uses: actions/cache@v3 with: @@ -596,9 +596,9 @@ jobs: steps: - name: Check out repository code uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # pin@v3.5.2 - - uses: prefix-dev/setup-pixi@v0.5.1 + - uses: prefix-dev/setup-pixi@v0.8.1 with: - pixi-version: v0.13.0 + pixi-version: v0.27.1 - name: Cache uses: actions/cache@v3 with: @@ -627,9 +627,9 @@ jobs: steps: - name: Check out repository code uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # pin@v3.5.2 - - uses: prefix-dev/setup-pixi@v0.5.1 + - uses: prefix-dev/setup-pixi@v0.8.1 with: - pixi-version: v0.13.0 + pixi-version: v0.27.1 - name: Cache uses: actions/cache@v3 with: diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 000000000..428398df2 --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,6 @@ +{ + "[rust]": { + "editor.defaultFormatter": "rust-lang.rust-analyzer", + "editor.formatOnSave": true + } +} diff --git a/Cargo.lock b/Cargo.lock index 4324d33f3..15a9a737b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -156,9 +156,9 @@ checksum = "96d30a06541fbafbc7f82ed10c06164cfbd2c401138f6addd8404629c4b16711" [[package]] name = "arrow" -version = "50.0.0" +version = "51.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa285343fba4d829d49985bdc541e3789cf6000ed0e84be7c039438df4a4e78c" +checksum = "219d05930b81663fd3b32e3bde8ce5bff3c4d23052a99f11a8fa50a3b47b2658" dependencies = [ "arrow-arith", "arrow-array", @@ -178,9 +178,9 @@ dependencies = [ [[package]] name = "arrow-arith" -version = "50.0.0" +version = "51.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "753abd0a5290c1bcade7c6623a556f7d1659c5f4148b140b5b63ce7bd1a45705" +checksum = "0272150200c07a86a390be651abdd320a2d12e84535f0837566ca87ecd8f95e0" dependencies = [ "arrow-array", "arrow-buffer", @@ -193,9 +193,9 @@ dependencies = [ [[package]] name = "arrow-array" -version = "50.0.0" +version = "51.0.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "d390feeb7f21b78ec997a4081a025baef1e2e0d6069e181939b61864c9779609" +checksum = "8010572cf8c745e242d1b632bd97bd6d4f40fefed5ed1290a8f433abaa686fea" dependencies = [ "ahash", "arrow-buffer", @@ -210,9 +210,9 @@ dependencies = [ [[package]] name = "arrow-buffer" -version = "50.0.0" +version = "51.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69615b061701bcdffbc62756bc7e85c827d5290b472b580c972ebbbf690f5aa4" +checksum = "0d0a2432f0cba5692bf4cb757469c66791394bac9ec7ce63c1afe74744c37b27" dependencies = [ "bytes", "half", @@ -221,28 +221,30 @@ dependencies = [ [[package]] name = "arrow-cast" -version = "50.0.0" +version = "51.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e448e5dd2f4113bf5b74a1f26531708f5edcacc77335b7066f9398f4bcf4cdef" +checksum = "9abc10cd7995e83505cc290df9384d6e5412b207b79ce6bdff89a10505ed2cba" dependencies = [ "arrow-array", "arrow-buffer", "arrow-data", "arrow-schema", "arrow-select", - "base64", + "atoi", + "base64 0.22.1", "chrono", "comfy-table", "half", "lexical-core", "num", + "ryu", ] [[package]] name = "arrow-csv" -version = "50.0.0" +version = "51.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46af72211f0712612f5b18325530b9ad1bfbdc87290d5fbfd32a7da128983781" +checksum = "95cbcba196b862270bf2a5edb75927380a7f3a163622c61d40cbba416a6305f2" dependencies = [ "arrow-array", "arrow-buffer", @@ -259,9 +261,9 @@ dependencies = [ [[package]] name = "arrow-data" -version = "50.0.0" +version = "51.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67d644b91a162f3ad3135ce1184d0a31c28b816a581e08f29e8e9277a574c64e" +checksum = "2742ac1f6650696ab08c88f6dd3f0eb68ce10f8c253958a18c943a68cd04aec5" dependencies = [ "arrow-buffer", "arrow-schema", @@ -271,9 +273,9 @@ dependencies = [ [[package]] name = "arrow-ipc" -version = "50.0.0" +version = "51.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "03dea5e79b48de6c2e04f03f62b0afea7105be7b77d134f6c5414868feefb80d" +checksum = "a42ea853130f7e78b9b9d178cb4cd01dee0f78e64d96c2949dc0a915d6d9e19d" dependencies = [ "arrow-array", "arrow-buffer", @@ -286,9 +288,9 @@ dependencies = [ [[package]] name = "arrow-json" -version = "50.0.0" +version = "51.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8950719280397a47d37ac01492e3506a8a724b3fb81001900b866637a829ee0f" +checksum = "eaafb5714d4e59feae964714d724f880511500e3569cc2a94d02456b403a2a49" dependencies = [ "arrow-array", "arrow-buffer", @@ -306,9 +308,9 @@ dependencies = [ [[package]] name = "arrow-ord" -version = "50.0.0" +version = "51.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ed9630979034077982d8e74a942b7ac228f33dd93a93b615b4d02ad60c260be" +checksum = "e3e6b61e3dc468f503181dccc2fc705bdcc5f2f146755fa5b56d0a6c5943f412" dependencies = [ "arrow-array", "arrow-buffer", @@ -321,9 +323,9 @@ dependencies = [ [[package]] name = "arrow-row" -version = "50.0.0" +version = "51.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "007035e17ae09c4e8993e4cb8b5b96edf0afb927cd38e2dff27189b274d83dcf" +checksum = "848ee52bb92eb459b811fb471175ea3afcf620157674c8794f539838920f9228" dependencies = [ "ahash", "arrow-array", @@ -336,18 +338,18 @@ dependencies = [ [[package]] name = "arrow-schema" -version = "50.0.0" +version = "51.0.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ff3e9c01f7cd169379d269f926892d0e622a704960350d09d331be3ec9e0029" +checksum = "02d9483aaabe910c4781153ae1b6ae0393f72d9ef757d38d09d450070cf2e528" dependencies = [ "bitflags 2.4.2", ] [[package]] name = "arrow-select" -version = "50.0.0" +version = "51.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ce20973c1912de6514348e064829e50947e35977bb9d7fb637dc99ea9ffd78c" +checksum = "849524fa70e0e3c5ab58394c770cb8f514d0122d20de08475f7b472ed8075830" dependencies = [ "ahash", "arrow-array", @@ -359,15 +361,16 @@ dependencies = [ [[package]] name = "arrow-string" -version = "50.0.0" +version = "51.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "00f3b37f2aeece31a2636d1b037dabb69ef590e03bdc7eb68519b51ec86932a7" +checksum = "9373cb5a021aee58863498c37eb484998ef13377f69989c6c5ccfbd258236cdb" dependencies = [ "arrow-array", "arrow-buffer", "arrow-data", "arrow-schema", "arrow-select", + "memchr", "num", "regex", "regex-syntax", @@ -605,6 +608,15 @@ dependencies = [ "syn 2.0.50", ] +[[package]] +name = "atoi" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f28d99ec8bfea296261ca1af174f24225171fea9664ba9003cbebee704810528" +dependencies = [ + "num-traits", +] + [[package]] name = "atomic-waker" version = "1.1.2" @@ -703,6 +715,12 @@ version = "0.21.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" +[[package]] +name = "base64" +version = "0.22.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" + [[package]] name = "bitflags" version = "1.3.2" @@ -1218,9 +1236,9 @@ dependencies = [ [[package]] name = "datafusion" -version = "36.0.0" +version = "38.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2b360b692bf6c6d6e6b6dbaf41a3be0020daeceac0f406aed54c75331e50dbb" +checksum = "05fb4eeeb7109393a0739ac5b8fd892f95ccef691421491c85544f7997366f68" dependencies = [ "ahash", "arrow", @@ -1234,9 +1252,11 @@ dependencies = [ "chrono", "dashmap", "datafusion-common", + "datafusion-common-runtime", "datafusion-execution", "datafusion-expr", "datafusion-functions", + "datafusion-functions-aggregate", "datafusion-functions-array", "datafusion-optimizer", "datafusion-physical-expr", @@ -1256,7 +1276,7 @@ dependencies = [ "parquet", "pin-project-lite", "rand", - "sqlparser 0.43.1", + "sqlparser", "tempfile", "tokio", "tokio-util", @@ -1268,9 +1288,9 @@ dependencies = [ [[package]] name = "datafusion-common" -version = "36.0.0" +version = "38.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37f343ccc298f440e25aa38ff82678291a7acc24061c7370ba6c0ff5cc811412" +checksum = "741aeac15c82f239f2fc17deccaab19873abbd62987be20023689b15fa72fa09" dependencies = [ "ahash", "arrow", @@ -1279,19 +1299,29 @@ dependencies = [ "arrow-schema", "chrono", "half", + "instant", "libc", "num_cpus", "object_store", "parquet", "pyo3", - "sqlparser 0.43.1", + "sqlparser", +] + +[[package]] +name = "datafusion-common-runtime" +version = "38.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e8ddfb8d8cb51646a30da0122ecfffb81ca16919ae9a3495a9e7468bdcd52b8" +dependencies = [ + "tokio", ] [[package]] name = "datafusion-execution" -version = "36.0.0" +version = "38.0.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f9c93043081487e335399a21ebf8295626367a647ac5cb87d41d18afad7d0f7" +checksum = "282122f90b20e8f98ebfa101e4bf20e718fd2684cf81bef4e8c6366571c64404" dependencies = [ "arrow", "chrono", @@ -1310,54 +1340,90 @@ dependencies = [ [[package]] name = "datafusion-expr" -version = "36.0.0" +version = "38.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e204d89909e678846b6a95f156aafc1ee5b36cb6c9e37ec2e1449b078a38c818" +checksum = "5478588f733df0dfd87a62671c7478f590952c95fa2fa5c137e3ff2929491e22" dependencies = [ "ahash", "arrow", "arrow-array", + "chrono", "datafusion-common", "paste", - "sqlparser 0.43.1", + "serde_json", + "sqlparser", "strum 0.26.1", "strum_macros 0.26.1", ] [[package]] name = "datafusion-functions" -version = "36.0.0" +version = "38.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "98f1c73f7801b2b8ba2297b3ad78ffcf6c1fc6b8171f502987eb9ad5cb244ee7" +checksum = "f4afd261cea6ac9c3ca1192fd5e9f940596d8e9208c5b1333f4961405db53185" dependencies = [ "arrow", - "base64", + "base64 0.22.1", + "blake2", + "blake3", + "chrono", "datafusion-common", "datafusion-execution", "datafusion-expr", + "datafusion-physical-expr", + "hashbrown 0.14.3", "hex", + "itertools 0.12.1", + "log", + "md-5", + "rand", + "regex", + "sha2", + "unicode-segmentation", + "uuid", +] + +[[package]] +name = "datafusion-functions-aggregate" +version = "38.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b36a6c4838ab94b5bf8f7a96ce6ce059d805c5d1dcaa6ace49e034eb65cd999" +dependencies = [ + "arrow", + "datafusion-common", + "datafusion-execution", + "datafusion-expr", + "datafusion-physical-expr-common", "log", + "paste", + "sqlparser", ] [[package]] name = "datafusion-functions-array" -version = "36.0.0" +version = "38.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42d16a0ddf2c991526f6ffe2f47a72c6da0b7354d6c32411dd20631fe2e38937" +checksum = "d5fdd200a6233f48d3362e7ccb784f926f759100e44ae2137a5e2dcb986a59c4" dependencies = [ "arrow", + "arrow-array", + "arrow-buffer", + "arrow-ord", + "arrow-schema", "datafusion-common", "datafusion-execution", "datafusion-expr", + "datafusion-functions", + "itertools 0.12.1", "log", "paste", ] [[package]] name = "datafusion-optimizer" -version = "36.0.0" +version = "38.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ae27e07bf1f04d327be5c2a293470879801ab5535204dc3b16b062fda195496" +checksum = "54f2820938810e8a2d71228fd6f59f33396aebc5f5f687fcbf14de5aab6a7e1a" dependencies = [ "arrow", "async-trait", @@ -1366,6 +1432,7 @@ dependencies = [ "datafusion-expr", "datafusion-physical-expr", "hashbrown 0.14.3", + "indexmap 2.2.3", "itertools 0.12.1", "log", "regex-syntax", @@ -1373,9 +1440,9 @@ dependencies = [ [[package]] name = "datafusion-physical-expr" -version = "36.0.0" +version = "38.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dde620cd9ef76a3bca9c754fb68854bd2349c49f55baf97e08001f9e967f6d6b" +checksum = "9adf8eb12716f52ddf01e09eb6c94d3c9b291e062c05c91b839a448bddba2ff8" dependencies = [ "ahash", "arrow", @@ -1384,46 +1451,56 @@ dependencies = [ "arrow-ord", "arrow-schema", "arrow-string", - "base64", - "blake2", - "blake3", + "base64 0.22.1", "chrono", "datafusion-common", "datafusion-execution", "datafusion-expr", + "datafusion-functions-aggregate", + "datafusion-physical-expr-common", "half", "hashbrown 0.14.3", 
"hex", "indexmap 2.2.3", "itertools 0.12.1", "log", - "md-5", "paste", "petgraph", - "rand", "regex", - "sha2", - "unicode-segmentation", - "uuid", +] + +[[package]] +name = "datafusion-physical-expr-common" +version = "38.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8d5472c3230584c150197b3f2c23f2392b9dc54dbfb62ad41e7e36447cfce4be" +dependencies = [ + "arrow", + "datafusion-common", + "datafusion-expr", ] [[package]] name = "datafusion-physical-plan" -version = "36.0.0" +version = "38.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a4c75fba9ea99d64b2246cbd2fcae2e6fc973e6616b1015237a616036506dd4" +checksum = "18ae750c38389685a8b62e5b899bbbec488950755ad6d218f3662d35b800c4fe" dependencies = [ "ahash", "arrow", "arrow-array", "arrow-buffer", + "arrow-ord", "arrow-schema", "async-trait", "chrono", "datafusion-common", + "datafusion-common-runtime", "datafusion-execution", "datafusion-expr", + "datafusion-functions-aggregate", "datafusion-physical-expr", + "datafusion-physical-expr-common", "futures", "half", "hashbrown 0.14.3", @@ -1435,14 +1512,13 @@ dependencies = [ "pin-project-lite", "rand", "tokio", - "uuid", ] [[package]] name = "datafusion-proto" -version = "36.0.0" +version = "38.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2292251d5441d88d92a90d1511d5a8c88759a6562ff38ac1711b1587e6bf19c4" +checksum = "e6f4d2df0e7ba676fe9c0b7cbc0768ffc8f736600b58d305c6c70555c1259bd4" dependencies = [ "arrow", "chrono", @@ -1455,16 +1531,18 @@ dependencies = [ [[package]] name = "datafusion-sql" -version = "36.0.0" +version = "38.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21474a95c3a62d113599d21b439fa15091b538bac06bd20be0bb2e7d22903c09" +checksum = "befc67a3cdfbfa76853f43b10ac27337821bb98e519ab6baf431fcc0bcfcafdb" dependencies = [ "arrow", + "arrow-array", "arrow-schema", "datafusion-common", "datafusion-expr", "log", - "sqlparser 0.43.1", + "sqlparser", + "strum 0.26.1", ] [[package]] @@ -2618,18 +2696,19 @@ dependencies = [ [[package]] name = "object_store" -version = "0.9.0" +version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d139f545f64630e2e3688fd9f81c470888ab01edeb72d13b4e86c566f1130000" +checksum = "b8718f8b65fdf67a45108d1548347d4af7d71fb81ce727bbf9e3b2535e079db3" dependencies = [ "async-trait", - "base64", + "base64 0.21.7", "bytes", "chrono", "futures", "humantime", "hyper", "itertools 0.12.1", + "md-5", "parking_lot 0.12.1", "percent-encoding", "quick-xml", @@ -2743,9 +2822,9 @@ dependencies = [ [[package]] name = "parquet" -version = "50.0.0" +version = "51.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "547b92ebf0c1177e3892f44c8f79757ee62e678d564a9834189725f2c5b7a750" +checksum = "096795d4f47f65fd3ee1ec5a98b77ab26d602f2cc785b0e4be5443add17ecc32" dependencies = [ "ahash", "arrow-array", @@ -2755,7 +2834,7 @@ dependencies = [ "arrow-ipc", "arrow-schema", "arrow-select", - "base64", + "base64 0.22.1", "brotli", "bytes", "chrono", @@ -3298,7 +3377,7 @@ version = "0.11.24" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c6920094eb85afde5e4a138be3f2de8bbdf28000f0029e72c45025a56b042251" dependencies = [ - "base64", + "base64 0.21.7", "bytes", "encoding_rs", "futures-core", @@ -3523,7 +3602,7 @@ version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"1c74cae0a4cf6ccbbf5f359f08efdf8ee7e1dc532573bf0db71968cb56b1448c" dependencies = [ - "base64", + "base64 0.21.7", ] [[package]] @@ -3770,18 +3849,9 @@ checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" [[package]] name = "sqlparser" -version = "0.41.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5cc2c25a6c66789625ef164b4c7d2e548d627902280c13710d33da8222169964" -dependencies = [ - "log", -] - -[[package]] -name = "sqlparser" -version = "0.43.1" +version = "0.45.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f95c4bae5aba7cd30bd506f7140026ade63cff5afd778af8854026f9606bf5d4" +checksum = "f7bbffee862a796d67959a89859d6b1046bb5016d63e23835ad0da182777bbe0" dependencies = [ "log", "sqlparser_derive", @@ -4179,7 +4249,7 @@ dependencies = [ "async-stream", "async-trait", "axum", - "base64", + "base64 0.21.7", "bytes", "h2", "http", @@ -4219,7 +4289,7 @@ version = "0.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0fddb2a37b247e6adcb9f239f4e5cefdcc5ed526141a416b943929f13aea2cce" dependencies = [ - "base64", + "base64 0.21.7", "bytes", "http", "http-body", @@ -4436,16 +4506,17 @@ name = "vegafusion-common" version = "1.6.9" dependencies = [ "arrow", - "base64", + "base64 0.21.7", "chrono", "datafusion-common", "datafusion-expr", + "datafusion-functions", "datafusion-proto", "jni", "object_store", "pyo3", "serde_json", - "sqlparser 0.41.0", + "sqlparser", "thiserror", ] @@ -4472,7 +4543,7 @@ dependencies = [ "regex", "serde", "serde_json", - "sqlparser 0.41.0", + "sqlparser", "thiserror", "tonic", "tonic-build", @@ -4488,6 +4559,7 @@ dependencies = [ "datafusion-common", "datafusion-expr", "pyo3", + "sqlparser", "vegafusion-common", ] @@ -4497,6 +4569,7 @@ version = "1.6.9" dependencies = [ "chrono", "chrono-tz", + "datafusion-functions", "datafusion-physical-expr", "lazy_static", "ordered-float 3.9.2", @@ -4548,7 +4621,7 @@ dependencies = [ "async-lock 2.8.0", "async-recursion", "async-trait", - "base64", + "base64 0.21.7", "bytes", "chrono", "chrono-tz", @@ -4556,6 +4629,7 @@ dependencies = [ "datafusion-common", "datafusion-expr", "datafusion-functions", + "datafusion-functions-array", "datafusion-optimizer", "datafusion-physical-expr", "deterministic-hash", @@ -4582,7 +4656,7 @@ dependencies = [ "rstest", "serde", "serde_json", - "sqlparser 0.41.0", + "sqlparser", "tempfile", "test-case", "tokio", @@ -4629,6 +4703,7 @@ dependencies = [ "datafusion", "datafusion-common", "datafusion-expr", + "datafusion-functions", "deterministic-hash", "lazy_static", "log", @@ -4642,7 +4717,7 @@ dependencies = [ "rstest_reuse", "serde", "serde_json", - "sqlparser 0.41.0", + "sqlparser", "tempfile", "tokio", "toml", diff --git a/Cargo.toml b/Cargo.toml index 0645f107f..4d9850d12 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -14,37 +14,40 @@ members = [ ] [workspace.dependencies] -arrow = { version = "50.0.0", default_features = false } -sqlparser = { version = "0.41.0" } -chrono = { version = "0.4.31", default_features = false } +arrow = { version = "51.0.0", default-features = false } +sqlparser = { version = "0.45.0" } +chrono = { version = "0.4.31", default-features = false } reqwest = { version = "0.11.22", default-features = false } tokio = { version = "1.36.0" } pyo3 = { version = "0.20.0" } pythonize = { version = "0.20.0" } -prost = { version = "0.12.1" } -prost-types = { version = "0.12.1" } -object_store = { version= "0.9.0" } +prost = { version = "0.12.3" } +prost-types = { 
version = "0.12.3" } +object_store = { version= "0.9.1" } [workspace.dependencies.datafusion] -version = "36.0.0" +version = "38.0.0" [workspace.dependencies.datafusion-common] -version = "36.0.0" +version = "38.0.0" [workspace.dependencies.datafusion-expr] -version = "36.0.0" +version = "38.0.0" [workspace.dependencies.datafusion-proto] -version = "36.0.0" +version = "38.0.0" [workspace.dependencies.datafusion-physical-expr] -version = "36.0.0" +version = "38.0.0" [workspace.dependencies.datafusion-optimizer] -version = "36.0.0" +version = "38.0.0" [workspace.dependencies.datafusion-functions] -version = "36.0.0" +version = "38.0.0" + +[workspace.dependencies.datafusion-functions-array] +version = "38.0.0" [profile.release] ## Tell `rustc` to use highest performance optimization and perform Link Time Optimization diff --git a/pixi.lock b/pixi.lock index 63e55e62c..fd0a29310 100644 --- a/pixi.lock +++ b/pixi.lock @@ -1,4 +1,4 @@ -version: 4 +version: 5 environments: default: channels: @@ -7,6 +7,7 @@ environments: linux-64: - conda: https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/noarch/_sysroot_linux-64_curr_repodata_hack-3-h69a702a_16.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.10-hd590300_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/altair-5.3.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/anyio-4.0.0-pyhd8ed1ab_0.conda @@ -15,7 +16,6 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/argon2-cffi-23.1.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/argon2-cffi-bindings-21.2.0-py310h2372a71_4.conda - conda: https://conda.anaconda.org/conda-forge/noarch/arrow-1.3.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/arrow-cpp-12.0.1-ha770c72_12_cpu.conda - conda: https://conda.anaconda.org/conda-forge/noarch/asttokens-2.4.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/async-lru-2.0.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/attrs-23.1.0-pyh71513ae_1.conda @@ -81,11 +81,9 @@ environments: - conda: https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-h267a509_2.conda - conda: https://conda.anaconda.org/conda-forge/noarch/future-0.18.3-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/gcc_impl_linux-64-13.2.0-h338b0a0_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/gettext-0.21.1-h27087fc_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/gflags-2.2.2-he1b5a44_1004.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/giflib-5.2.1-h0b41bf4_3.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/glog-0.6.0-h6f12383_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/gmp-6.2.1-h58526e2_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/graphite2-1.3.13-h58526e2_1001.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/h11-0.14.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-8.2.1-h3d44ed6_0.conda @@ -121,7 +119,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/jupyterlab_widgets-3.0.9-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jupytext-1.15.0-pyhcff175f_0.conda - conda: 
https://conda.anaconda.org/conda-forge/linux-64/jxrlib-1.1-h7f98852_2.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/kernel-headers_linux-64-2.6.32-he073ed8_16.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/kernel-headers_linux-64-3.10.0-h4a8ded7_16.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.1-h166bdaf_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.2-h659d440_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/lazy_loader-0.3-pyhd8ed1ab_0.conda @@ -150,22 +148,22 @@ environments: - conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.2.0-h807b86a_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-13.2.0-h69a702a_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-13.2.0-ha4646dd_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libglib-2.78.0-hebfc3b9_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libglib-2.80.3-h315aac3_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libgomp-13.2.0-h807b86a_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-2.12.0-h840a212_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libgrpc-1.56.2-h3905398_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-h166bdaf_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-2.1.5.1-hd590300_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-19_linux64_openblas.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.55.1-h47da74e_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.58.0-h47da74e_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hd590300_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libnuma-2.0.16-h0b41bf4_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.24-pthreads_h413a1c8_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.39-h753d276_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.43-h2797004_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libprotobuf-4.23.3-hd1fb520_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libsanitizer-13.2.0-h7e041cc_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libsodium-1.0.18-h36c2ea0_1.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.44.0-h2797004_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.46.0-hde9e2c9_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.0-h0841786_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-13.2.0-h7e041cc_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libthrift-0.19.0-hb90f79a_1.conda @@ -175,7 +173,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/linux-64/libuv-1.44.2-hd590300_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.3.2-hd590300_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.15-h0b41bf4_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.13-hd590300_5.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.1-h4ab18f5_1.conda - conda: 
https://conda.anaconda.org/conda-forge/linux-64/libzopfli-1.0.3-h9c3ff4c_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.4-hcb278e6_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/markdown-it-py-3.0.0-pyhd8ed1ab_0.conda @@ -200,17 +198,17 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/notebook-shim-0.2.3-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/numpy-1.26.0-py310hb13e2d6_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/openjdk-20.0.0-hfea2f88_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.0-h488ebb8_3.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.2-h488ebb8_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/openssl-3.1.4-hd590300_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/orc-1.9.0-h385abfd_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/outcome-1.3.0.post0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/overrides-7.4.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/packaging-23.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/pandas-2.0.3-py310h7cbd5c2_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/pandoc-3.1.3-h32600fe_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/pandoc-3.3-ha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pandocfilters-1.5.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/parso-0.8.3-pyhd8ed1ab_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.40-hc3806b6_0.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.44-h0f59acf_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pep517-0.13.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/pexpect-4.8.0-pyh1a96a4e_2.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/pickleshare-0.7.5-py_1003.tar.bz2 @@ -238,7 +236,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-cov-4.1.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/python-3.10.13-hd12c33a_0_cpython.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/python-duckdb-0.8.1-py310h8e3e826_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/python-duckdb-1.0.0-py310hea249c9_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-fastjsonschema-2.18.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-json-logger-2.0.7-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2023.3-pyhd8ed1ab_0.conda @@ -256,8 +254,8 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/rfc3339-validator-0.1.4-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/rfc3986-validator-0.1.1-pyh9f0ad1d_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/rpds-py-0.10.6-py310hcb5633a_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/rust-1.75.0-h70c747d_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/rust-std-x86_64-unknown-linux-gnu-1.75.0-h2c6d0dc_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/rust-1.80.1-h0a17960_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/noarch/rust-std-x86_64-unknown-linux-gnu-1.80.1-h2c6d0dc_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/s2n-1.3.54-h06160fa_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/scikit-image-0.21.0-py310hc6cd4ac_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/scipy-1.11.3-py310hb13e2d6_1.conda @@ -272,12 +270,12 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.5-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/stack_data-0.6.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/svt-av1-1.7.0-h59595ed_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/sysroot_linux-64-2.12-he073ed8_16.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/sysroot_linux-64-2.17-h4a8ded7_16.conda - conda: https://conda.anaconda.org/conda-forge/noarch/tenacity-8.2.3-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/terminado-0.17.1-pyh41d4057_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/tifffile-2023.9.26-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/tinycss2-1.2.1-pyhd8ed1ab_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-h2797004_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h4845f30_101.conda - conda: https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/tomlkit-0.12.2-pyha770c72_0.conda @@ -295,7 +293,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/uri-template-1.3.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/urllib3-2.0.7-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/vega_datasets-0.9.0-pyhd3deb0d_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/vl-convert-python-1.3.0-py310h2372a71_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/vl-convert-python-1.6.0-py310h5b4e0ec_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/voila-0.5.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/watchfiles-0.21.0-py310hcb5633a_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/wcwidth-0.2.9-pyhd8ed1ab_0.conda @@ -331,9 +329,9 @@ environments: - conda: https://conda.anaconda.org/conda-forge/linux-64/zeromq-4.3.5-h59595ed_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/zfp-1.0.0-h59595ed_4.conda - conda: https://conda.anaconda.org/conda-forge/noarch/zipp-3.17.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/zlib-1.2.13-hd590300_5.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/zlib-1.3.1-h4ab18f5_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/zlib-ng-2.0.7-h0b41bf4_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.5-hfc55251_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.6-ha6fb4c9_0.conda osx-64: - conda: https://conda.anaconda.org/conda-forge/noarch/altair-5.3.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/anyio-4.0.0-pyhd8ed1ab_0.conda @@ -343,7 +341,6 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/argon2-cffi-23.1.0-pyhd8ed1ab_0.conda - conda: 
https://conda.anaconda.org/conda-forge/osx-64/argon2-cffi-bindings-21.2.0-py310h6729b98_4.conda - conda: https://conda.anaconda.org/conda-forge/noarch/arrow-1.3.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/arrow-cpp-12.0.1-h694c41f_12_cpu.conda - conda: https://conda.anaconda.org/conda-forge/noarch/asttokens-2.4.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/async-lru-2.0.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/attrs-23.1.0-pyh71513ae_1.conda @@ -538,7 +535,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-cov-4.1.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/python-3.10.13-h00d2728_0_cpython.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/osx-64/python-duckdb-0.8.1-py310h1ba7dce_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/python-duckdb-1.0.0-py310he0a0c5d_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-fastjsonschema-2.18.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-json-logger-2.0.7-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2023.3-pyhd8ed1ab_0.conda @@ -555,8 +552,8 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/rfc3339-validator-0.1.4-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/rfc3986-validator-0.1.1-pyh9f0ad1d_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/rpds-py-0.10.6-py310h0e083fb_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/rust-1.75.0-h7e1429e_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/rust-std-x86_64-apple-darwin-1.75.0-h38e4360_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/rust-1.80.1-h6c54e5d_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/rust-std-x86_64-apple-darwin-1.80.1-h38e4360_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/scikit-image-0.21.0-py310h9e9d8ca_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/scipy-1.11.3-py310h2db466d_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/selenium-4.11.2-pyhd8ed1ab_1.conda @@ -591,7 +588,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/uri-template-1.3.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/urllib3-2.0.7-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/vega_datasets-0.9.0-pyhd3deb0d_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/osx-64/vl-convert-python-1.3.0-py310h7664a31_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/vl-convert-python-1.6.0-py310h936d840_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/voila-0.5.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/watchfiles-0.21.0-py310h0e083fb_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/wcwidth-0.2.9-pyhd8ed1ab_0.conda @@ -623,7 +620,6 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/argon2-cffi-21.3.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-arm64/argon2-cffi-bindings-21.2.0-py310h8e9501a_3.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/arrow-1.2.3-pyhd8ed1ab_0.tar.bz2 - - conda: 
https://conda.anaconda.org/conda-forge/osx-arm64/arrow-cpp-12.0.1-h1b749cb_8_cpu.conda - conda: https://conda.anaconda.org/conda-forge/noarch/asttokens-2.2.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/async-lru-2.0.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/async_generator-1.10-py_0.tar.bz2 @@ -848,7 +844,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-cov-4.1.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/python-3.10.12-h01493a6_0_cpython.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/python-duckdb-0.8.1-py310h1d8123b_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/python-duckdb-1.0.0-py310hcf9f62a_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-fastjsonschema-2.18.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-json-logger-2.0.7-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2023.3-pyhd8ed1ab_0.conda @@ -864,8 +860,8 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/rfc3339-validator-0.1.4-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/rfc3986-validator-0.1.1-pyh9f0ad1d_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-arm64/rpds-py-0.9.2-py310had9acf8_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/rust-1.75.0-h4ff7c5d_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/rust-std-aarch64-apple-darwin-1.75.0-hf6ec828_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/rust-1.80.1-h4ff7c5d_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/rust-std-aarch64-apple-darwin-1.80.1-hf6ec828_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/scikit-image-0.21.0-py310h1253130_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/scipy-1.11.1-py310h0975f3d_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/selenium-4.11.2-pyhd8ed1ab_1.conda @@ -898,7 +894,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/uri-template-1.3.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/urllib3-2.0.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/vega_datasets-0.9.0-pyhd3deb0d_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/vl-convert-python-1.3.0-py310hd125d64_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/vl-convert-python-1.6.0-py310ha6dd24b_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/voila-0.5.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/watchfiles-0.21.0-py310hd442715_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/wcwidth-0.2.6-pyhd8ed1ab_0.conda @@ -929,7 +925,6 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/argon2-cffi-23.1.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/argon2-cffi-bindings-21.2.0-py310h8d17308_4.conda - conda: https://conda.anaconda.org/conda-forge/noarch/arrow-1.3.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/arrow-cpp-12.0.1-h57928b3_12_cpu.conda - conda: https://conda.anaconda.org/conda-forge/noarch/asttokens-2.4.1-pyhd8ed1ab_0.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/async-lru-2.0.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/attrs-23.1.0-pyh71513ae_1.conda @@ -1116,7 +1111,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-cov-4.1.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/python-3.10.13-h4de0772_0_cpython.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/win-64/python-duckdb-0.8.1-py310hb400963_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/python-duckdb-1.0.0-py310h9e98ed7_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-fastjsonschema-2.18.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-json-logger-2.0.7-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2023.3-pyhd8ed1ab_0.conda @@ -1134,8 +1129,8 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/rfc3339-validator-0.1.4-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/rfc3986-validator-0.1.1-pyh9f0ad1d_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/win-64/rpds-py-0.10.6-py310h87d50f1_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/rust-1.75.0-hf8d6059_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/rust-std-x86_64-pc-windows-msvc-1.75.0-h17fc481_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/rust-1.80.1-hf8d6059_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/rust-std-x86_64-pc-windows-msvc-1.80.1-h17fc481_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/scikit-image-0.21.0-py310h00ffb61_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/scipy-1.11.3-py310hf667824_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/selenium-4.11.2-pyhd8ed1ab_1.conda @@ -1173,11 +1168,11 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/uri-template-1.3.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/urllib3-2.0.7-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/vc-14.3-h64f974e_17.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/vc14_runtime-14.36.32532-hdcecf7f_17.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/vc14_runtime-14.40.33810-ha82c5b3_20.conda - conda: https://conda.anaconda.org/conda-forge/noarch/vega_datasets-0.9.0-pyhd3deb0d_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/win-64/vl-convert-python-1.3.0-py310hdc45392_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/vl-convert-python-1.6.0-py310hb47754f_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/voila-0.5.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/vs2015_runtime-14.36.32532-h05e6639_17.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/vs2015_runtime-14.40.33810-h3bf8584_20.conda - conda: https://conda.anaconda.org/conda-forge/win-64/watchfiles-0.21.0-py310h87d50f1_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/wcwidth-0.2.9-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/webcolors-1.13-pyhd8ed1ab_0.conda @@ -1234,6 +1229,20 @@ packages: license_family: BSD size: 23621 timestamp: 1650670423406 +- kind: conda + name: _sysroot_linux-64_curr_repodata_hack + version: '3' + build: h69a702a_16 + build_number: 16 
+ subdir: noarch + noarch: generic + url: https://conda.anaconda.org/conda-forge/noarch/_sysroot_linux-64_curr_repodata_hack-3-h69a702a_16.conda + sha256: 6ac30acdbfd3136ee7a1de28af4355165291627e905715611726e674499b0786 + md5: 1c005af0c6ff22814b7c52ee448d4bea + license: LGPL-2.0-or-later AND LGPL-2.0-or-later WITH exceptions AND GPL-2.0-or-later AND MPL-2.0 + license_family: GPL + size: 20798 + timestamp: 1720621358501 - kind: conda name: alsa-lib version: 1.2.10 @@ -1600,75 +1609,6 @@ packages: license_family: Apache size: 100096 timestamp: 1696129131844 -- kind: conda - name: arrow-cpp - version: 12.0.1 - build: h1b749cb_8_cpu - build_number: 8 - subdir: osx-arm64 - url: https://conda.anaconda.org/conda-forge/osx-arm64/arrow-cpp-12.0.1-h1b749cb_8_cpu.conda - sha256: dda0165d3019cb226f17c075ae0956b03dcb24c3ebfe96135a36be18a55d2a94 - md5: f6a6ab36e1e5af42ba0d1afa707caa0c - depends: - - libarrow ==12.0.1 hb74b275_8_cpu - - libprotobuf >=4.23.3,<4.23.4.0a0 - arch: aarch64 - platform: osx - license: Apache-2.0 - license_family: APACHE - size: 30292 - timestamp: 1691481385878 -- kind: conda - name: arrow-cpp - version: 12.0.1 - build: h57928b3_12_cpu - build_number: 12 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/arrow-cpp-12.0.1-h57928b3_12_cpu.conda - sha256: d2ade2cb6f0e45014d234dca44e8650e860c16c11917bbb1e61ffc8900f8bf7d - md5: 7c9ce3ded343e8b337d2ef927e93dd6a - depends: - - libarrow ==12.0.1 hba3d5be_12_cpu - arch: x86_64 - platform: win - license: Apache-2.0 - license_family: APACHE - size: 30331 - timestamp: 1694159240993 -- kind: conda - name: arrow-cpp - version: 12.0.1 - build: h694c41f_12_cpu - build_number: 12 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/arrow-cpp-12.0.1-h694c41f_12_cpu.conda - sha256: 1a0401269292ceeccdd6ad425b8fb2c4d32539bef18278e5cb658edae67f25fe - md5: 6143c98042c8aafae206812d445af9a8 - depends: - - libarrow ==12.0.1 hca2412d_12_cpu - arch: x86_64 - platform: osx - license: Apache-2.0 - license_family: APACHE - size: 29984 - timestamp: 1694159698773 -- kind: conda - name: arrow-cpp - version: 12.0.1 - build: ha770c72_12_cpu - build_number: 12 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/arrow-cpp-12.0.1-ha770c72_12_cpu.conda - sha256: 4230cdd08f01b9d6e58852d2a920410027aaaaf2edd0078a28ac3293c111d8cb - md5: 7ff70243d479d882a4c3cd93902a11d3 - depends: - - libarrow ==12.0.1 h1ed0495_12_cpu - arch: x86_64 - platform: linux - license: Apache-2.0 - license_family: APACHE - size: 29822 - timestamp: 1694158713374 - kind: conda name: asttokens version: 2.2.1 @@ -2795,10 +2735,8 @@ packages: - libcurl >=8.3.0,<9.0a0 - libgcc-ng >=12 - libstdcxx-ng >=12 - - libzlib >=1.2.13,<1.3.0a0 + - libzlib >=1.2.13,<2.0.0a0 - openssl >=3.1.3,<4.0a0 - arch: x86_64 - platform: linux license: Apache-2.0 license_family: Apache size: 3432093 @@ -3039,12 +2977,10 @@ packages: depends: - libgcc-ng >=12 - libstdcxx-ng >=12 - - libzlib >=1.2.13,<1.3.0a0 + - libzlib >=1.2.13,<2.0.0a0 - lz4-c >=1.9.3,<1.10.0a0 - - snappy >=1.1.10,<2.0a0 + - snappy >=1.1.10,<1.2.0a0 - zstd >=1.5.5,<1.6.0a0 - arch: x86_64 - platform: linux license: BSD-3-Clause license_family: BSD size: 48692 @@ -3695,7 +3631,7 @@ packages: md5: f907bb958910dc404647326ca80c263e depends: - fontconfig >=2.14.2,<3.0a0 - - fonts-conda-ecosystem * + - fonts-conda-ecosystem - freetype >=2.12.1,<3.0a0 - icu >=73.2,<74.0a0 - libgcc-ng >=12 @@ -3703,16 +3639,14 @@ packages: - libpng >=1.6.39,<1.7.0a0 - libstdcxx-ng >=12 - libxcb >=1.15,<1.16.0a0 - - libzlib 
>=1.2.13,<1.3.0a0 + - libzlib >=1.2.13,<2.0.0a0 - pixman >=0.42.2,<1.0a0 - xorg-libice >=1.1.1,<2.0a0 - xorg-libsm >=1.2.4,<2.0a0 - xorg-libx11 >=1.8.6,<2.0a0 - xorg-libxext >=1.3.4,<2.0a0 - xorg-libxrender >=0.9.11,<0.10.0a0 - - zlib * - arch: x86_64 - platform: linux + - zlib license: LGPL-2.1-only or MPL-1.1 size: 982351 timestamp: 1697028423052 @@ -4545,9 +4479,7 @@ packages: - freetype >=2.12.1,<3.0a0 - libgcc-ng >=12 - libuuid >=2.32.1,<3.0a0 - - libzlib >=1.2.13,<1.3.0a0 - arch: x86_64 - platform: linux + - libzlib >=1.2.13,<2.0.0a0 license: MIT license_family: MIT size: 272010 @@ -4637,9 +4569,7 @@ packages: depends: - libgcc-ng >=12 - libpng >=1.6.39,<1.7.0a0 - - libzlib >=1.2.13,<1.3.0a0 - arch: x86_64 - platform: linux + - libzlib >=1.2.13,<2.0.0a0 license: GPL-2.0-only OR FTL size: 634972 timestamp: 1694615932610 @@ -4784,21 +4714,6 @@ packages: license: LGPL-2.1-or-later AND GPL-3.0-or-later size: 4021036 timestamp: 1665674192347 -- kind: conda - name: gettext - version: 0.21.1 - build: h27087fc_0 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/gettext-0.21.1-h27087fc_0.tar.bz2 - sha256: 4fcfedc44e4c9a053f0416f9fc6ab6ed50644fca3a761126dbd00d09db1f546a - md5: 14947d8770185e5153fdd04d4673ed37 - depends: - - libgcc-ng >=12 - arch: x86_64 - platform: linux - license: LGPL-2.1-or-later AND GPL-3.0-or-later - size: 4320628 - timestamp: 1665673494324 - kind: conda name: gflags version: 2.2.2 @@ -5006,22 +4921,6 @@ packages: license_family: BSD size: 100624 timestamp: 1649143914155 -- kind: conda - name: gmp - version: 6.2.1 - build: h58526e2_0 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/gmp-6.2.1-h58526e2_0.tar.bz2 - sha256: 07a5319e1ac54fe5d38f50c60f7485af7f830b036da56957d0bfb7558a886198 - md5: b94cf2db16066b242ebd26db2facbd56 - depends: - - libgcc-ng >=7.5.0 - - libstdcxx-ng >=7.5.0 - arch: x86_64 - platform: linux - license: GPL-2.0-or-later AND LGPL-3.0-or-later - size: 825784 - timestamp: 1605751468661 - kind: conda name: graphite2 version: 1.3.13 @@ -5293,7 +5192,7 @@ packages: - blosc >=1.21.5,<2.0a0 - brunsli >=0.1,<1.0a0 - bzip2 >=1.0.8,<2.0a0 - - c-blosc2 >=2.10.2,<3.0a0 + - c-blosc2 >=2.10.2,<2.13.0a0 - charls >=2.4.2,<2.5.0a0 - giflib >=5.2.1,<5.3.0a0 - jxrlib >=1.1,<1.2.0a0 @@ -5311,23 +5210,19 @@ packages: - libstdcxx-ng >=12 - libtiff >=4.6.0,<4.7.0a0 - libwebp-base >=1.3.2,<2.0a0 - - libzlib >=1.2.13,<1.3.0a0 + - libzlib >=1.2.13,<2.0.0a0 - libzopfli >=1.0.3,<1.1.0a0 - lz4-c >=1.9.3,<1.10.0a0 - numpy >=1.22.4,<2.0a0 - openjpeg >=2.5.0,<3.0a0 - python >=3.10,<3.11.0a0 - python_abi 3.10.* *_cp310 - - snappy >=1.1.10,<2.0a0 + - snappy >=1.1.10,<1.2.0a0 - xz >=5.2.6,<6.0a0 - zfp >=1.0.0,<2.0a0 - zstd >=1.5.5,<1.6.0a0 - arch: x86_64 - platform: linux license: BSD-3-Clause license_family: BSD - purls: - - pkg:pypi/imagecodecs size: 1924280 timestamp: 1695139671006 - kind: conda @@ -6680,22 +6575,22 @@ packages: timestamp: 1607309452074 - kind: conda name: kernel-headers_linux-64 - version: 2.6.32 - build: he073ed8_16 + version: 3.10.0 + build: h4a8ded7_16 build_number: 16 - subdir: linux-64 + subdir: noarch noarch: generic - url: https://conda.anaconda.org/conda-forge/noarch/kernel-headers_linux-64-2.6.32-he073ed8_16.conda - sha256: aaa8aa6dc776d734a6702032588ff3c496721da905366d91162e3654c082aef0 - md5: 7ca122655873935e02c91279c5b03c8c + url: https://conda.anaconda.org/conda-forge/noarch/kernel-headers_linux-64-3.10.0-h4a8ded7_16.conda + sha256: a55044e0f61058a5f6bab5e1dd7f15a1fa7a08ec41501dbfca5ab0fc50b9c0c1 + md5: 
ff7f38675b226cfb855aebfc32a13e31 + depends: + - _sysroot_linux-64_curr_repodata_hack 3.* constrains: - - sysroot_linux-64 ==2.12 - arch: x86_64 - platform: linux + - sysroot_linux-64 ==2.17 license: LGPL-2.0-or-later AND LGPL-2.0-or-later WITH exceptions AND GPL-2.0-or-later AND MPL-2.0 license_family: GPL - size: 709007 - timestamp: 1689214970644 + size: 944344 + timestamp: 1720621422017 - kind: conda name: keyutils version: 1.6.1 @@ -7126,6 +7021,7 @@ packages: - aws-sdk-cpp >=1.11.156,<1.11.157.0a0 - bzip2 >=1.0.8,<2.0a0 - glog >=0.6.0,<0.7.0a0 + - libabseil * cxx17* - libabseil >=20230125.3,<20230126.0a0 - libbrotlidec >=1.1.0,<1.2.0a0 - libbrotlienc >=1.1.0,<1.2.0a0 @@ -7136,20 +7032,18 @@ packages: - libstdcxx-ng >=12 - libthrift >=0.19.0,<0.19.1.0a0 - libutf8proc >=2.8.0,<3.0a0 - - libzlib >=1.2.13,<1.3.0a0 + - libzlib >=1.2.13,<2.0.0a0 - lz4-c >=1.9.3,<1.10.0a0 - openssl >=3.1.2,<4.0a0 - orc >=1.9.0,<1.9.1.0a0 - re2 >=2023.3.2,<2023.3.3.0a0 - - snappy >=1.1.10,<2.0a0 + - snappy >=1.1.10,<1.2.0a0 - ucx >=1.14.0,<1.15.0a0 - zstd >=1.5.5,<1.6.0a0 constrains: - apache-arrow-proc =*=cpu - parquet-cpp <0.0a0 - arrow-cpp =12.0.1 - arch: x86_64 - platform: linux license: Apache-2.0 license_family: APACHE size: 27625529 @@ -7820,9 +7714,7 @@ packages: - krb5 >=1.21.1,<1.22.0a0 - libgcc-ng >=12 - libstdcxx-ng >=12 - - libzlib >=1.2.13,<1.3.0a0 - arch: x86_64 - platform: linux + - libzlib >=1.2.13,<2.0.0a0 license: Apache-2.0 license_family: Apache size: 4519402 @@ -7882,11 +7774,9 @@ packages: - libgcc-ng >=12 - libnghttp2 >=1.52.0,<2.0a0 - libssh2 >=1.11.0,<2.0a0 - - libzlib >=1.2.13,<1.3.0a0 + - libzlib >=1.2.13,<2.0.0a0 - openssl >=3.1.3,<4.0a0 - zstd >=1.5.5,<1.6.0a0 - arch: x86_64 - platform: linux license: curl license_family: MIT size: 386160 @@ -8441,27 +8331,25 @@ packages: timestamp: 1688694829858 - kind: conda name: libglib - version: 2.78.0 - build: hebfc3b9_0 + version: 2.80.3 + build: h315aac3_2 + build_number: 2 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libglib-2.78.0-hebfc3b9_0.conda - sha256: 96ec4dc5e38f434aa5862cb46d74923cce1445de3cd0b9d61e3e63102b163af6 - md5: e618003da3547216310088478e475945 + url: https://conda.anaconda.org/conda-forge/linux-64/libglib-2.80.3-h315aac3_2.conda + sha256: 7470e664b780b91708bed356cc634874dfc3d6f17cbf884a1d6f5d6d59c09f91 + md5: b0143a3e98136a680b728fdf9b42a258 depends: - - gettext >=0.21.1,<1.0a0 + - __glibc >=2.17,<3.0.a0 - libffi >=3.4,<4.0a0 - libgcc-ng >=12 - libiconv >=1.17,<2.0a0 - - libstdcxx-ng >=12 - - libzlib >=1.2.13,<1.3.0a0 - - pcre2 >=10.40,<10.41.0a0 + - libzlib >=1.3.1,<2.0a0 + - pcre2 >=10.44,<10.45.0a0 constrains: - - glib 2.78.0 *_0 - arch: x86_64 - platform: linux + - glib 2.80.3 *_2 license: LGPL-2.1-or-later - size: 2701539 - timestamp: 1694381226310 + size: 3922900 + timestamp: 1723208802469 - kind: conda name: libgomp version: 13.2.0 @@ -8593,17 +8481,16 @@ packages: md5: 0b01e6ff8002994bd4ddbffcdbec7856 depends: - c-ares >=1.19.1,<2.0a0 + - libabseil * cxx17* - libabseil >=20230125.3,<20230126.0a0 - libgcc-ng >=12 - libprotobuf >=4.23.3,<4.23.4.0a0 - libstdcxx-ng >=12 - - libzlib >=1.2.13,<1.3.0a0 + - libzlib >=1.2.13,<2.0.0a0 - openssl >=3.1.2,<4.0a0 - re2 >=2023.3.2,<2023.3.3.0a0 constrains: - grpc-cpp =1.56.2 - arch: x86_64 - platform: linux license: Apache-2.0 license_family: APACHE size: 6331805 @@ -8909,27 +8796,6 @@ packages: license_family: MIT size: 564295 timestamp: 1677678452375 -- kind: conda - name: libnghttp2 - version: 1.55.1 - build: h47da74e_0 - subdir: linux-64 - url: 
https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.55.1-h47da74e_0.conda - sha256: 5e60b852dbde156ef1fa939af2491fe0e9eb3000de146786dede7cda8991ae4c - md5: a802251d1eaeeae041c867faf0f94fa8 - depends: - - c-ares >=1.20.1,<2.0a0 - - libev >=4.33,<4.34.0a0 - - libgcc-ng >=12 - - libstdcxx-ng >=12 - - libzlib >=1.2.13,<1.3.0a0 - - openssl >=3.1.4,<4.0a0 - arch: x86_64 - platform: linux - license: MIT - license_family: MIT - size: 627864 - timestamp: 1698429073582 - kind: conda name: libnghttp2 version: 1.55.1 @@ -8951,6 +8817,25 @@ packages: license_family: MIT size: 602957 timestamp: 1698429317306 +- kind: conda + name: libnghttp2 + version: 1.58.0 + build: h47da74e_0 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.58.0-h47da74e_0.conda + sha256: 151b18e4f92dcca263a6d23e4beb0c4e2287aa1c7d0587ff71ef50035ed34aca + md5: 9b13d5ee90fc9f09d54fd403247342b4 + depends: + - c-ares >=1.21.0,<2.0a0 + - libev >=4.33,<4.34.0a0 + - libgcc-ng >=12 + - libstdcxx-ng >=12 + - libzlib >=1.2.13,<2.0.0a0 + - openssl >=3.1.4,<4.0a0 + license: MIT + license_family: MIT + size: 631397 + timestamp: 1699440427647 - kind: conda name: libnsl version: 2.0.1 @@ -9061,22 +8946,6 @@ packages: license: zlib-acknowledgement size: 343883 timestamp: 1669076173145 -- kind: conda - name: libpng - version: 1.6.39 - build: h753d276_0 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.39-h753d276_0.conda - sha256: a32b36d34e4f2490b99bddbc77d01a674d304f667f0e62c89e02c961addef462 - md5: e1c890aebdebbfbf87e2c917187b4416 - depends: - - libgcc-ng >=12 - - libzlib >=1.2.13,<1.3.0a0 - arch: x86_64 - platform: linux - license: zlib-acknowledgement - size: 282599 - timestamp: 1669075729952 - kind: conda name: libpng version: 1.6.39 @@ -9107,6 +8976,20 @@ packages: license: zlib-acknowledgement size: 271689 timestamp: 1669075890643 +- kind: conda + name: libpng + version: 1.6.43 + build: h2797004_0 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.43-h2797004_0.conda + sha256: 502f6ff148ac2777cc55ae4ade01a8fc3543b4ffab25c4e0eaa15f94e90dd997 + md5: 009981dd9cfcaa4dbfa25ffaed86bcae + depends: + - libgcc-ng >=12 + - libzlib >=1.2.13,<2.0.0a0 + license: zlib-acknowledgement + size: 288221 + timestamp: 1708780443939 - kind: conda name: libprotobuf version: 4.23.3 @@ -9157,12 +9040,11 @@ packages: sha256: 2e2a9b612b8ef8b928f8efac835cd2914722bbab348fa643b99db2efd3b34185 md5: 78c10e8637a6f8d377f9989327d0267d depends: + - libabseil * cxx17* - libabseil >=20230125.3,<20230126.0a0 - libgcc-ng >=12 - libstdcxx-ng >=12 - - libzlib >=1.2.13,<1.3.0a0 - arch: x86_64 - platform: linux + - libzlib >=1.2.13,<2.0.0a0 license: BSD-3-Clause license_family: BSD size: 2495057 @@ -9298,22 +9180,6 @@ packages: license: Unlicense size: 822883 timestamp: 1684265273102 -- kind: conda - name: libsqlite - version: 3.44.0 - build: h2797004_0 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.44.0-h2797004_0.conda - sha256: 74ef5dcb900c38bec53140036e5e2a9cc7ffcd806da479ea2305f962a358a259 - md5: b58e6816d137f3aabf77d341dd5d732b - depends: - - libgcc-ng >=12 - - libzlib >=1.2.13,<1.3.0a0 - arch: x86_64 - platform: linux - license: Unlicense - size: 845977 - timestamp: 1698854720770 - kind: conda name: libsqlite version: 3.44.0 @@ -9346,6 +9212,20 @@ packages: license: Unlicense size: 852871 timestamp: 1698855272921 +- kind: conda + name: libsqlite + version: 3.46.0 + build: hde9e2c9_0 + subdir: linux-64 + url: 
https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.46.0-hde9e2c9_0.conda + sha256: daee3f68786231dad457d0dfde3f7f1f9a7f2018adabdbb864226775101341a8 + md5: 18aa975d2094c34aef978060ae7da7d8 + depends: + - libgcc-ng >=12 + - libzlib >=1.2.13,<2.0a0 + license: Unlicense + size: 865346 + timestamp: 1718050628718 - kind: conda name: libssh2 version: 1.11.0 @@ -9356,10 +9236,8 @@ packages: md5: 1f5a58e686b13bcfde88b93f547d23fe depends: - libgcc-ng >=12 - - libzlib >=1.2.13,<1.3.0a0 + - libzlib >=1.2.13,<2.0.0a0 - openssl >=3.1.1,<4.0a0 - arch: x86_64 - platform: linux license: BSD-3-Clause license_family: BSD size: 271133 @@ -9508,10 +9386,8 @@ packages: - libevent >=2.1.12,<2.1.13.0a0 - libgcc-ng >=12 - libstdcxx-ng >=12 - - libzlib >=1.2.13,<1.3.0a0 + - libzlib >=1.2.13,<2.0.0a0 - openssl >=3.1.3,<4.0a0 - arch: x86_64 - platform: linux license: Apache-2.0 license_family: APACHE size: 409409 @@ -9555,11 +9431,9 @@ packages: - libjpeg-turbo >=2.1.5.1,<3.0a0 - libstdcxx-ng >=12 - libwebp-base >=1.3.2,<2.0a0 - - libzlib >=1.2.13,<1.3.0a0 + - libzlib >=1.2.13,<2.0.0a0 - xz >=5.2.6,<6.0a0 - zstd >=1.5.5,<1.6.0a0 - arch: x86_64 - platform: linux license: HPND size: 277480 timestamp: 1694958140034 @@ -9978,23 +9852,21 @@ packages: timestamp: 1686575452215 - kind: conda name: libzlib - version: 1.2.13 - build: hd590300_5 - build_number: 5 + version: 1.3.1 + build: h4ab18f5_1 + build_number: 1 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.13-hd590300_5.conda - sha256: 370c7c5893b737596fd6ca0d9190c9715d89d888b8c88537ae1ef168c25e82e4 - md5: f36c115f1ee199da648e0597ec2047ad + url: https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.1-h4ab18f5_1.conda + sha256: adf6096f98b537a11ae3729eaa642b0811478f0ea0402ca67b5108fe2cb0010d + md5: 57d7dc60e9325e3de37ff8dffd18e814 depends: - libgcc-ng >=12 constrains: - - zlib 1.2.13 *_5 - arch: x86_64 - platform: linux + - zlib 1.3.1 *_1 license: Zlib license_family: Other - size: 61588 - timestamp: 1686575217516 + size: 61574 + timestamp: 1716874187109 - kind: conda name: libzopfli version: 1.0.3 @@ -11064,11 +10936,9 @@ packages: - libgcc-ng >=12 - libstdcxx-ng >=12 - libuv >=1.44.2,<1.45.0a0 - - libzlib >=1.2.13,<1.3.0a0 + - libzlib >=1.2.13,<2.0.0a0 - openssl >=3.1.2,<4.0a0 - - zlib * - arch: x86_64 - platform: linux + - zlib license: MIT license_family: MIT size: 17054450 @@ -11259,9 +11129,9 @@ packages: sha256: a67450cb4bbffd084c407da95fb0c7a0d845cdecd412b9d9b0793634e1cdddc2 md5: b1d60ca9e1cf8c82214380fc8a8582e0 depends: - - alsa-lib >=1.2.10,<1.2.11.0a0 + - alsa-lib >=1.2.10,<1.3.0.0a0 - fontconfig >=2.14.2,<3.0a0 - - fonts-conda-ecosystem * + - fonts-conda-ecosystem - freetype >=2.12.1,<3.0a0 - giflib >=5.2.1,<5.3.0a0 - harfbuzz >=8.1.1,<9.0a0 @@ -11271,15 +11141,13 @@ packages: - libjpeg-turbo >=2.1.5.1,<3.0a0 - libpng >=1.6.39,<1.7.0a0 - libstdcxx-ng >=12 - - libzlib >=1.2.13,<1.3.0a0 + - libzlib >=1.2.13,<2.0.0a0 - xorg-libx11 >=1.8.6,<2.0a0 - xorg-libxext >=1.3.4,<2.0a0 - - xorg-libxi * + - xorg-libxi - xorg-libxrender >=0.9.11,<0.10.0a0 - xorg-libxt >=1.3.0,<2.0a0 - - xorg-libxtst * - arch: x86_64 - platform: linux + - xorg-libxtst license: GPL-2.0-or-later WITH Classpath-exception-2.0 license_family: GPL size: 176652534 @@ -11306,27 +11174,6 @@ packages: license_family: BSD size: 236847 timestamp: 1694708878963 -- kind: conda - name: openjpeg - version: 2.5.0 - build: h488ebb8_3 - build_number: 3 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.0-h488ebb8_3.conda - 
sha256: 9fe91b67289267de68fda485975bb48f0605ac503414dc663b50d8b5f29bc82a - md5: 128c25b7fe6a25286a48f3a6a9b5b6f3 - depends: - - libgcc-ng >=12 - - libpng >=1.6.39,<1.7.0a0 - - libstdcxx-ng >=12 - - libtiff >=4.6.0,<4.7.0a0 - - libzlib >=1.2.13,<1.3.0a0 - arch: x86_64 - platform: linux - license: BSD-2-Clause - license_family: BSD - size: 356698 - timestamp: 1694708325417 - kind: conda name: openjpeg version: 2.5.0 @@ -11367,6 +11214,24 @@ packages: license_family: BSD size: 307087 timestamp: 1671435439914 +- kind: conda + name: openjpeg + version: 2.5.2 + build: h488ebb8_0 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.2-h488ebb8_0.conda + sha256: 5600a0b82df042bd27d01e4e687187411561dfc11cc05143a08ce29b64bf2af2 + md5: 7f2e286780f072ed750df46dc2631138 + depends: + - libgcc-ng >=12 + - libpng >=1.6.43,<1.7.0a0 + - libstdcxx-ng >=12 + - libtiff >=4.6.0,<4.7.0a0 + - libzlib >=1.2.13,<2.0.0a0 + license: BSD-2-Clause + license_family: BSD + size: 341592 + timestamp: 1709159244431 - kind: conda name: openssl version: 3.1.2 @@ -11456,12 +11321,10 @@ packages: - libgcc-ng >=12 - libprotobuf >=4.23.3,<4.23.4.0a0 - libstdcxx-ng >=12 - - libzlib >=1.2.13,<1.3.0a0 + - libzlib >=1.2.13,<2.0.0a0 - lz4-c >=1.9.3,<1.10.0a0 - - snappy >=1.1.10,<2.0a0 + - snappy >=1.1.10,<1.2.0a0 - zstd >=1.5.2,<1.6.0a0 - arch: x86_64 - platform: linux license: Apache-2.0 license_family: Apache size: 1020883 @@ -11725,24 +11588,6 @@ packages: license_family: BSD size: 12296643 timestamp: 1688741475871 -- kind: conda - name: pandoc - version: 3.1.3 - build: h32600fe_0 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/pandoc-3.1.3-h32600fe_0.conda - sha256: 52d23e2fded05e7a19d9d7996f19ed837b46578b6e5951b8c5990cf919404ffc - md5: 8287aeb8462e2d4b235eff788e75919d - depends: - - gmp * - - libzlib >=1.2.13,<1.3.0a0 - - zlib * - arch: x86_64 - platform: linux - license: GPL-2.0-or-later - license_family: GPL - size: 29348226 - timestamp: 1686227552062 - kind: conda name: pandoc version: 3.1.3 @@ -11787,6 +11632,18 @@ packages: license_family: GPL size: 26314364 timestamp: 1686225215970 +- kind: conda + name: pandoc + version: '3.3' + build: ha770c72_0 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/pandoc-3.3-ha770c72_0.conda + sha256: 0a9591992ada40a6dd2a3f37bfe51cd01956e54b1fa9204f2bd92b31148cb55e + md5: 0a3af8b93ba501c6ba020deacc9df841 + license: GPL-2.0-or-later + license_family: GPL + size: 20892835 + timestamp: 1722242814344 - kind: conda name: pandocfilters version: 1.5.0 @@ -11867,22 +11724,20 @@ packages: timestamp: 1665563317371 - kind: conda name: pcre2 - version: '10.40' - build: hc3806b6_0 + version: '10.44' + build: h0f59acf_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.40-hc3806b6_0.tar.bz2 - sha256: 7a29ec847556eed4faa1646010baae371ced69059a4ade43851367a076d6108a - md5: 69e2c796349cd9b273890bee0febfe1b + url: https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.44-h0f59acf_0.conda + sha256: 90646ad0d8f9d0fd896170c4f3d754e88c4ba0eaf856c24d00842016f644baab + md5: 3914f7ac1761dce57102c72ca7c35d01 depends: - bzip2 >=1.0.8,<2.0a0 - libgcc-ng >=12 - - libzlib >=1.2.12,<1.3.0a0 - arch: x86_64 - platform: linux + - libzlib >=1.3.1,<2.0a0 license: BSD-3-Clause license_family: BSD - size: 2412495 - timestamp: 1665562915343 + size: 955778 + timestamp: 1718466128333 - kind: conda name: pep517 version: 0.13.0 @@ -11984,13 +11839,11 @@ packages: - libtiff >=4.6.0,<4.7.0a0 - libwebp-base >=1.3.2,<2.0a0 
- libxcb >=1.15,<1.16.0a0 - - libzlib >=1.2.13,<1.3.0a0 + - libzlib >=1.2.13,<2.0.0a0 - openjpeg >=2.5.0,<3.0a0 - python >=3.10,<3.11.0a0 - python_abi 3.10.* *_cp310 - tk >=8.6.12,<8.7.0a0 - arch: x86_64 - platform: linux license: HPND size: 46384048 timestamp: 1695247436468 @@ -13142,17 +12995,15 @@ packages: - libnsl >=2.0.1,<2.1.0a0 - libsqlite >=3.43.2,<4.0a0 - libuuid >=2.38.1,<3.0a0 - - libzlib >=1.2.13,<1.3.0a0 + - libzlib >=1.2.13,<2.0.0a0 - ncurses >=6.4,<7.0a0 - openssl >=3.1.4,<4.0a0 - readline >=8.2,<9.0a0 - tk >=8.6.13,<8.7.0a0 - - tzdata * + - tzdata - xz >=5.2.6,<6.0a0 constrains: - python_abi 3.10.* *_cp310 - arch: x86_64 - platform: linux license: Python-2.0 size: 25476977 timestamp: 1698344640413 @@ -13178,110 +13029,75 @@ packages: timestamp: 1626286448716 - kind: conda name: python-duckdb - version: 0.8.1 - build: py310h1ba7dce_1 - build_number: 1 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/python-duckdb-0.8.1-py310h1ba7dce_1.conda - sha256: 7acb1777287ef7ff42c672a81671bc845a26dc4eb1e833fef4b85d3d053a9ca9 - md5: 1df97a88262853108f856ec5d665b911 + version: 1.0.0 + build: py310h9e98ed7_0 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/python-duckdb-1.0.0-py310h9e98ed7_0.conda + sha256: 23c2abb0018fdd2ee8176b33ac8eac48b6094a219b971c5fdc702285785aa4cd + md5: cae7ec224c706014f6e1568b3cf1cc96 depends: - - __osx >=10.12 - - arrow-cpp * - - libarrow >=12.0.1,<12.0.2.0a0 - - libcxx >=15.0.7 - - numpy >=1.22.4,<2.0a0 - - pandas * - - pyarrow * - python >=3.10,<3.11.0a0 - python_abi 3.10.* *_cp310 - arch: x86_64 - platform: osx + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 license: MIT license_family: MIT - purls: - - pkg:pypi/duckdb - size: 17980711 - timestamp: 1692970522136 + size: 15638825 + timestamp: 1717687118745 - kind: conda name: python-duckdb - version: 0.8.1 - build: py310h1d8123b_0 + version: 1.0.0 + build: py310hcf9f62a_0 subdir: osx-arm64 - url: https://conda.anaconda.org/conda-forge/osx-arm64/python-duckdb-0.8.1-py310h1d8123b_0.conda - sha256: 5cb596c856568a0ce680e60589cdcb7628478b2428c2d848887d817af1520389 - md5: 12023f98da51a3312d3b38409493532f + url: https://conda.anaconda.org/conda-forge/osx-arm64/python-duckdb-1.0.0-py310hcf9f62a_0.conda + sha256: 720fdd1e1a34bafc4e5b671c4ab722d2953d09563ca2a4520bb6fb450510fa34 + md5: ff23b03d25d3614a05e91d94036b94b8 depends: - - numpy >=1.21.6,<2.0a0 - - libcxx >=15.0.7 - - pandas * + - __osx >=11.0 + - libcxx >=16 + - python >=3.10,<3.11.0a0 - python >=3.10,<3.11.0a0 *_cpython - - arrow-cpp * - - pyarrow * - python_abi 3.10.* *_cp310 - arch: aarch64 - platform: osx license: MIT license_family: MIT - purls: - - pkg:pypi/duckdb - size: 17131474 - timestamp: 1686941974438 + size: 18599847 + timestamp: 1717686407221 - kind: conda name: python-duckdb - version: 0.8.1 - build: py310h8e3e826_1 - build_number: 1 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/python-duckdb-0.8.1-py310h8e3e826_1.conda - sha256: d100db61f42cff0354982649bf66c26cf37e914e67da79c5f2b828994f8a75b6 - md5: 13f684d8ff731a314d7a198635259de5 + version: 1.0.0 + build: py310he0a0c5d_0 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/python-duckdb-1.0.0-py310he0a0c5d_0.conda + sha256: 3dd1abaa03cb511588c848b74ffdd817f576f259f5d42ad76c77358277c8ae5a + md5: 2c7fa91f1a5f57a72b1aec7e25f0a169 depends: - - arrow-cpp * - - libarrow >=12.0.1,<12.0.2.0a0 - - libgcc-ng >=12 - - libstdcxx-ng >=12 - - numpy >=1.22.4,<2.0a0 - - pandas * - - pyarrow * + - 
__osx >=10.13 + - libcxx >=16 - python >=3.10,<3.11.0a0 - python_abi 3.10.* *_cp310 - arch: x86_64 - platform: linux license: MIT license_family: MIT - purls: - - pkg:pypi/duckdb - size: 20204220 - timestamp: 1692968940474 + size: 20190347 + timestamp: 1717686142652 - kind: conda name: python-duckdb - version: 0.8.1 - build: py310hb400963_1 - build_number: 1 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/python-duckdb-0.8.1-py310hb400963_1.conda - sha256: 1cb5dbd8c71432607df648ee97f11202fee696bb4ab3e32c64c7edf8375138e3 - md5: e0a3d41e9e4a8753b75b79185f896316 + version: 1.0.0 + build: py310hea249c9_0 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/python-duckdb-1.0.0-py310hea249c9_0.conda + sha256: c85731fcd95eba6459f74c675dc6ea6a4ec31ab09607d4bb4316c701690cec20 + md5: 630bef971bd14f61afa83422425d7f95 depends: - - arrow-cpp * - - libarrow >=12.0.1,<12.0.2.0a0 - - numpy >=1.22.4,<2.0a0 - - pandas * - - pyarrow * + - __glibc >=2.17,<3.0.a0 + - libgcc-ng >=12 + - libstdcxx-ng >=12 - python >=3.10,<3.11.0a0 - python_abi 3.10.* *_cp310 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - arch: x86_64 - platform: win license: MIT license_family: MIT - purls: - - pkg:pypi/duckdb - size: 13869507 - timestamp: 1692972370649 + size: 22769349 + timestamp: 1717686625369 - kind: conda name: python-fastjsonschema version: 2.18.0 @@ -14120,131 +13936,125 @@ packages: timestamp: 1697072452949 - kind: conda name: rust - version: 1.75.0 - build: h4ff7c5d_0 - subdir: osx-arm64 - url: https://conda.anaconda.org/conda-forge/osx-arm64/rust-1.75.0-h4ff7c5d_0.conda - sha256: 8bd35dcc0e9761d7f02500ed907e6ee8992c3549250361e40ff5016f84cef215 - md5: 12b6a710ed0f1f366fedbf9d69b7b277 + version: 1.80.1 + build: h0a17960_0 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/rust-1.80.1-h0a17960_0.conda + sha256: 7058519747d4b81f3cab23a0d6b4326c80879d38b2a0bf11cade52fc59980b8f + md5: dba7ad0d2f707fee5e85c6a19042fdb4 depends: - - rust-std-aarch64-apple-darwin 1.75.0 hf6ec828_0 + - __glibc >=2.17,<3.0.a0 + - gcc_impl_linux-64 + - libgcc-ng >=12 + - libzlib >=1.3.1,<2.0a0 + - rust-std-x86_64-unknown-linux-gnu 1.80.1 h2c6d0dc_0 + - sysroot_linux-64 >=2.17 license: MIT - license_family: MIT - size: 181697449 - timestamp: 1704208767306 + size: 198885602 + timestamp: 1723153698032 - kind: conda name: rust - version: 1.75.0 - build: h70c747d_0 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/rust-1.75.0-h70c747d_0.conda - sha256: 5de27c76796ce0dd0ef7496f7c7f9c33a4e4cfa59112f8a1d7b2ada41794609a - md5: d2112c5913c6a3741eecff0c3ab02e7e + version: 1.80.1 + build: h4ff7c5d_0 + subdir: osx-arm64 + url: https://conda.anaconda.org/conda-forge/osx-arm64/rust-1.80.1-h4ff7c5d_0.conda + sha256: 5b296bb663be4c10bf3d07eaaa69c3c5856bd198152a775404e161f6780236bb + md5: 76d236abc95f2d77f7a3c16f1b565b3e depends: - - gcc_impl_linux-64 - - libgcc-ng >=12 - - libzlib >=1.2.13,<1.3.0a0 - - rust-std-x86_64-unknown-linux-gnu 1.75.0 h2c6d0dc_0 + - rust-std-aarch64-apple-darwin 1.80.1 hf6ec828_0 license: MIT - license_family: MIT - size: 190432074 - timestamp: 1704209051045 + size: 197866703 + timestamp: 1723155024117 - kind: conda name: rust - version: 1.75.0 - build: h7e1429e_0 + version: 1.80.1 + build: h6c54e5d_0 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/rust-1.75.0-h7e1429e_0.conda - sha256: 715783725eb9f5689bf57dda8eff8ca85df8940336129e82a836ab8e10aefae1 - md5: 21ab11c8f798546e46f023093f650866 + url: 
https://conda.anaconda.org/conda-forge/osx-64/rust-1.80.1-h6c54e5d_0.conda + sha256: 8e799c550545a41baef23a543ffd87620cf67c0afd3494ea40b6081cbf8aabe7 + md5: ecf36b937ded5c641039161f7f5c7f64 depends: - - rust-std-x86_64-apple-darwin 1.75.0 h38e4360_0 + - rust-std-x86_64-apple-darwin 1.80.1 h38e4360_0 license: MIT - license_family: MIT - size: 191324119 - timestamp: 1704208696296 + size: 202606989 + timestamp: 1723154998091 - kind: conda name: rust - version: 1.75.0 + version: 1.80.1 build: hf8d6059_0 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/rust-1.75.0-hf8d6059_0.conda - sha256: bb0e39570b63bf10c859afcc1593c41d4e9b164e63f78835403f79cbb3145d4d - md5: 68ac0e9721e34c115201a76c63bd82c8 + url: https://conda.anaconda.org/conda-forge/win-64/rust-1.80.1-hf8d6059_0.conda + sha256: 3d8f926d5db03762a1e3ff723295ea18674c29960e2e501a16c9413304698654 + md5: 385a661cb1746cb6c62eb55712b412dd depends: - - rust-std-x86_64-pc-windows-msvc 1.75.0 h17fc481_0 + - rust-std-x86_64-pc-windows-msvc 1.80.1 h17fc481_0 license: MIT - license_family: MIT - size: 186456319 - timestamp: 1704211980549 + size: 194534225 + timestamp: 1723155969495 - kind: conda name: rust-std-aarch64-apple-darwin - version: 1.75.0 + version: 1.80.1 build: hf6ec828_0 subdir: noarch noarch: generic - url: https://conda.anaconda.org/conda-forge/noarch/rust-std-aarch64-apple-darwin-1.75.0-hf6ec828_0.conda - sha256: 19d2d1247e239c204352e4419f8a35a5abe2b9ec1e02c151ac611c2d50f2edab - md5: dd34d3d31a2b900e59582e8616bdeca8 + url: https://conda.anaconda.org/conda-forge/noarch/rust-std-aarch64-apple-darwin-1.80.1-hf6ec828_0.conda + sha256: 6cd8c3cf93fb8348c815595eced946316bc81a0bf8c6fc8f6b9f27e270734770 + md5: b3b07764d1fa59acf5c356bbb727db20 depends: - __unix constrains: - - rust >=1.75.0,<1.75.1.0a0 + - rust >=1.80.1,<1.80.2.0a0 license: MIT - license_family: MIT - size: 29617229 - timestamp: 1704208512989 + size: 30991019 + timestamp: 1723152907303 - kind: conda name: rust-std-x86_64-apple-darwin - version: 1.75.0 + version: 1.80.1 build: h38e4360_0 subdir: noarch noarch: generic - url: https://conda.anaconda.org/conda-forge/noarch/rust-std-x86_64-apple-darwin-1.75.0-h38e4360_0.conda - sha256: bbcbb5f8643dd61605f6edb6487b18497511669267091f578b29e4902ccb421c - md5: ddfe0984ccdd936ee23ce8b0c4c88d6a + url: https://conda.anaconda.org/conda-forge/noarch/rust-std-x86_64-apple-darwin-1.80.1-h38e4360_0.conda + sha256: 56a30b275235975ea4e37f8d703818079601163aca92195a45468b0e7d6beffb + md5: b1ce3c6d57f2cf9f5a8b2448e3b6f499 depends: - __unix constrains: - - rust >=1.75.0,<1.75.1.0a0 + - rust >=1.80.1,<1.80.2.0a0 license: MIT - license_family: MIT - size: 30638012 - timestamp: 1704208441871 + size: 31988631 + timestamp: 1723152891461 - kind: conda name: rust-std-x86_64-pc-windows-msvc - version: 1.75.0 + version: 1.80.1 build: h17fc481_0 subdir: noarch noarch: generic - url: https://conda.anaconda.org/conda-forge/noarch/rust-std-x86_64-pc-windows-msvc-1.75.0-h17fc481_0.conda - sha256: e7f62368332a48b8c7b44e27204cbf6c6d71fd433f6d7a24c20dacd623557503 - md5: 376e875d5c8e957a3ef44d24eff109eb + url: https://conda.anaconda.org/conda-forge/noarch/rust-std-x86_64-pc-windows-msvc-1.80.1-h17fc481_0.conda + sha256: a4f118c6211f717846c094e58d3baef32215d1a2414d51c3e08b739dce75c28f + md5: f21862b6487af2fe504ca2b78dfec822 depends: - __win constrains: - - rust >=1.75.0,<1.75.1.0a0 + - rust >=1.80.1,<1.80.2.0a0 license: MIT - license_family: MIT - size: 24987788 - timestamp: 1704211520005 + size: 25255952 + timestamp: 1723155705619 - kind: conda name: 
rust-std-x86_64-unknown-linux-gnu - version: 1.75.0 + version: 1.80.1 build: h2c6d0dc_0 subdir: noarch noarch: generic - url: https://conda.anaconda.org/conda-forge/noarch/rust-std-x86_64-unknown-linux-gnu-1.75.0-h2c6d0dc_0.conda - sha256: 4ae02c3fadece8b4c0b0a214f21b4fd8e47ec81a332c503fdd21a659a472f108 - md5: 46ab571e9b711ed713cd515395d187dd + url: https://conda.anaconda.org/conda-forge/noarch/rust-std-x86_64-unknown-linux-gnu-1.80.1-h2c6d0dc_0.conda + sha256: 769cb83291804c9faa0de81534ceb3794cd06efd4d5164872bd5527e511f12a7 + md5: 0a5b8783d18a253b0812a5501df297af depends: - __unix constrains: - - rust >=1.75.0,<1.75.1.0a0 + - rust >=1.80.1,<1.80.2.0a0 license: MIT - license_family: MIT - size: 33042935 - timestamp: 1704208890522 + size: 33938994 + timestamp: 1723153507938 - kind: conda name: s2n version: 1.3.54 @@ -14959,22 +14769,22 @@ packages: timestamp: 1666792984220 - kind: conda name: sysroot_linux-64 - version: '2.12' - build: he073ed8_16 + version: '2.17' + build: h4a8ded7_16 build_number: 16 - subdir: linux-64 + subdir: noarch noarch: generic - url: https://conda.anaconda.org/conda-forge/noarch/sysroot_linux-64-2.12-he073ed8_16.conda - sha256: 4c024b2eee24c6da7d3e08723111ec02665c578844c5b3e9e6b38f89000bec41 - md5: 071ea8dceff4d30ac511f4a2f8437cd1 + url: https://conda.anaconda.org/conda-forge/noarch/sysroot_linux-64-2.17-h4a8ded7_16.conda + sha256: b892b0b9c6dc8efe8b9b5442597d1ab8d65c0dc7e4e5a80f822cbdf0a639bd77 + md5: 223fe8a3ff6d5e78484a9d58eb34d055 depends: - - kernel-headers_linux-64 ==2.6.32 he073ed8_16 - arch: x86_64 - platform: linux + - _sysroot_linux-64_curr_repodata_hack 3.* + - kernel-headers_linux-64 3.10.0 h4a8ded7_16 + - tzdata license: LGPL-2.0-or-later AND LGPL-2.0-or-later WITH exceptions AND GPL-2.0-or-later AND MPL-2.0 license_family: GPL - size: 15277813 - timestamp: 1689214980563 + size: 15513240 + timestamp: 1720621429816 - kind: conda name: tbb version: 2021.10.0 @@ -15162,23 +14972,6 @@ packages: license_family: BSD size: 3382710 timestamp: 1645032642101 -- kind: conda - name: tk - version: 8.6.13 - build: h2797004_0 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-h2797004_0.conda - sha256: 679e944eb93fde45d0963a22598fafacbb429bb9e7ee26009ba81c4e0c435055 - md5: 513336054f884f95d9fd925748f41ef3 - depends: - - libgcc-ng >=12 - - libzlib >=1.2.13,<1.3.0a0 - arch: x86_64 - platform: linux - license: TCL - license_family: BSD - size: 3290187 - timestamp: 1695506262576 - kind: conda name: tk version: 8.6.13 @@ -15213,6 +15006,22 @@ packages: license_family: BSD size: 3273909 timestamp: 1695506576288 +- kind: conda + name: tk + version: 8.6.13 + build: noxft_h4845f30_101 + build_number: 101 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h4845f30_101.conda + sha256: e0569c9caa68bf476bead1bed3d79650bb080b532c64a4af7d8ca286c08dea4e + md5: d453b98d9c83e71da0741bb0ff4d76bc + depends: + - libgcc-ng >=12 + - libzlib >=1.2.13,<2.0.0a0 + license: TCL + license_family: BSD + size: 3318875 + timestamp: 1699202167581 - kind: conda name: toml version: 0.10.2 @@ -15810,23 +15619,21 @@ packages: timestamp: 1688020629925 - kind: conda name: vc14_runtime - version: 14.36.32532 - build: hdcecf7f_17 - build_number: 17 + version: 14.40.33810 + build: ha82c5b3_20 + build_number: 20 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/vc14_runtime-14.36.32532-hdcecf7f_17.conda - sha256: b317d49af32d5c031828e62c08d56f01d9a64cd3f40d4cccb052bc38c7a9e62e - md5: d0de20f2f3fc806a81b44fcdd941aaf7 + 
url: https://conda.anaconda.org/conda-forge/win-64/vc14_runtime-14.40.33810-ha82c5b3_20.conda + sha256: af3cfa347e3d7c1277e9b964b0849a9a9f095bff61836cb3c3a89862fbc32e17 + md5: e39cc4c34c53654ec939558993d9dc5b depends: - ucrt >=10.0.20348.0 constrains: - - vs2015_runtime 14.36.32532.* *_17 - arch: x86_64 - platform: win + - vs2015_runtime 14.40.33810.* *_20 license: LicenseRef-ProprietaryMicrosoft license_family: Proprietary - size: 739437 - timestamp: 1694292382336 + size: 751934 + timestamp: 1717709031266 - kind: conda name: vega_datasets version: 0.9.0 @@ -15849,77 +15656,80 @@ packages: timestamp: 1606414171959 - kind: conda name: vl-convert-python - version: 1.3.0 - build: py310h2372a71_0 + version: 1.6.0 + build: py310h5b4e0ec_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/vl-convert-python-1.3.0-py310h2372a71_0.conda - sha256: 7595849fe8c89ec1898006aa56f4aa3e47b0865c2a7d6c51a9f757d9f72abcf5 - md5: 2bfea92f6bccef4322396ef303d2b479 + url: https://conda.anaconda.org/conda-forge/linux-64/vl-convert-python-1.6.0-py310h5b4e0ec_0.conda + sha256: 8e788a7c3e708748521a9e0509a978d484665bd6e5c57ac32faa86dc71549bf9 + md5: eac355b92fe08a372c6d3cae74f15aac depends: + - __glibc >=2.17,<3.0.a0 - libgcc-ng >=12 - python >=3.10,<3.11.0a0 - python_abi 3.10.* *_cp310 + constrains: + - __glibc >=2.17 license: BSD-3-Clause license_family: BSD - purls: - - pkg:pypi/vl-convert-python - size: 21548131 - timestamp: 1710359776523 + size: 22615432 + timestamp: 1722901880182 - kind: conda name: vl-convert-python - version: 1.3.0 - build: py310h7664a31_0 + version: 1.6.0 + build: py310h936d840_0 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/vl-convert-python-1.3.0-py310h7664a31_0.conda - sha256: a77c8f1f55a56d4cb75ab05a71b683d5c68093243e100627d59e3ef2d9e0fcbc - md5: 89fb4ef3eaf52fe9e8db31a1b6588b3e + url: https://conda.anaconda.org/conda-forge/osx-64/vl-convert-python-1.6.0-py310h936d840_0.conda + sha256: 301dbd28a46fe52f6c1d7c2c713672d8e3ce652fd3df6e97c12e161d1f748086 + md5: 474c329abe2ac606f4cc04773cca8e55 depends: - __osx >=10.13 - python >=3.10,<3.11.0a0 - python_abi 3.10.* *_cp310 + constrains: + - __osx >=10.13 license: BSD-3-Clause license_family: BSD - purls: - - pkg:pypi/vl-convert-python - size: 20158201 - timestamp: 1710361604381 + size: 21172242 + timestamp: 1722902056741 - kind: conda name: vl-convert-python - version: 1.3.0 - build: py310hd125d64_0 + version: 1.6.0 + build: py310ha6dd24b_0 subdir: osx-arm64 - url: https://conda.anaconda.org/conda-forge/osx-arm64/vl-convert-python-1.3.0-py310hd125d64_0.conda - sha256: a1f5bc51101f1a77baae9bd0c9a810b8e3adb717858872d49c547de647059b00 - md5: 9d2f42d9b90f52f6940bd05abab5ca9a + url: https://conda.anaconda.org/conda-forge/osx-arm64/vl-convert-python-1.6.0-py310ha6dd24b_0.conda + sha256: 441d5fe773854e34c3018ce982bc25a55a951e9b50b18a25689afac07e0ca54b + md5: 5446a027ac5a4ea7f3e95cc42717b9d4 depends: + - __osx >=11.0 - python >=3.10,<3.11.0a0 - python >=3.10,<3.11.0a0 *_cpython - python_abi 3.10.* *_cp310 + constrains: + - __osx >=10.13 license: BSD-3-Clause license_family: BSD - purls: - - pkg:pypi/vl-convert-python - size: 19552349 - timestamp: 1710359651788 + size: 20412409 + timestamp: 1722901289142 - kind: conda name: vl-convert-python - version: 1.3.0 - build: py310hdc45392_0 + version: 1.6.0 + build: py310hb47754f_0 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/vl-convert-python-1.3.0-py310hdc45392_0.conda - sha256: 013b75d57d01f59fdff41e5da54da155dbceaa1e6f90ca5e8d60ee9e5f576bd4 - 
md5: eb3372e950aaed0ef168a85fb97aa4c1 + url: https://conda.anaconda.org/conda-forge/win-64/vl-convert-python-1.6.0-py310hb47754f_0.conda + sha256: fb55fad4b37b1aaa0294574c3ce0b724a3770c9d61ee9916ef79db2ef0d862d2 + md5: 7890e67278c0dacbd5c65d5e32f7cfe4 depends: - m2w64-gcc-libs - m2w64-gcc-libs-core - python >=3.10,<3.11.0a0 - python_abi 3.10.* *_cp310 + - ucrt >=10.0.20348.0 + - vc >=14.3,<15 + - vc14_runtime >=14.40.33810 license: BSD-3-Clause license_family: BSD - purls: - - pkg:pypi/vl-convert-python - size: 20761528 - timestamp: 1710361963411 + size: 21875382 + timestamp: 1722903742368 - kind: conda name: voila version: 0.5.0 @@ -15951,21 +15761,19 @@ packages: timestamp: 1692275824749 - kind: conda name: vs2015_runtime - version: 14.36.32532 - build: h05e6639_17 - build_number: 17 + version: 14.40.33810 + build: h3bf8584_20 + build_number: 20 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/vs2015_runtime-14.36.32532-h05e6639_17.conda - sha256: 5ecbd731dc7f13762d67be0eadc47eb7f14713005e430d9b5fc680e965ac0f81 - md5: 4618046c39f7c81861e53ded842e738a + url: https://conda.anaconda.org/conda-forge/win-64/vs2015_runtime-14.40.33810-h3bf8584_20.conda + sha256: 0c2803f7a788c51f28235a7228dc2ab3f107b4b16ab0845a3e595c8c51e50a7a + md5: c21f1b4a3a30bbc3ef35a50957578e0e depends: - - vc14_runtime >=14.36.32532 - arch: x86_64 - platform: win + - vc14_runtime >=14.40.33810 license: BSD-3-Clause license_family: BSD - size: 17207 - timestamp: 1688020635322 + size: 17395 + timestamp: 1717709043353 - kind: conda name: watchfiles version: 0.21.0 @@ -17253,22 +17061,20 @@ packages: timestamp: 1686575574678 - kind: conda name: zlib - version: 1.2.13 - build: hd590300_5 - build_number: 5 + version: 1.3.1 + build: h4ab18f5_1 + build_number: 1 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/zlib-1.2.13-hd590300_5.conda - sha256: 9887a04d7e7cb14bd2b52fa01858f05a6d7f002c890f618d9fcd864adbfecb1b - md5: 68c34ec6149623be41a1933ab996a209 + url: https://conda.anaconda.org/conda-forge/linux-64/zlib-1.3.1-h4ab18f5_1.conda + sha256: cee16ab07a11303de721915f0a269e8c7a54a5c834aa52f74b1cc3a59000ade8 + md5: 9653f1bf3766164d0e65fa723cabbc54 depends: - libgcc-ng >=12 - - libzlib ==1.2.13 hd590300_5 - arch: x86_64 - platform: linux + - libzlib 1.3.1 h4ab18f5_1 license: Zlib license_family: Other - size: 92825 - timestamp: 1686575231103 + size: 93004 + timestamp: 1716874213487 - kind: conda name: zlib-ng version: 2.0.7 @@ -17385,19 +17191,17 @@ packages: timestamp: 1693151312586 - kind: conda name: zstd - version: 1.5.5 - build: hfc55251_0 + version: 1.5.6 + build: ha6fb4c9_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.5-hfc55251_0.conda - sha256: 607cbeb1a533be98ba96cf5cdf0ddbb101c78019f1fda063261871dad6248609 - md5: 04b88013080254850d6c01ed54810589 + url: https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.6-ha6fb4c9_0.conda + sha256: c558b9cc01d9c1444031bd1ce4b9cff86f9085765f17627a6cd85fc623c8a02b + md5: 4d056880988120e29d75bfff282e0f45 depends: - libgcc-ng >=12 - libstdcxx-ng >=12 - - libzlib >=1.2.13,<1.3.0a0 - arch: x86_64 - platform: linux + - libzlib >=1.2.13,<2.0.0a0 license: BSD-3-Clause license_family: BSD - size: 545199 - timestamp: 1693151163452 + size: 554846 + timestamp: 1714722996770 diff --git a/pixi.toml b/pixi.toml index 8bdd4a690..a5f686df4 100644 --- a/pixi.toml +++ b/pixi.toml @@ -111,7 +111,7 @@ scikit-image = "0.21.0.*" toml = "0.10.2.*" pytest = ">=4.6" click = "8.1.6.*" -python-duckdb = "0.8.1.*" +python-duckdb = "1.0" 
jupyter-packaging = "0.12.3.*" pip = "23.2.1.*" voila = "0.5.0.*" @@ -134,8 +134,8 @@ pandas = "2.0.3.*" altair = "5.3.*" protobuf = "4.23.3.*" ipywidgets = "8.1.0.*" -rust = "1.75.*" -vl-convert-python = "1.3.*" +rust = "1.80.*" +vl-convert-python = "1.6.*" anywidget = ">=0.9.6,<0.10" [target.osx-arm64.build-dependencies] diff --git a/python/vegafusion/tests/test_pretransform.py b/python/vegafusion/tests/test_pretransform.py index 1cef326e8..4df4d1449 100644 --- a/python/vegafusion/tests/test_pretransform.py +++ b/python/vegafusion/tests/test_pretransform.py @@ -1585,10 +1585,12 @@ def test_nat_values(): def test_pre_transform_dataset_dataframe_interface_protocol(): + try: import pyarrow.interchange except ImportError: pytest.skip("DataFrame interface protocol requires pyarrow 11.0.0 or later") + from polars.testing import assert_frame_equal n = 4050 # Input a polars DataFrame (which follows the DataFrame Interface Protocol) @@ -1612,7 +1614,7 @@ def test_pre_transform_dataset_dataframe_interface_protocol(): # Result should be a polars DataFrame assert isinstance(result, pl.DataFrame) expected = pl.DataFrame({"menu_item": [0, 1, 2], "__count": [n, 2 * n, 3 * n]}) - assert result.frame_equal(expected) + assert_frame_equal(result, expected) def test_pre_transform_dataset_duckdb_conn(): diff --git a/vegafusion-common/Cargo.toml b/vegafusion-common/Cargo.toml index 4da10a748..669cc4c26 100644 --- a/vegafusion-common/Cargo.toml +++ b/vegafusion-common/Cargo.toml @@ -23,7 +23,7 @@ optional = true [dependencies.serde_json] version = "1.0.91" -default_features = false +default-features = false optional = true [dependencies.arrow] @@ -36,6 +36,9 @@ workspace = true [dependencies.datafusion-expr] workspace = true +[dependencies.datafusion-functions] +workspace = true + [dependencies.datafusion-proto] workspace = true optional = true diff --git a/vegafusion-common/src/data/scalar.rs b/vegafusion-common/src/data/scalar.rs index d0ffb6d13..11bd551ad 100644 --- a/vegafusion-common/src/data/scalar.rs +++ b/vegafusion-common/src/data/scalar.rs @@ -1,13 +1,14 @@ use crate::error::{Result, VegaFusionError}; -use arrow::array::{new_empty_array, Array, ArrayRef, ListArray}; +use arrow::array::{Array, ArrayRef, ListArray}; use datafusion_common::DataFusionError; use arrow::datatypes::DataType; -use datafusion_common::utils::array_into_list_array; pub use datafusion_common::ScalarValue; #[cfg(feature = "json")] use { + arrow::array::new_empty_array, + datafusion_common::utils::array_into_list_array, serde_json::{Map, Value}, std::ops::Deref, std::sync::Arc, diff --git a/vegafusion-common/src/datatypes.rs b/vegafusion-common/src/datatypes.rs index 784e343f2..a91b55ce2 100644 --- a/vegafusion-common/src/datatypes.rs +++ b/vegafusion-common/src/datatypes.rs @@ -1,10 +1,8 @@ use crate::error::{Result, ResultWithContext}; use arrow::datatypes::DataType; use datafusion_common::DFSchema; -use datafusion_expr::{ - coalesce, expr, lit, BuiltinScalarFunction, Expr, ExprSchemable, ScalarFunctionDefinition, - TryCast, -}; +use datafusion_expr::{lit, Expr, ExprSchemable, TryCast}; +use datafusion_functions::{datetime::expr_fn::to_timestamp_millis, expr_fn::coalesce}; pub fn is_numeric_datatype(dtype: &DataType) -> bool { matches!( @@ -20,6 +18,8 @@ pub fn is_numeric_datatype(dtype: &DataType) -> bool { | DataType::Float16 | DataType::Float32 | DataType::Float64 + | DataType::Decimal128(_, _) + | DataType::Decimal256(_, _) ) } @@ -40,7 +40,11 @@ pub fn is_integer_datatype(dtype: &DataType) -> bool { pub fn 
is_float_datatype(dtype: &DataType) -> bool { matches!( dtype, - DataType::Float16 | DataType::Float32 | DataType::Float64 + DataType::Float16 + | DataType::Float32 + | DataType::Float64 + | DataType::Decimal128(_, _) + | DataType::Decimal256(_, _) ) } @@ -84,17 +88,13 @@ pub fn to_boolean(value: Expr, schema: &DFSchema) -> Result { /// Cast an expression to Float64 if not already numeric. If already numeric, don't perform cast. pub fn to_numeric(value: Expr, schema: &DFSchema) -> Result { let dtype = data_type(&value, schema)?; + let numeric_value = if is_numeric_datatype(&dtype) { value } else if matches!(dtype, DataType::Timestamp(_, _)) { // Convert to milliseconds Expr::TryCast(TryCast { - expr: Box::new(Expr::ScalarFunction(expr::ScalarFunction { - func_def: ScalarFunctionDefinition::BuiltIn( - BuiltinScalarFunction::ToTimestampMillis, - ), - args: vec![value], - })), + expr: Box::new(to_timestamp_millis(vec![value])), data_type: DataType::Int64, }) } else { diff --git a/vegafusion-dataframe/Cargo.toml b/vegafusion-dataframe/Cargo.toml index 740f41ec8..3095ddd43 100644 --- a/vegafusion-dataframe/Cargo.toml +++ b/vegafusion-dataframe/Cargo.toml @@ -15,6 +15,9 @@ async-trait = "0.1.73" path = "../vegafusion-common" version = "1.6.9" +[dependencies.sqlparser] +workspace = true + [dependencies.datafusion-common] workspace = true @@ -23,7 +26,7 @@ workspace = true [dependencies.arrow] workspace = true -default_features = false +default-features = false [dependencies.pyo3] workspace = true diff --git a/vegafusion-dataframe/src/dataframe.rs b/vegafusion-dataframe/src/dataframe.rs index 4a72ac6b0..5f30bc7ec 100644 --- a/vegafusion-dataframe/src/dataframe.rs +++ b/vegafusion-dataframe/src/dataframe.rs @@ -5,6 +5,7 @@ use arrow::record_batch::RecordBatch; use async_trait::async_trait; use datafusion_common::{DFSchema, ScalarValue}; use datafusion_expr::{expr, BuiltInWindowFunction, Expr, WindowFrame, WindowFunctionDefinition}; +use sqlparser::ast::NullTreatment; use std::any::Any; use std::fmt::{Display, Formatter}; use std::sync::Arc; @@ -120,6 +121,7 @@ pub trait DataFrame: Send + Sync + 'static { partition_by: vec![], order_by: vec![], window_frame: WindowFrame::new(Some(true)), + null_treatment: Some(NullTreatment::IgnoreNulls), }) .alias(index_name), Expr::Wildcard { qualifier: None }, diff --git a/vegafusion-datafusion-udfs/Cargo.toml b/vegafusion-datafusion-udfs/Cargo.toml index 1b011bb96..db54cbd87 100644 --- a/vegafusion-datafusion-udfs/Cargo.toml +++ b/vegafusion-datafusion-udfs/Cargo.toml @@ -20,3 +20,6 @@ version = "1.6.9" [dependencies.datafusion-physical-expr] workspace = true + +[dependencies.datafusion-functions] +workspace = true diff --git a/vegafusion-datafusion-udfs/src/udafs/mod.rs b/vegafusion-datafusion-udfs/src/udafs/mod.rs index 8afefd60a..f9dbfe0d1 100644 --- a/vegafusion-datafusion-udfs/src/udafs/mod.rs +++ b/vegafusion-datafusion-udfs/src/udafs/mod.rs @@ -146,8 +146,8 @@ lazy_static! { Arc::new(DataType::Float64), Volatility::Immutable, // Accumulator factory - Arc::new(|dtype| Ok(Box::new(PercentileContAccumulator { - data_type: dtype.clone(), + Arc::new(|accum_args| Ok(Box::new(PercentileContAccumulator { + data_type: accum_args.data_type.clone(), all_values: Default::default(), percentile: 0.25, }))), @@ -165,8 +165,8 @@ lazy_static! 
{ Arc::new(DataType::Float64), Volatility::Immutable, // Accumulator factory - Arc::new(|dtype| Ok(Box::new(PercentileContAccumulator { - data_type: dtype.clone(), + Arc::new(|accum_args| Ok(Box::new(PercentileContAccumulator { + data_type: accum_args.data_type.clone(), all_values: Default::default(), percentile: 0.75, }))), diff --git a/vegafusion-datafusion-udfs/src/udfs/datetime/date_part_tz.rs b/vegafusion-datafusion-udfs/src/udfs/datetime/date_part_tz.rs index e322b92ad..b703a79e9 100644 --- a/vegafusion-datafusion-udfs/src/udfs/datetime/date_part_tz.rs +++ b/vegafusion-datafusion-udfs/src/udfs/datetime/date_part_tz.rs @@ -1,6 +1,6 @@ use crate::udfs::datetime::from_utc_timestamp::from_utc_timestamp; use crate::udfs::datetime::to_utc_timestamp::to_timestamp_ms; -use datafusion_physical_expr::datetime_expressions; +use datafusion_functions::datetime::date_part; use std::any::Any; use std::str::FromStr; use vegafusion_common::datafusion_expr::{ScalarUDFImpl, TypeSignature}; @@ -99,7 +99,8 @@ impl ScalarUDFImpl for DatePartTzUDF { let timestamp_in_tz = ColumnarValue::Array(timestamp_in_tz); // Use DataFusion's built-in date_part implementation - datetime_expressions::date_part(&[ + let udf = date_part(); + udf.invoke(&[ args[0].clone(), // Part timestamp_in_tz, // Timestamp converted to timezone ]) diff --git a/vegafusion-datafusion-udfs/src/udfs/datetime/str_to_utc_timestamp.rs b/vegafusion-datafusion-udfs/src/udfs/datetime/str_to_utc_timestamp.rs index be2790d28..cc6f3333c 100644 --- a/vegafusion-datafusion-udfs/src/udfs/datetime/str_to_utc_timestamp.rs +++ b/vegafusion-datafusion-udfs/src/udfs/datetime/str_to_utc_timestamp.rs @@ -117,14 +117,14 @@ pub fn parse_datetime_fallback( date_str: &str, default_input_tz: &Option, ) -> Option> { - let mut date_tokens = vec![String::from(""), String::from(""), String::from("")]; - let mut time_tokens = vec![ + let mut date_tokens = [String::from(""), String::from(""), String::from("")]; + let mut time_tokens = [ String::from(""), String::from(""), String::from(""), String::from(""), ]; - let mut timezone_tokens = vec![String::from(""), String::from("")]; + let mut timezone_tokens = [String::from(""), String::from("")]; let mut timezone_sign = ' '; let mut date_ind = 0; let mut time_ind = 0; diff --git a/vegafusion-datafusion-udfs/src/udfs/math/isfinite.rs b/vegafusion-datafusion-udfs/src/udfs/math/isfinite.rs index 2769c9105..edf44dcad 100644 --- a/vegafusion-datafusion-udfs/src/udfs/math/isfinite.rs +++ b/vegafusion-datafusion-udfs/src/udfs/math/isfinite.rs @@ -1,10 +1,11 @@ -use datafusion_physical_expr::udf::ScalarUDF; use std::any::Any; use std::sync::Arc; use vegafusion_common::arrow::array::{BooleanArray, Float32Array, Float64Array}; use vegafusion_common::arrow::datatypes::DataType; use vegafusion_common::datafusion_common::{DataFusionError, ScalarValue}; -use vegafusion_common::datafusion_expr::{ColumnarValue, ScalarUDFImpl, Signature, Volatility}; +use vegafusion_common::datafusion_expr::{ + ColumnarValue, ScalarUDF, ScalarUDFImpl, Signature, Volatility, +}; /// `isFinite(value)` /// diff --git a/vegafusion-jni/Cargo.toml b/vegafusion-jni/Cargo.toml index 3d37ad5d6..56f670ae4 100644 --- a/vegafusion-jni/Cargo.toml +++ b/vegafusion-jni/Cargo.toml @@ -4,7 +4,7 @@ version = "1.6.9" edition = "2021" [lib] -crate_type = [ "cdylib",] +crate-type = [ "cdylib",] [features] protobuf-src = [ "vegafusion-core/protobuf-src",] diff --git a/vegafusion-jni/src/lib.rs b/vegafusion-jni/src/lib.rs index 26e286ccf..5c48b28e5 100644 --- 
a/vegafusion-jni/src/lib.rs +++ b/vegafusion-jni/src/lib.rs @@ -312,11 +312,11 @@ pub unsafe extern "system" fn Java_io_vegafusion_VegaFusionRuntime_innerPreTrans } Ok(Err(vf_err)) => { let _ = env.throw_new("io/vegafusion/VegaFusionException", vf_err.to_string()); - return JObject::null().into_raw(); + JObject::null().into_raw() } Err(_unwind_err) => { let _ = env.throw_new("io/vegafusion/VegaFusionException", "Uncaught Error"); - return JObject::null().into_raw(); + JObject::null().into_raw() } } } else { @@ -324,6 +324,6 @@ pub unsafe extern "system" fn Java_io_vegafusion_VegaFusionRuntime_innerPreTrans "io/vegafusion/VegaFusionException", "Failed to parse args to innerPreTransformSpec", ); - return JObject::null().into_raw(); + JObject::null().into_raw() } } diff --git a/vegafusion-python-embed/src/dataframe.rs b/vegafusion-python-embed/src/dataframe.rs index 6be7d543f..b5d11f6d4 100644 --- a/vegafusion-python-embed/src/dataframe.rs +++ b/vegafusion-python-embed/src/dataframe.rs @@ -1,7 +1,8 @@ use arrow::datatypes::Schema; use arrow::pyarrow::FromPyArrow; use async_trait::async_trait; -use datafusion_proto::protobuf::LogicalExprNode; +use datafusion_proto::logical_plan::to_proto::serialize_expr; +use datafusion_proto::logical_plan::DefaultLogicalExtensionCodec; use prost::Message; use pyo3::prelude::PyModule; use pyo3::types::{PyBytes, PyTuple}; @@ -522,10 +523,12 @@ fn exprs_to_py(py: Python, exprs: Vec) -> Result { } fn expr_to_py(py: Python, expr: &Expr) -> Result { + let extension_codec = DefaultLogicalExtensionCodec {}; let proto_module = PyModule::import(py, "vegafusion.proto.datafusion_pb2")?; let logical_expr_class = proto_module.getattr("LogicalExprNode")?; - let proto_sort_expr = LogicalExprNode::try_from(expr)?; + let proto_sort_expr = serialize_expr(expr, &extension_codec)?; + let sort_expr_bytes: Vec = proto_sort_expr.encode_to_vec(); // py_logical_expr = LogicalExprNode() diff --git a/vegafusion-runtime/Cargo.toml b/vegafusion-runtime/Cargo.toml index e68eccd9c..a013cb573 100644 --- a/vegafusion-runtime/Cargo.toml +++ b/vegafusion-runtime/Cargo.toml @@ -103,18 +103,21 @@ workspace = true [dependencies.datafusion-functions] workspace = true +[dependencies.datafusion-functions-array] +workspace = true + [dependencies.tokio] workspace = true features = [ "macros", "rt-multi-thread", "fs",] [dependencies.reqwest] workspace = true -default_features = false +default-features = false features = [ "rustls-tls",] [dev-dependencies.reqwest] workspace = true -default_features = false +default-features = false features = [ "blocking", "rustls-tls",] [dev-dependencies.criterion] diff --git a/vegafusion-runtime/src/expression/compiler/array.rs b/vegafusion-runtime/src/expression/compiler/array.rs index 9732b2ec9..8766c2d4f 100644 --- a/vegafusion-runtime/src/expression/compiler/array.rs +++ b/vegafusion-runtime/src/expression/compiler/array.rs @@ -1,5 +1,6 @@ use crate::expression::compiler::{compile, config::CompilationConfig}; -use datafusion_expr::{expr, BuiltinScalarFunction, Expr, ScalarFunctionDefinition}; +use datafusion_expr::Expr; +use datafusion_functions_array::expr_fn::make_array; use vegafusion_common::datafusion_common::DFSchema; use vegafusion_core::error::Result; use vegafusion_core::proto::gen::expression::ArrayExpression; @@ -14,8 +15,5 @@ pub fn compile_array( let phys_expr = compile(el, config, Some(schema))?; elements.push(phys_expr); } - Ok(Expr::ScalarFunction(expr::ScalarFunction { - func_def: ScalarFunctionDefinition::BuiltIn(BuiltinScalarFunction::MakeArray), 
- args: elements, - })) + Ok(make_array(elements)) } diff --git a/vegafusion-runtime/src/expression/compiler/binary.rs b/vegafusion-runtime/src/expression/compiler/binary.rs index be74713bd..734c998c6 100644 --- a/vegafusion-runtime/src/expression/compiler/binary.rs +++ b/vegafusion-runtime/src/expression/compiler/binary.rs @@ -1,6 +1,8 @@ use crate::expression::compiler::{compile, config::CompilationConfig}; use datafusion_expr::expr::BinaryExpr; -use datafusion_expr::{coalesce, concat, lit, Expr, Operator}; +use datafusion_expr::{lit, Expr, Operator}; +use datafusion_functions::expr_fn::coalesce; +use datafusion_functions::string::expr_fn::concat; use vegafusion_common::datafusion_common::DFSchema; use vegafusion_common::datatypes::{ cast_to, data_type, is_null_literal, is_numeric_datatype, is_string_datatype, to_numeric, @@ -110,7 +112,7 @@ pub fn compile_binary( // plus is string concatenation let lhs_string = to_string(lhs, schema)?; let rhs_string = to_string(rhs, schema)?; - concat(&[lhs_string, rhs_string]) + concat(vec![lhs_string, rhs_string]) } else { // Both sides are non-strings, use regular numeric plus operation // Use result of to_numeric to handle booleans diff --git a/vegafusion-runtime/src/expression/compiler/builtin_functions/array/length.rs b/vegafusion-runtime/src/expression/compiler/builtin_functions/array/length.rs index 6bb710190..694104e81 100644 --- a/vegafusion-runtime/src/expression/compiler/builtin_functions/array/length.rs +++ b/vegafusion-runtime/src/expression/compiler/builtin_functions/array/length.rs @@ -1,5 +1,8 @@ use datafusion_common::DFSchema; -use datafusion_expr::{expr, BuiltinScalarFunction, Expr, ExprSchemable, ScalarFunctionDefinition}; +use datafusion_expr::{expr, Expr, ExprSchemable}; + +use datafusion_functions::unicode::expr_fn::character_length; +use datafusion_functions_array::length::array_length; use vegafusion_common::arrow::datatypes::DataType; use vegafusion_common::error::{ResultWithContext, VegaFusionError}; @@ -15,17 +18,11 @@ pub fn length_transform( let len_expr = match dtype { DataType::Utf8 | DataType::LargeUtf8 => Ok(Expr::Cast(expr::Cast { - expr: Box::new(Expr::ScalarFunction(expr::ScalarFunction { - func_def: ScalarFunctionDefinition::BuiltIn(BuiltinScalarFunction::CharacterLength), - args: vec![arg], - })), + expr: Box::new(character_length(arg)), data_type: DataType::Float64 })), DataType::List(_) | DataType::LargeList(_) | DataType::FixedSizeList(_, _) => Ok(Expr::Cast(expr::Cast { - expr: Box::new(Expr::ScalarFunction(expr::ScalarFunction { - func_def: ScalarFunctionDefinition::BuiltIn(BuiltinScalarFunction::ArrayLength), - args: vec![arg], - })), + expr: Box::new(array_length(arg)), data_type: DataType::Float64 })), _ => Err(VegaFusionError::parse(format!( diff --git a/vegafusion-runtime/src/expression/compiler/builtin_functions/data/vl_selection_test.rs b/vegafusion-runtime/src/expression/compiler/builtin_functions/data/vl_selection_test.rs index ec7d6c047..4c8d43045 100644 --- a/vegafusion-runtime/src/expression/compiler/builtin_functions/data/vl_selection_test.rs +++ b/vegafusion-runtime/src/expression/compiler/builtin_functions/data/vl_selection_test.rs @@ -5,7 +5,8 @@ use std::convert::TryFrom; use crate::task_graph::timezone::RuntimeTzConfig; use datafusion_expr::expr::Case; -use datafusion_expr::{ceil, expr, lit, Between, Expr, ExprSchemable, ScalarFunctionDefinition}; +use datafusion_expr::{expr, lit, Between, Expr, ExprSchemable, ScalarFunctionDefinition}; +use datafusion_functions::expr_fn::ceil; use 
std::str::FromStr; use std::sync::Arc; use vegafusion_common::arrow::datatypes::{DataType, TimeUnit}; diff --git a/vegafusion-runtime/src/expression/compiler/builtin_functions/date_time/date_parts.rs b/vegafusion-runtime/src/expression/compiler/builtin_functions/date_time/date_parts.rs index 1a6cade0f..625d22c42 100644 --- a/vegafusion-runtime/src/expression/compiler/builtin_functions/date_time/date_parts.rs +++ b/vegafusion-runtime/src/expression/compiler/builtin_functions/date_time/date_parts.rs @@ -1,6 +1,7 @@ use crate::expression::compiler::call::TzTransformFn; use crate::task_graph::timezone::RuntimeTzConfig; -use datafusion_expr::{expr, floor, lit, Expr, ExprSchemable, ScalarFunctionDefinition}; +use datafusion_expr::{expr, lit, Expr, ExprSchemable, ScalarFunctionDefinition}; +use datafusion_functions::expr_fn::floor; use std::sync::Arc; use vegafusion_common::arrow::datatypes::{DataType, TimeUnit}; use vegafusion_common::datafusion_common::DFSchema; diff --git a/vegafusion-runtime/src/expression/compiler/call.rs b/vegafusion-runtime/src/expression/compiler/call.rs index 4c6b70461..bd2acdc4a 100644 --- a/vegafusion-runtime/src/expression/compiler/call.rs +++ b/vegafusion-runtime/src/expression/compiler/call.rs @@ -6,11 +6,13 @@ use crate::expression::compiler::builtin_functions::date_time::datetime::{ use crate::expression::compiler::builtin_functions::type_checking::isvalid::is_valid_fn; use crate::expression::compiler::compile; use crate::expression::compiler::config::CompilationConfig; -use datafusion_expr::{expr, BuiltinScalarFunction, Expr, ScalarFunctionDefinition, ScalarUDF}; +use datafusion_expr::{expr, Expr, ScalarFunctionDefinition, ScalarUDF}; use datafusion_functions::expr_fn::isnan; +use datafusion_functions::math::{ + abs, acos, asin, atan, ceil, cos, exp, floor, ln, power, round, sin, sqrt, tan, +}; use std::collections::HashMap; use std::ops::Deref; -use std::str::FromStr; use std::sync::Arc; use vegafusion_common::arrow::datatypes::DataType; use vegafusion_common::data::table::VegaFusionTable; @@ -77,16 +79,9 @@ pub enum VegaFusionCallable { /// produces a new expression. UtcTransform(TzTransformFn), - /// Runtime function that is build in to DataFusion - BuiltinScalarFunction { - function: BuiltinScalarFunction, - /// If Some, all arguments should be cast to provided type - cast: Option, - }, - /// A custom runtime function that's not built into DataFusion ScalarUDF { - udf: ScalarUDF, + udf: Arc, /// If Some, all arguments should be cast to provided type cast: Option, }, @@ -138,14 +133,7 @@ pub fn compile_call( VegaFusionCallable::ScalarUDF { udf, cast } => { let args = compile_scalar_arguments(node, config, schema, cast)?; Ok(Expr::ScalarFunction(expr::ScalarFunction { - func_def: ScalarFunctionDefinition::UDF(Arc::new(udf.clone())), - args, - })) - } - VegaFusionCallable::BuiltinScalarFunction { function, cast } => { - let args = compile_scalar_arguments(node, config, schema, cast)?; - Ok(Expr::ScalarFunction(expr::ScalarFunction { - func_def: ScalarFunctionDefinition::BuiltIn(*function), + func_def: ScalarFunctionDefinition::UDF(udf.clone()), args, })) } @@ -225,31 +213,32 @@ pub fn default_callables() -> HashMap { let mut callables: HashMap = HashMap::new(); callables.insert("if".to_string(), VegaFusionCallable::Macro(Arc::new(if_fn))); - // Numeric functions built into DataFusion with names that match Vega. 
- // Cast arguments to Float64 - for fun_name in &[ - "abs", "acos", "asin", "atan", "ceil", "cos", "exp", "floor", "round", "sin", "sqrt", - "tan", "pow", + // Numeric functions built into DataFusion with mapping to Vega names + for (fun_name, udf) in [ + ("abs", abs()), + ("acos", acos()), + ("asin", asin()), + ("atan", atan()), + ("ceil", ceil()), + ("cos", cos()), + ("exp", exp()), + ("floor", floor()), + ("round", round()), + ("sin", sin()), + ("sqrt", sqrt()), + ("tan", tan()), + ("pow", power()), + ("log", ln()), // Vega log is DataFusion ln ] { - let function = BuiltinScalarFunction::from_str(fun_name).unwrap(); callables.insert( fun_name.to_string(), - VegaFusionCallable::BuiltinScalarFunction { - function, + VegaFusionCallable::ScalarUDF { + udf, cast: Some(DataType::Float64), }, ); } - // DataFusion ln is Vega log - callables.insert( - "log".to_string(), - VegaFusionCallable::BuiltinScalarFunction { - function: BuiltinScalarFunction::Ln, - cast: Some(DataType::Float64), - }, - ); - callables.insert( "isNaN".to_string(), VegaFusionCallable::UnaryTransform(Arc::new(isnan)), @@ -278,7 +267,7 @@ pub fn default_callables() -> HashMap { callables.insert( "span".to_string(), VegaFusionCallable::ScalarUDF { - udf: ScalarUDF::from(SpanUDF::new()), + udf: Arc::new(ScalarUDF::from(SpanUDF::new())), cast: None, }, ); @@ -286,7 +275,7 @@ pub fn default_callables() -> HashMap { callables.insert( "indexof".to_string(), VegaFusionCallable::ScalarUDF { - udf: ScalarUDF::from(IndexOfUDF::new()), + udf: Arc::new(ScalarUDF::from(IndexOfUDF::new())), cast: None, }, ); diff --git a/vegafusion-runtime/src/expression/compiler/member.rs b/vegafusion-runtime/src/expression/compiler/member.rs index 0b153a6a8..860843111 100644 --- a/vegafusion-runtime/src/expression/compiler/member.rs +++ b/vegafusion-runtime/src/expression/compiler/member.rs @@ -2,7 +2,8 @@ use crate::expression::compiler::builtin_functions::array::length::length_transf use crate::expression::compiler::compile; use crate::expression::compiler::config::CompilationConfig; use crate::expression::compiler::utils::ExprHelpers; -use datafusion_expr::{expr, lit, BuiltinScalarFunction, Expr, ScalarFunctionDefinition}; +use datafusion_expr::{expr, lit, Expr, ScalarFunctionDefinition}; +use datafusion_functions::expr_fn::substring; use std::convert::TryFrom; use std::sync::Arc; use vegafusion_common::arrow::array::Int64Array; @@ -90,10 +91,7 @@ pub fn compile_member( } else if matches!(dtype, DataType::Utf8 | DataType::LargeUtf8) { if let Some(index) = index { // SQL substr function is 1-indexed so add one - Expr::ScalarFunction(expr::ScalarFunction { - func_def: ScalarFunctionDefinition::BuiltIn(BuiltinScalarFunction::Substr), - args: vec![compiled_object, lit((index + 1) as i64), lit(1i64)], - }) + substring(compiled_object, lit((index + 1) as i64), lit(1i64)) } else { return Err(VegaFusionError::compilation(format!( "Non-numeric element index: {property_string}" diff --git a/vegafusion-runtime/src/expression/compiler/mod.rs b/vegafusion-runtime/src/expression/compiler/mod.rs index b77420a06..d7b66bfd9 100644 --- a/vegafusion-runtime/src/expression/compiler/mod.rs +++ b/vegafusion-runtime/src/expression/compiler/mod.rs @@ -60,15 +60,15 @@ mod test_compile { use crate::expression::compiler::compile; use crate::expression::compiler::config::CompilationConfig; use crate::expression::compiler::utils::ExprHelpers; + use datafusion_functions::expr_fn::{coalesce, concat}; + use datafusion_functions_array::expr_fn::make_array; use 
vegafusion_core::expression::parser::parse; use crate::task_graph::timezone::RuntimeTzConfig; use datafusion_common::utils::array_into_list_array; use datafusion_common::{DFSchema, ScalarValue}; use datafusion_expr::expr::{BinaryExpr, Case, TryCast}; - use datafusion_expr::{ - concat, expr, lit, BuiltinScalarFunction, Expr, Operator, ScalarFunctionDefinition, - }; + use datafusion_expr::{expr, lit, not, Expr, Operator, ScalarFunctionDefinition}; use std::collections::HashMap; use std::convert::TryFrom; @@ -174,16 +174,13 @@ mod test_compile { println!("expr: {result_expr:?}"); // unary not should cast numeric value to boolean - let expected_expr = !Expr::ScalarFunction(expr::ScalarFunction { - func_def: ScalarFunctionDefinition::BuiltIn(BuiltinScalarFunction::Coalesce), - args: vec![ - Expr::TryCast(TryCast { - expr: Box::new(lit(32.0)), - data_type: DataType::Boolean, - }), - lit(false), - ], - }); + let expected_expr = not(coalesce(vec![ + Expr::TryCast(TryCast { + expr: Box::new(lit(32.0)), + data_type: DataType::Boolean, + }), + lit(false), + ])); assert_eq!(result_expr, expected_expr); @@ -200,20 +197,16 @@ mod test_compile { let expr = parse("32? 7: 9").unwrap(); let result_expr = compile(&expr, &Default::default(), None).unwrap(); println!("expr: {result_expr:?}"); - let expected_expr = Expr::Case(Case { expr: None, when_then_expr: vec![( - Box::new(Expr::ScalarFunction(expr::ScalarFunction { - func_def: ScalarFunctionDefinition::BuiltIn(BuiltinScalarFunction::Coalesce), - args: vec![ - Expr::TryCast(TryCast { - expr: Box::new(lit(32.0)), - data_type: DataType::Boolean, - }), - lit(false), - ], - })), + Box::new(coalesce(vec![ + Expr::TryCast(TryCast { + expr: Box::new(lit(32.0)), + data_type: DataType::Boolean, + }), + lit(false), + ])), Box::new(lit(7.0)), )], else_expr: Some(Box::new(lit(9.0))), @@ -259,16 +252,13 @@ mod test_compile { let expected_expr = Expr::Case(Case { expr: None, when_then_expr: vec![( - Box::new(Expr::ScalarFunction(expr::ScalarFunction { - func_def: ScalarFunctionDefinition::BuiltIn(BuiltinScalarFunction::Coalesce), - args: vec![ - Expr::TryCast(TryCast { - expr: Box::new(lit(5.0)), - data_type: DataType::Boolean, - }), - lit(false), - ], - })), + Box::new(coalesce(vec![ + Expr::TryCast(TryCast { + expr: Box::new(lit(5.0)), + data_type: DataType::Boolean, + }), + lit(false), + ])), Box::new(lit(55.0)), )], else_expr: Some(Box::new(lit(5.0))), @@ -332,7 +322,7 @@ mod test_compile { let expr = parse("'2' + '4'").unwrap(); let result_expr = compile(&expr, &Default::default(), None).unwrap(); - let expected_expr = concat(&[lit("2"), lit("4")]); + let expected_expr = concat(vec![lit("2"), lit("4")]); println!("expr: {result_expr:?}"); assert_eq!(result_expr, expected_expr); @@ -391,11 +381,7 @@ mod test_compile { fn test_compile_array_numeric() { let expr = parse("[1, 2, 3]").unwrap(); let result_expr = compile(&expr, &Default::default(), None).unwrap(); - - let expected_expr = Expr::ScalarFunction(expr::ScalarFunction { - func_def: ScalarFunctionDefinition::BuiltIn(BuiltinScalarFunction::MakeArray), - args: vec![lit(1.0), lit(2.0), lit(3.0)], - }); + let expected_expr = make_array(vec![lit(1.0), lit(2.0), lit(3.0)]); println!("expr: {result_expr:?}"); assert_eq!(result_expr, expected_expr); @@ -415,10 +401,7 @@ mod test_compile { let expr = parse("[]").unwrap(); let result_expr = compile(&expr, &Default::default(), None).unwrap(); - let expected_expr = Expr::ScalarFunction(expr::ScalarFunction { - func_def: 
ScalarFunctionDefinition::BuiltIn(BuiltinScalarFunction::MakeArray), - args: vec![], - }); + let expected_expr = make_array(vec![]); println!("expr: {result_expr:?}"); assert_eq!(result_expr, expected_expr); @@ -436,23 +419,12 @@ mod test_compile { let expr = parse("[[1, 2], [3, 4], [5, 6]]").unwrap(); let result_expr = compile(&expr, &Default::default(), None).unwrap(); - let expected_expr = Expr::ScalarFunction(expr::ScalarFunction { - func_def: ScalarFunctionDefinition::BuiltIn(BuiltinScalarFunction::MakeArray), - args: vec![ - Expr::ScalarFunction(expr::ScalarFunction { - func_def: ScalarFunctionDefinition::BuiltIn(BuiltinScalarFunction::MakeArray), - args: vec![lit(1.0), lit(2.0)], - }), - Expr::ScalarFunction(expr::ScalarFunction { - func_def: ScalarFunctionDefinition::BuiltIn(BuiltinScalarFunction::MakeArray), - args: vec![lit(3.0), lit(4.0)], - }), - Expr::ScalarFunction(expr::ScalarFunction { - func_def: ScalarFunctionDefinition::BuiltIn(BuiltinScalarFunction::MakeArray), - args: vec![lit(5.0), lit(6.0)], - }), - ], - }); + let expected_expr = make_array(vec![ + make_array(vec![lit(1.0), lit(2.0)]), + make_array(vec![lit(3.0), lit(4.0)]), + make_array(vec![lit(5.0), lit(6.0)]), + ]); + println!("expr: {result_expr:?}"); assert_eq!(result_expr, expected_expr); @@ -602,16 +574,13 @@ mod test_compile { let expected_expr = Expr::Case(Case { expr: None, when_then_expr: vec![( - Box::new(Expr::ScalarFunction(expr::ScalarFunction { - func_def: ScalarFunctionDefinition::BuiltIn(BuiltinScalarFunction::Coalesce), - args: vec![ - Expr::TryCast(TryCast { - expr: Box::new(lit(32.0)), - data_type: DataType::Boolean, - }), - lit(false), - ], - })), + Box::new(coalesce(vec![ + Expr::TryCast(TryCast { + expr: Box::new(lit(32.0)), + data_type: DataType::Boolean, + }), + lit(false), + ])), Box::new(lit(7.0)), )], else_expr: Some(Box::new(lit(9.0))), diff --git a/vegafusion-runtime/src/transform/aggregate.rs b/vegafusion-runtime/src/transform/aggregate.rs index 11b7e776a..cb77369ef 100644 --- a/vegafusion-runtime/src/transform/aggregate.rs +++ b/vegafusion-runtime/src/transform/aggregate.rs @@ -2,6 +2,7 @@ use crate::expression::compiler::config::CompilationConfig; use crate::transform::TransformTrait; use datafusion_expr::{avg, count, count_distinct, lit, max, min, sum, Expr}; +use sqlparser::ast::NullTreatment; use std::collections::HashMap; use async_trait::async_trait; @@ -127,7 +128,7 @@ pub fn make_aggr_expr_for_named_col( ) -> Result { let column = if let Some(col_name) = col_name { let col_name = unescape_field(&col_name); - if schema.index_of_column_by_name(None, &col_name).is_err() { + if schema.index_of_column_by_name(None, &col_name).is_none() { // No column with specified name, short circuit to return default value return if matches!(op, AggregateOp::Sum | AggregateOp::Count) { // return zero for sum and count @@ -170,6 +171,7 @@ pub fn make_agg_expr_for_col_expr( args: vec![numeric_column()?], filter: None, order_by: None, + null_treatment: Some(NullTreatment::IgnoreNulls), }), AggregateOp::Variance => Expr::AggregateFunction(expr::AggregateFunction { func_def: AggregateFunctionDefinition::BuiltIn( @@ -179,6 +181,7 @@ pub fn make_agg_expr_for_col_expr( args: vec![numeric_column()?], filter: None, order_by: None, + null_treatment: Some(NullTreatment::IgnoreNulls), }), AggregateOp::Variancep => Expr::AggregateFunction(expr::AggregateFunction { func_def: AggregateFunctionDefinition::BuiltIn( @@ -188,6 +191,7 @@ pub fn make_agg_expr_for_col_expr( args: vec![numeric_column()?], filter: None, 
order_by: None, + null_treatment: Some(NullTreatment::IgnoreNulls), }), AggregateOp::Stdev => Expr::AggregateFunction(expr::AggregateFunction { func_def: AggregateFunctionDefinition::BuiltIn( @@ -197,6 +201,7 @@ pub fn make_agg_expr_for_col_expr( args: vec![numeric_column()?], filter: None, order_by: None, + null_treatment: Some(NullTreatment::IgnoreNulls), }), AggregateOp::Stdevp => Expr::AggregateFunction(expr::AggregateFunction { func_def: AggregateFunctionDefinition::BuiltIn( @@ -206,6 +211,7 @@ pub fn make_agg_expr_for_col_expr( args: vec![numeric_column()?], filter: None, order_by: None, + null_treatment: Some(NullTreatment::IgnoreNulls), }), AggregateOp::Valid => { let valid = Expr::Cast(expr::Cast { @@ -235,6 +241,7 @@ pub fn make_agg_expr_for_col_expr( distinct: false, filter: None, order_by: None, + null_treatment: Some(NullTreatment::IgnoreNulls), }), AggregateOp::Q3 => Expr::AggregateFunction(expr::AggregateFunction { func_def: AggregateFunctionDefinition::UDF(Arc::new((*Q3_UDF).clone())), @@ -242,6 +249,7 @@ pub fn make_agg_expr_for_col_expr( distinct: false, filter: None, order_by: None, + null_treatment: Some(NullTreatment::IgnoreNulls), }), _ => { return Err(VegaFusionError::specification(format!( diff --git a/vegafusion-runtime/src/transform/bin.rs b/vegafusion-runtime/src/transform/bin.rs index 66b6d4fed..834e56a3f 100644 --- a/vegafusion-runtime/src/transform/bin.rs +++ b/vegafusion-runtime/src/transform/bin.rs @@ -9,7 +9,8 @@ use datafusion_expr::lit; use datafusion_common::scalar::ScalarValue; use datafusion_common::utils::array_into_list_array; use datafusion_common::DFSchema; -use datafusion_expr::{abs, floor, when, Expr}; +use datafusion_expr::{when, Expr}; +use datafusion_functions::expr_fn::{abs, floor}; use float_cmp::approx_eq; use std::ops::{Add, Div, Mul, Sub}; use std::sync::Arc; diff --git a/vegafusion-runtime/src/transform/collect.rs b/vegafusion-runtime/src/transform/collect.rs index 8e067a09f..c948fb42b 100644 --- a/vegafusion-runtime/src/transform/collect.rs +++ b/vegafusion-runtime/src/transform/collect.rs @@ -2,6 +2,7 @@ use crate::expression::compiler::config::CompilationConfig; use crate::transform::TransformTrait; use datafusion_expr::{expr, Expr, WindowFunctionDefinition}; +use sqlparser::ast::NullTreatment; use std::sync::Arc; use vegafusion_core::error::{Result, ResultWithContext}; @@ -49,6 +50,7 @@ impl TransformTrait for Collect { partition_by: vec![], order_by: sort_exprs, window_frame: WindowFrame::new(Some(true)), + null_treatment: Some(NullTreatment::IgnoreNulls), }) .alias(ORDER_COL); diff --git a/vegafusion-runtime/src/transform/identifier.rs b/vegafusion-runtime/src/transform/identifier.rs index 1398f1e7b..4877fc62e 100644 --- a/vegafusion-runtime/src/transform/identifier.rs +++ b/vegafusion-runtime/src/transform/identifier.rs @@ -3,6 +3,7 @@ use crate::transform::TransformTrait; use async_trait::async_trait; use datafusion_expr::{expr, BuiltInWindowFunction, Expr, WindowFrame, WindowFunctionDefinition}; +use sqlparser::ast::NullTreatment; use std::sync::Arc; use vegafusion_common::column::flat_col; use vegafusion_common::data::ORDER_COL; @@ -29,6 +30,7 @@ impl TransformTrait for Identifier { nulls_first: false, })], window_frame: WindowFrame::new(Some(true)), + null_treatment: Some(NullTreatment::IgnoreNulls), }) .alias(&self.r#as); diff --git a/vegafusion-runtime/src/transform/pivot.rs b/vegafusion-runtime/src/transform/pivot.rs index e9ee57381..c53d098e6 100644 --- a/vegafusion-runtime/src/transform/pivot.rs +++ 
b/vegafusion-runtime/src/transform/pivot.rs @@ -2,7 +2,8 @@ use crate::expression::compiler::config::CompilationConfig; use crate::transform::aggregate::make_agg_expr_for_col_expr; use crate::transform::TransformTrait; use async_trait::async_trait; -use datafusion_expr::{coalesce, expr::Sort, lit, min, when, Expr}; +use datafusion_expr::{expr::Sort, lit, min, when, Expr}; +use datafusion_functions::expr_fn::coalesce; use std::sync::Arc; use vegafusion_common::arrow::array::StringArray; use vegafusion_common::arrow::datatypes::DataType; diff --git a/vegafusion-runtime/src/transform/timeunit.rs b/vegafusion-runtime/src/transform/timeunit.rs index ad99aa591..d33ae7f1d 100644 --- a/vegafusion-runtime/src/transform/timeunit.rs +++ b/vegafusion-runtime/src/transform/timeunit.rs @@ -2,6 +2,7 @@ use crate::expression::compiler::config::CompilationConfig; use crate::transform::TransformTrait; use async_trait::async_trait; use datafusion_common::DFSchema; +use datafusion_functions::expr_fn::floor; use std::collections::HashSet; use std::ops::{Add, Div, Mul, Sub}; use std::sync::Arc; @@ -11,7 +12,7 @@ use vegafusion_core::proto::gen::transforms::{TimeUnit, TimeUnitTimeZone, TimeUn use vegafusion_core::task_graph::task_value::TaskValue; use datafusion_expr::expr::Cast; -use datafusion_expr::{expr, floor, lit, Expr, ExprSchemable, ScalarFunctionDefinition}; +use datafusion_expr::{expr, lit, Expr, ExprSchemable, ScalarFunctionDefinition}; use itertools::Itertools; use vegafusion_common::column::{flat_col, unescaped_col}; use vegafusion_common::datatypes::{cast_to, is_numeric_datatype}; diff --git a/vegafusion-runtime/src/transform/window.rs b/vegafusion-runtime/src/transform/window.rs index 05fc57a0d..33665277d 100644 --- a/vegafusion-runtime/src/transform/window.rs +++ b/vegafusion-runtime/src/transform/window.rs @@ -4,6 +4,7 @@ use async_trait::async_trait; use datafusion_common::ScalarValue; use datafusion_expr::{aggregate_function, expr, lit, Expr, WindowFrame, WindowFunctionDefinition}; +use sqlparser::ast::NullTreatment; use std::sync::Arc; use vegafusion_core::error::Result; use vegafusion_core::proto::gen::transforms::{ @@ -43,7 +44,7 @@ impl TransformTrait for Window { .schema_df()? 
.fields() .iter() - .map(|f| flat_col(f.field().name())) + .map(|f| flat_col(f.name())) .collect(); if order_by.is_empty() { @@ -184,6 +185,7 @@ impl TransformTrait for Window { partition_by: partition_by.clone(), order_by: order_by.clone(), window_frame: window_frame.clone(), + null_treatment: Some(NullTreatment::IgnoreNulls), }); if let Some(alias) = self.aliases.get(i) { diff --git a/vegafusion-runtime/tests/test_chart_state.rs b/vegafusion-runtime/tests/test_chart_state.rs index 9059694f7..82ef107c1 100644 --- a/vegafusion-runtime/tests/test_chart_state.rs +++ b/vegafusion-runtime/tests/test_chart_state.rs @@ -1,3 +1,8 @@ +fn crate_dir() -> String { + std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR")) + .display() + .to_string() +} #[cfg(test)] mod tests { use crate::crate_dir; @@ -138,9 +143,3 @@ mod tests { assert_eq!(response_updates, expected_updates) } } - -fn crate_dir() -> String { - std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR")) - .display() - .to_string() -} diff --git a/vegafusion-runtime/tests/test_destringify_selection_datasets.rs b/vegafusion-runtime/tests/test_destringify_selection_datasets.rs index 6fbcee054..5872cb657 100644 --- a/vegafusion-runtime/tests/test_destringify_selection_datasets.rs +++ b/vegafusion-runtime/tests/test_destringify_selection_datasets.rs @@ -1,3 +1,8 @@ +fn crate_dir() -> String { + std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR")) + .display() + .to_string() +} #[cfg(test)] mod tests { use crate::crate_dir; @@ -60,9 +65,3 @@ mod tests { } } } - -fn crate_dir() -> String { - std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR")) - .display() - .to_string() -} diff --git a/vegafusion-runtime/tests/test_pre_transform_extract.rs b/vegafusion-runtime/tests/test_pre_transform_extract.rs index dcb811bc6..547dd0cdb 100644 --- a/vegafusion-runtime/tests/test_pre_transform_extract.rs +++ b/vegafusion-runtime/tests/test_pre_transform_extract.rs @@ -1,3 +1,8 @@ +fn crate_dir() -> String { + std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR")) + .display() + .to_string() +} #[cfg(test)] mod tests { use crate::crate_dir; @@ -68,9 +73,3 @@ mod tests { ); } } - -fn crate_dir() -> String { - std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR")) - .display() - .to_string() -} diff --git a/vegafusion-runtime/tests/test_pre_transform_keep_variables.rs b/vegafusion-runtime/tests/test_pre_transform_keep_variables.rs index 35e9be0d8..32e5330c9 100644 --- a/vegafusion-runtime/tests/test_pre_transform_keep_variables.rs +++ b/vegafusion-runtime/tests/test_pre_transform_keep_variables.rs @@ -1,3 +1,8 @@ +fn crate_dir() -> String { + std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR")) + .display() + .to_string() +} #[cfg(test)] mod tests { use crate::crate_dir; @@ -68,7 +73,7 @@ mod tests { assert!(warnings.is_empty()); assert_eq!(tx_spec.signals.len(), 1); - let signal0 = tx_spec.signals.get(0).unwrap(); + let signal0 = tx_spec.signals.first().unwrap(); assert_eq!( signal0.name, "layer_0_layer_0_bin_maxbins_10_IMDB_Rating_bins" @@ -95,9 +100,3 @@ mod tests { } } } - -fn crate_dir() -> String { - std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR")) - .display() - .to_string() -} diff --git a/vegafusion-runtime/tests/test_pre_transform_values.rs b/vegafusion-runtime/tests/test_pre_transform_values.rs index 7de4f92c7..f9ffa6284 100644 --- a/vegafusion-runtime/tests/test_pre_transform_values.rs +++ b/vegafusion-runtime/tests/test_pre_transform_values.rs @@ -1,3 +1,18 @@ +fn crate_dir() -> String { + std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR")) + .display() + 
.to_string() +} + +fn setup_s3_environment_vars() { + unsafe { + std::env::set_var("AWS_DEFAULT_REGION", "us-east-1"); + std::env::set_var("AWS_ACCESS_KEY_ID", "access_key123"); + std::env::set_var("AWS_SECRET_ACCESS_KEY", "secret_key123"); + std::env::set_var("AWS_ENDPOINT", "http://127.0.0.1:9000"); + std::env::set_var("AWS_ALLOW_HTTP", "true"); + } +} #[cfg(test)] mod tests { use crate::{crate_dir, setup_s3_environment_vars}; @@ -421,17 +436,3 @@ mod tests { } } } - -fn crate_dir() -> String { - std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR")) - .display() - .to_string() -} - -fn setup_s3_environment_vars() { - std::env::set_var("AWS_DEFAULT_REGION", "us-east-1"); - std::env::set_var("AWS_ACCESS_KEY_ID", "access_key123"); - std::env::set_var("AWS_SECRET_ACCESS_KEY", "secret_key123"); - std::env::set_var("AWS_ENDPOINT", "http://127.0.0.1:9000"); - std::env::set_var("AWS_ALLOW_HTTP", "true"); -} diff --git a/vegafusion-runtime/tests/test_projection_pushdown.rs b/vegafusion-runtime/tests/test_projection_pushdown.rs index 96584e597..886cfa464 100644 --- a/vegafusion-runtime/tests/test_projection_pushdown.rs +++ b/vegafusion-runtime/tests/test_projection_pushdown.rs @@ -1,3 +1,8 @@ +fn crate_dir() -> String { + std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR")) + .display() + .to_string() +} #[cfg(test)] mod test_custom_specs { use crate::crate_dir; @@ -53,9 +58,3 @@ mod test_custom_specs { } } } - -fn crate_dir() -> String { - std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR")) - .display() - .to_string() -} diff --git a/vegafusion-runtime/tests/util/check.rs b/vegafusion-runtime/tests/util/check.rs index ad7a27800..84dab9388 100644 --- a/vegafusion-runtime/tests/util/check.rs +++ b/vegafusion-runtime/tests/util/check.rs @@ -131,7 +131,7 @@ pub fn eval_vegafusion_transforms( .unwrap(); let result_signals = result_signals .into_iter() - .map(|v| v.as_scalar().map(|v| v.clone())) + .map(|v| v.as_scalar().cloned()) .collect::>>() .unwrap(); (result_data, result_signals) diff --git a/vegafusion-sql/Cargo.toml b/vegafusion-sql/Cargo.toml index cadde54d5..f17a3a5cf 100644 --- a/vegafusion-sql/Cargo.toml +++ b/vegafusion-sql/Cargo.toml @@ -6,7 +6,7 @@ edition = "2021" description = "VegaFusion SQL dialect generation and connection implementations" [features] -datafusion-conn = [ "datafusion", "tempfile", "reqwest", "reqwest-retry", "reqwest-middleware", "vegafusion-datafusion-udfs", "object_store", "url", "vegafusion-common/object_store",] +datafusion-conn = [ "datafusion", "tempfile", "reqwest", "reqwest-retry", "reqwest-middleware", "vegafusion-datafusion-udfs", "object_store", "url", "vegafusion-common/object_store", "vegafusion-common/prettyprint",] pyarrow = [ "pyo3", "datafusion-common/pyarrow", "vegafusion-common/pyarrow", "vegafusion-dataframe/pyarrow",] [dependencies] @@ -43,7 +43,7 @@ optional = true [dependencies.arrow] workspace = true -default_features = false +default-features = false features = [ "ipc",] [dependencies.datafusion-common] @@ -52,6 +52,9 @@ workspace = true [dependencies.datafusion-expr] workspace = true +[dependencies.datafusion-functions] +workspace = true + [dependencies.datafusion] workspace = true optional = true diff --git a/vegafusion-sql/src/compile/expr.rs b/vegafusion-sql/src/compile/expr.rs index 4b54cdec6..c0e728530 100644 --- a/vegafusion-sql/src/compile/expr.rs +++ b/vegafusion-sql/src/compile/expr.rs @@ -4,7 +4,7 @@ use arrow::datatypes::DataType; use datafusion_common::{DFSchema, ScalarValue}; use sqlparser::ast::{ BinaryOperator as 
SqlBinaryOperator, Expr as SqlExpr, Function as SqlFunction, - FunctionArg as SqlFunctionArg, FunctionArg, Ident, ObjectName as SqlObjectName, ObjectName, + FunctionArg as SqlFunctionArg, Ident, ObjectName as SqlObjectName, UnaryOperator as SqlUnaryOperator, WindowFrame as SqlWindowFrame, WindowFrameBound as SqlWindowBound, WindowFrameUnits as SqlWindowFrameUnits, WindowSpec as SqlWindowSpec, WindowType, @@ -318,6 +318,7 @@ impl ToSqlExpr for Expr { partition_by, order_by, window_frame, + null_treatment: _, }) => { // Extract function name let (fun_name, supports_frame) = match fun { @@ -438,10 +439,11 @@ impl ToSqlExpr for Expr { partition_by, order_by, window_frame: sql_window_frame, + window_name: None, }); let sql_fun = SqlFunction { - name: ObjectName(vec![Ident { + name: SqlObjectName(vec![Ident { value: fun_name, quote_style: None, }]), @@ -603,7 +605,7 @@ fn translate_function_args( args: &[Expr], dialect: &Dialect, schema: &DFSchema, -) -> Result<Vec<FunctionArg>> { +) -> Result<Vec<SqlFunctionArg>> { args.iter() .map(|expr| { Ok(SqlFunctionArg::Unnamed( @@ -642,9 +644,9 @@ mod tests { use arrow::datatypes::DataType; use datafusion_common::DFSchema; use datafusion_expr::expr::Cast; - use datafusion_expr::{ - expr, lit, Between, BuiltinScalarFunction, Expr, ScalarFunctionDefinition, - }; + use datafusion_expr::{lit, Between, Expr}; + use datafusion_functions::expr_fn::sin; + use datafusion_functions::string::expr_fn::upper; use vegafusion_common::column::flat_col; fn schema() -> DFSchema { @@ -662,10 +664,7 @@ mod tests { #[test] pub fn test2() { - let df_expr = Expr::ScalarFunction(expr::ScalarFunction { - func_def: ScalarFunctionDefinition::BuiltIn(BuiltinScalarFunction::Sin), - args: vec![lit(1.2)], - }) + flat_col("B"); + let df_expr = sin(lit(1.2)) + flat_col("B"); let dialect: Dialect = Dialect::datafusion(); let sql_expr = df_expr.to_sql(&dialect, &schema()).unwrap(); @@ -676,11 +675,7 @@ mod tests { #[test] pub fn test3() { - let df_expr = Expr::ScalarFunction(expr::ScalarFunction { - func_def: ScalarFunctionDefinition::BuiltIn(BuiltinScalarFunction::Upper), - args: vec![lit("foo")], - }); - + let df_expr = upper(lit("foo")); let dialect: Dialect = Dialect::datafusion(); let sql_expr = df_expr.to_sql(&dialect, &schema()).unwrap(); println!("{sql_expr:?}"); diff --git a/vegafusion-sql/src/compile/scalar.rs b/vegafusion-sql/src/compile/scalar.rs index 94801421b..873b79565 100644 --- a/vegafusion-sql/src/compile/scalar.rs +++ b/vegafusion-sql/src/compile/scalar.rs @@ -255,6 +255,9 @@ impl ToSqlScalar for ScalarValue { ScalarValue::LargeList(_) => Err(VegaFusionError::internal( "LargeList cannot be converted to SQL", )), + ScalarValue::Union(_, _, _) => Err(VegaFusionError::internal( + "Union cannot be converted to SQL", + )), } } } diff --git a/vegafusion-sql/src/connection/datafusion_conn.rs b/vegafusion-sql/src/connection/datafusion_conn.rs index 24de7dd3c..735548399 100644 --- a/vegafusion-sql/src/connection/datafusion_conn.rs +++ b/vegafusion-sql/src/connection/datafusion_conn.rs @@ -4,6 +4,7 @@ use crate::dialect::Dialect; use arrow::datatypes::{DataType, Field, Schema, SchemaRef}; use arrow::ipc::reader::{FileReader, StreamReader}; use arrow::record_batch::RecordBatch; +use datafusion::config::TableOptions; use datafusion::datasource::listing::ListingTableUrl; use datafusion::datasource::MemTable; use datafusion::execution::context::SessionState; @@ -124,7 +125,11 @@ impl Connection for DataFusionConnection { let mut tables: HashMap<String, Schema> = HashMap::new(); for table_name in schema_provider.table_names() { - let 
schema = schema_provider.table(&table_name).await.unwrap().schema(); + let schema = schema_provider + .table(&table_name) + .await? + .with_context(|| format!("Failed to get table {table_name}"))? + .schema(); tables.insert(table_name, schema.as_ref().clone()); } Ok(tables) } @@ -380,7 +385,7 @@ impl SqlConnection for DataFusionConnection { .schema() .fields() .iter() - .map(|f| f.field().as_ref().clone().with_nullable(true)) + .map(|f| f.as_ref().clone().with_nullable(true)) .collect(); let expected_fields: Vec<_> = schema .fields @@ -434,7 +439,9 @@ async fn build_csv_schema( ctx: &SessionContext, ) -> Result { let table_path = ListingTableUrl::parse(uri.into().as_str())?; - let listing_options = csv_opts.to_listing_options(&ctx.copied_config()); + let listing_options = + csv_opts.to_listing_options(&ctx.copied_config(), TableOptions::default()); + let inferred_schema = listing_options .infer_schema(&ctx.state(), &table_path) .await?; diff --git a/vegafusion-sql/src/connection/datafusion_py_datasource.rs b/vegafusion-sql/src/connection/datafusion_py_datasource.rs index 23eb6d160..3c75d7ccd 100644 --- a/vegafusion-sql/src/connection/datafusion_py_datasource.rs +++ b/vegafusion-sql/src/connection/datafusion_py_datasource.rs @@ -4,9 +4,11 @@ use async_trait::async_trait; use datafusion::datasource::TableProvider; use datafusion::execution::context::SessionState; use datafusion::execution::{SendableRecordBatchStream, TaskContext}; -use datafusion::physical_expr::{Partitioning, PhysicalSortExpr}; +use datafusion::physical_expr::{EquivalenceProperties, Partitioning}; use datafusion::physical_plan::memory::MemoryStream; -use datafusion::physical_plan::{DisplayAs, DisplayFormatType, ExecutionPlan}; +use datafusion::physical_plan::{ + DisplayAs, DisplayFormatType, ExecutionMode, ExecutionPlan, PlanProperties, +}; use datafusion_common::{project_schema, DataFusionError, Statistics}; use datafusion_expr::{Expr, TableType}; use pyo3::types::PyTuple; @@ -76,16 +78,29 @@ impl TableProvider for PyDatasource { struct PyDatasourceExec { db: PyDatasource, projected_schema: SchemaRef, + plan_properties: PlanProperties, } impl PyDatasourceExec { fn new(projections: Option<&Vec<usize>>, schema: SchemaRef, db: PyDatasource) -> Self { let projected_schema = project_schema(&schema, projections).unwrap(); + let plan_properties = Self::compute_properties(projected_schema.clone()); Self { db, projected_schema, + plan_properties, } } + + /// This function creates the cache object that stores the plan properties such as schema, equivalence properties, ordering, partitioning, etc. 
+ fn compute_properties(schema: SchemaRef) -> PlanProperties { + let eq_properties = EquivalenceProperties::new(schema); + PlanProperties::new( + eq_properties, + Partitioning::UnknownPartitioning(1), + ExecutionMode::Bounded, + ) + } } impl DisplayAs for PyDatasourceExec { @@ -103,14 +118,6 @@ impl ExecutionPlan for PyDatasourceExec { self.projected_schema.clone() } - fn output_partitioning(&self) -> Partitioning { - Partitioning::UnknownPartitioning(1) - } - - fn output_ordering(&self) -> Option<&[PhysicalSortExpr]> { - None - } - fn children(&self) -> Vec<Arc<dyn ExecutionPlan>> { Vec::new() } @@ -151,4 +158,8 @@ impl ExecutionPlan for PyDatasourceExec { fn statistics(&self) -> datafusion_common::Result<Statistics> { Ok(Statistics::new_unknown(self.schema().as_ref())) } + + fn properties(&self) -> &PlanProperties { + &self.plan_properties + } } diff --git a/vegafusion-sql/src/dataframe/mod.rs b/vegafusion-sql/src/dataframe/mod.rs index 7d6eacdb8..f7ab036f3 100644 --- a/vegafusion-sql/src/dataframe/mod.rs +++ b/vegafusion-sql/src/dataframe/mod.rs @@ -6,16 +6,16 @@ use crate::dialect::{Dialect, ValuesMode}; use arrow::datatypes::{DataType, Field, FieldRef, Fields, Schema, SchemaRef}; use arrow::record_batch::RecordBatch; use async_trait::async_trait; -use datafusion_common::{Column, DFSchema, OwnedTableReference, ScalarValue}; +use datafusion_common::{Column, DFSchema, ScalarValue, TableReference}; use datafusion_expr::expr::AggregateFunctionDefinition; use datafusion_expr::{ - abs, expr, is_null, lit, max, min, when, AggregateFunction, BuiltInWindowFunction, - BuiltinScalarFunction, Expr, ExprSchemable, ScalarFunctionDefinition, WindowFrame, - WindowFunctionDefinition, + expr, is_null, lit, max, min, when, AggregateFunction, BuiltInWindowFunction, Expr, + ExprSchemable, WindowFrame, WindowFunctionDefinition, }; +use datafusion_functions::expr_fn::{abs, coalesce}; use sqlparser::ast::{ - Cte, Expr as SqlExpr, GroupByExpr, Ident, Query, Select, SelectItem, SetExpr, Statement, - TableAlias, TableFactor, TableWithJoins, Values, WildcardAdditionalOptions, With, + Cte, Expr as SqlExpr, GroupByExpr, Ident, NullTreatment, Query, Select, SelectItem, SetExpr, + Statement, TableAlias, TableFactor, TableWithJoins, Values, WildcardAdditionalOptions, With, }; use sqlparser::parser::Parser; use std::any::Any; @@ -272,6 +272,7 @@ impl SqlDataFrame { } expr_selects.push(Select { + value_table_mode: None, distinct: None, top: None, projection, @@ -395,6 +396,7 @@ impl SqlDataFrame { having: None, qualify: None, named_window: Default::default(), + value_table_mode: None, })); Query { with: None, @@ -591,7 +593,7 @@ impl SqlDataFrame { .map(|col| { let col = Expr::Column(Column { relation: if self.dialect().joinaggregate_fully_qualified { - Some(OwnedTableReference::bare(inner_name.clone())) + Some(TableReference::bare(inner_name.clone())) } else { None }, @@ -615,7 +617,7 @@ impl SqlDataFrame { } else { let expr = Expr::Column(Column { relation: if self.dialect().joinaggregate_fully_qualified { - Some(OwnedTableReference::bare(self.parent_name())) + Some(TableReference::bare(self.parent_name())) } else { None }, @@ -845,6 +847,7 @@ impl SqlDataFrame { }), ], window_frame: WindowFrame::new(Some(true)), + null_treatment: Some(NullTreatment::IgnoreNulls), }) .alias(order_field); @@ -888,15 +891,12 @@ impl SqlDataFrame { .map(|f| f.name().clone()) .collect(); - // let dialect = self.dialect(); - // Build partitioning column expressions let partition_by: Vec<_> = groupby.iter().map(|group| flat_col(group)).collect(); - - let numeric_field = 
Expr::ScalarFunction(expr::ScalarFunction { - func_def: ScalarFunctionDefinition::BuiltIn(BuiltinScalarFunction::Coalesce), - args: vec![to_numeric(flat_col(field), &self.schema_df()?)?, lit(0.0)], - }); + let numeric_field = coalesce(vec![ + to_numeric(flat_col(field), &self.schema_df()?)?, + lit(0.0), + ]); if let StackMode::Zero = mode { // Build window expression @@ -908,6 +908,7 @@ impl SqlDataFrame { partition_by, order_by: orderby, window_frame: WindowFrame::new(Some(true)), + null_treatment: Some(NullTreatment::IgnoreNulls), }) .alias(stop_field); @@ -982,6 +983,7 @@ impl SqlDataFrame { distinct: false, filter: None, order_by: None, + null_treatment: Some(NullTreatment::IgnoreNulls), }) .alias("__total"); let total_agg_str = total_agg @@ -1034,6 +1036,7 @@ impl SqlDataFrame { partition_by, order_by: orderby, window_frame: WindowFrame::new(Some(true)), + null_treatment: Some(NullTreatment::IgnoreNulls), }) .alias(cumulative_field); @@ -1132,35 +1135,37 @@ impl SqlDataFrame { groupby: &[String], order_field: Option<&str>, ) -> Result<Arc<dyn DataFrame>> { + let schema = self.schema(); // Store the schema in a variable + let (_, field_field) = schema + .column_with_name(field) + .with_context(|| format!("No field named {}", field.to_string()))?; + let field_type = field_field.data_type(); + if groupby.is_empty() { // Value replacement for field with no groupby fields specified is equivalent to replacing // null values of that column with the fill value - let select_columns: Vec<_> = self - .schema() + let select_columns = schema .fields() .iter() .map(|f| { let col_name = f.name(); - if col_name == field { - Expr::ScalarFunction(expr::ScalarFunction { - func_def: ScalarFunctionDefinition::BuiltIn( - BuiltinScalarFunction::Coalesce, - ), - args: vec![flat_col(field), lit(value.clone())], - }) + Ok(if col_name == field { + coalesce(vec![ + flat_col(field), + lit(value.clone()).cast_to(&field_type, &self.schema_df()?)?, + ]) .alias(col_name) } else { flat_col(col_name) - } + }) }) - .collect(); + .collect::<Result<Vec<_>>>()?; self.select(select_columns).await } else { // Save off names of columns in the original input DataFrame - let original_columns: Vec<_> = self - .schema() + let original_columns: Vec<_> = schema .fields() .iter() .map(|field| field.name().clone()) @@ -1186,22 +1191,20 @@ impl SqlDataFrame { // Build final selection // Finally, select all of the original DataFrame columns, filling in missing values // of the `field` columns - let select_columns: Vec<_> = original_columns + let select_columns = original_columns .iter() .map(|col_name| { - if col_name == field { - Expr::ScalarFunction(expr::ScalarFunction { - func_def: ScalarFunctionDefinition::BuiltIn( - BuiltinScalarFunction::Coalesce, - ), - args: vec![flat_col(field), lit(value.clone())], - }) + Ok(if col_name == field { + coalesce(vec![ + flat_col(field), + lit(value.clone()).cast_to(&field_type, &self.schema_df()?)?, + ]) .alias(col_name) } else { flat_col(col_name) - } + }) }) - .collect(); + .collect::<Result<Vec<_>>>()?; let select_column_strs: Vec<_> = if self.dialect().impute_fully_qualified { // Some dialects (e.g. 
Clickhouse) require that references to columns in nested @@ -1210,22 +1213,20 @@ impl SqlDataFrame { .iter() .map(|col_name| { let expr = if col_name == field { - Expr::ScalarFunction(expr::ScalarFunction { - func_def: ScalarFunctionDefinition::BuiltIn( - BuiltinScalarFunction::Coalesce, - ), - args: vec![flat_col(field), lit(value.clone())], - }) + coalesce(vec![ + flat_col(field), + lit(value.clone()).cast_to(&field_type, &self.schema_df()?)?, + ]) .alias(col_name) } else if col_name == key { Expr::Column(Column { - relation: Some(OwnedTableReference::bare("_key")), + relation: Some(TableReference::bare("_key")), name: col_name.clone(), }) .alias(col_name) } else if groupby.contains(col_name) { Expr::Column(Column { - relation: Some(OwnedTableReference::bare("_groups")), + relation: Some(TableReference::bare("_groups")), name: col_name.clone(), }) .alias(col_name) @@ -1343,6 +1344,7 @@ impl SqlDataFrame { partition_by: vec![], order_by, window_frame: WindowFrame::new(Some(true)), + null_treatment: Some(NullTreatment::IgnoreNulls), }) .alias(order_field); @@ -1451,6 +1453,7 @@ fn query_chain_to_cte(queries: &[Query], prefix: &str) -> Query { }, query: Box::new(query.clone()), from: None, + materialized: None, } }) .collect(); diff --git a/vegafusion-sql/src/dialect/transforms/date_part_tz.rs b/vegafusion-sql/src/dialect/transforms/date_part_tz.rs index f4d98ace3..53d6bb8c6 100644 --- a/vegafusion-sql/src/dialect/transforms/date_part_tz.rs +++ b/vegafusion-sql/src/dialect/transforms/date_part_tz.rs @@ -65,7 +65,7 @@ pub fn part_to_date_time_field(part: &str) -> Result { Ok(match part.to_ascii_lowercase().as_str() { "year" | "years" => SqlDateTimeField::Year, "month" | "months " => SqlDateTimeField::Month, - "week" | "weeks" => SqlDateTimeField::Week, + "week" | "weeks" => SqlDateTimeField::Week(None), "day" | "days" => SqlDateTimeField::Day, "date" => SqlDateTimeField::Date, "hour" | "hours" => SqlDateTimeField::Hour, diff --git a/vegafusion-sql/tests/expected/impute.toml b/vegafusion-sql/tests/expected/impute.toml index b84247b7b..21e45dc9f 100644 --- a/vegafusion-sql/tests/expected/impute.toml +++ b/vegafusion-sql/tests/expected/impute.toml @@ -1,63 +1,33 @@ [unordered_no_groups] athena = """ -WITH \ -values0 AS (SELECT * FROM (VALUES (0, 28, 0, -1), (0, 91, 1, -1), (1, 43, 0, -2), (NULL, 55, 1, -2), (3, 19, 0, -3), (2, 81, 0, -3), (2, 53, 1, -4)) AS "_values" ("a", "b", "c", "d")), \ -values1 AS (SELECT coalesce("a", -1) AS "a", "b", "c", "d" FROM values0) \ -SELECT * FROM values1 ORDER BY "a" ASC NULLS FIRST, "b" ASC NULLS FIRST +WITH values0 AS (SELECT * FROM (VALUES (0, 28, 0, -1), (0, 91, 1, -1), (1, 43, 0, -2), (NULL, 55, 1, -2), (3, 19, 0, -3), (2, 81, 0, -3), (2, 53, 1, -4)) AS "_values" ("a", "b", "c", "d")), values1 AS (SELECT coalesce("a", CAST(-1 AS BIGINT)) AS "a", "b", "c", "d" FROM values0) SELECT * FROM values1 ORDER BY "a" ASC NULLS FIRST, "b" ASC NULLS FIRST """ bigquery = """ -WITH \ -values0 AS (SELECT 0 AS `a`, 28 AS `b`, 0 AS `c`, -1 AS `d` UNION ALL SELECT 0 AS `a`, 91 AS `b`, 1 AS `c`, -1 AS `d` UNION ALL SELECT 1 AS `a`, 43 AS `b`, 0 AS `c`, -2 AS `d` UNION ALL SELECT NULL AS `a`, 55 AS `b`, 1 AS `c`, -2 AS `d` UNION ALL SELECT 3 AS `a`, 19 AS `b`, 0 AS `c`, -3 AS `d` UNION ALL SELECT 2 AS `a`, 81 AS `b`, 0 AS `c`, -3 AS `d` UNION ALL SELECT 2 AS `a`, 53 AS `b`, 1 AS `c`, -4 AS `d`), \ -values1 AS (SELECT coalesce(`a`, -1) AS `a`, `b`, `c`, `d` FROM values0) \ -SELECT * FROM values1 ORDER BY `a` ASC NULLS FIRST, `b` ASC NULLS FIRST +WITH values0 AS (SELECT 0 AS 
`a`, 28 AS `b`, 0 AS `c`, -1 AS `d` UNION ALL SELECT 0 AS `a`, 91 AS `b`, 1 AS `c`, -1 AS `d` UNION ALL SELECT 1 AS `a`, 43 AS `b`, 0 AS `c`, -2 AS `d` UNION ALL SELECT NULL AS `a`, 55 AS `b`, 1 AS `c`, -2 AS `d` UNION ALL SELECT 3 AS `a`, 19 AS `b`, 0 AS `c`, -3 AS `d` UNION ALL SELECT 2 AS `a`, 81 AS `b`, 0 AS `c`, -3 AS `d` UNION ALL SELECT 2 AS `a`, 53 AS `b`, 1 AS `c`, -4 AS `d`), values1 AS (SELECT coalesce(`a`, CAST(-1 AS INT)) AS `a`, `b`, `c`, `d` FROM values0) SELECT * FROM values1 ORDER BY `a` ASC NULLS FIRST, `b` ASC NULLS FIRST """ clickhouse = """ -WITH \ -values0 AS (SELECT 0 AS "a", 28 AS "b", 0 AS "c", -1 AS "d" UNION ALL SELECT 0 AS "a", 91 AS "b", 1 AS "c", -1 AS "d" UNION ALL SELECT 1 AS "a", 43 AS "b", 0 AS "c", -2 AS "d" UNION ALL SELECT NULL AS "a", 55 AS "b", 1 AS "c", -2 AS "d" UNION ALL SELECT 3 AS "a", 19 AS "b", 0 AS "c", -3 AS "d" UNION ALL SELECT 2 AS "a", 81 AS "b", 0 AS "c", -3 AS "d" UNION ALL SELECT 2 AS "a", 53 AS "b", 1 AS "c", -4 AS "d"), \ -values1 AS (SELECT coalesce("a", -1) AS "a", "b", "c", "d" FROM values0) \ -SELECT * FROM values1 ORDER BY "a" ASC NULLS FIRST, "b" ASC NULLS FIRST +WITH values0 AS (SELECT 0 AS "a", 28 AS "b", 0 AS "c", -1 AS "d" UNION ALL SELECT 0 AS "a", 91 AS "b", 1 AS "c", -1 AS "d" UNION ALL SELECT 1 AS "a", 43 AS "b", 0 AS "c", -2 AS "d" UNION ALL SELECT NULL AS "a", 55 AS "b", 1 AS "c", -2 AS "d" UNION ALL SELECT 3 AS "a", 19 AS "b", 0 AS "c", -3 AS "d" UNION ALL SELECT 2 AS "a", 81 AS "b", 0 AS "c", -3 AS "d" UNION ALL SELECT 2 AS "a", 53 AS "b", 1 AS "c", -4 AS "d"), values1 AS (SELECT coalesce("a", CASE WHEN -1 IS NOT NULL THEN CAST(-1 AS BIGINT) ELSE NULL END) AS "a", "b", "c", "d" FROM values0) SELECT * FROM values1 ORDER BY "a" ASC NULLS FIRST, "b" ASC NULLS FIRST """ databricks = """ -WITH \ -values0 AS (SELECT * FROM (VALUES (0, 28, 0, -1), (0, 91, 1, -1), (1, 43, 0, -2), (NULL, 55, 1, -2), (3, 19, 0, -3), (2, 81, 0, -3), (2, 53, 1, -4)) AS `_values` (`a`, `b`, `c`, `d`)), \ -values1 AS (SELECT coalesce(`a`, -1) AS `a`, `b`, `c`, `d` FROM values0) \ -SELECT * FROM values1 ORDER BY `a` ASC NULLS FIRST, `b` ASC NULLS FIRST +WITH values0 AS (SELECT * FROM (VALUES (0, 28, 0, -1), (0, 91, 1, -1), (1, 43, 0, -2), (NULL, 55, 1, -2), (3, 19, 0, -3), (2, 81, 0, -3), (2, 53, 1, -4)) AS `_values` (`a`, `b`, `c`, `d`)), values1 AS (SELECT coalesce(`a`, CAST(-1 AS BIGINT)) AS `a`, `b`, `c`, `d` FROM values0) SELECT * FROM values1 ORDER BY `a` ASC NULLS FIRST, `b` ASC NULLS FIRST """ datafusion = """ -WITH \ -values0 AS (SELECT * FROM (VALUES (0, 28, 0, -1), (0, 91, 1, -1), (1, 43, 0, -2), (NULL, 55, 1, -2), (3, 19, 0, -3), (2, 81, 0, -3), (2, 53, 1, -4)) AS "_values" ("a", "b", "c", "d")), \ -values1 AS (SELECT coalesce("a", -1) AS "a", "b", "c", "d" FROM values0) \ -SELECT * FROM values1 ORDER BY "a" ASC NULLS FIRST, "b" ASC NULLS FIRST +WITH values0 AS (SELECT * FROM (VALUES (0, 28, 0, -1), (0, 91, 1, -1), (1, 43, 0, -2), (NULL, 55, 1, -2), (3, 19, 0, -3), (2, 81, 0, -3), (2, 53, 1, -4)) AS "_values" ("a", "b", "c", "d")), values1 AS (SELECT coalesce("a", CAST(-1 AS BIGINT)) AS "a", "b", "c", "d" FROM values0) SELECT * FROM values1 ORDER BY "a" ASC NULLS FIRST, "b" ASC NULLS FIRST """ duckdb = """ -WITH \ -values0 AS (SELECT * FROM (VALUES (0, 28, 0, -1), (0, 91, 1, -1), (1, 43, 0, -2), (NULL, 55, 1, -2), (3, 19, 0, -3), (2, 81, 0, -3), (2, 53, 1, -4)) AS "_values" ("a", "b", "c", "d")), \ -values1 AS (SELECT coalesce("a", -1) AS "a", "b", "c", "d" FROM values0) \ -SELECT * FROM values1 ORDER BY "a" ASC NULLS FIRST, "b" ASC 
NULLS FIRST +WITH values0 AS (SELECT * FROM (VALUES (0, 28, 0, -1), (0, 91, 1, -1), (1, 43, 0, -2), (NULL, 55, 1, -2), (3, 19, 0, -3), (2, 81, 0, -3), (2, 53, 1, -4)) AS "_values" ("a", "b", "c", "d")), values1 AS (SELECT coalesce("a", CAST(-1 AS BIGINT)) AS "a", "b", "c", "d" FROM values0) SELECT * FROM values1 ORDER BY "a" ASC NULLS FIRST, "b" ASC NULLS FIRST """ mysql = """ -WITH \ -values0 AS (SELECT * FROM (VALUES ROW(0, 28, 0, -1), ROW(0, 91, 1, -1), ROW(1, 43, 0, -2), ROW(NULL, 55, 1, -2), ROW(3, 19, 0, -3), ROW(2, 81, 0, -3), ROW(2, 53, 1, -4)) AS `_values` (`a`, `b`, `c`, `d`)), \ -values1 AS (SELECT coalesce(`a`, -1) AS `a`, `b`, `c`, `d` FROM values0) \ -SELECT * FROM values1 ORDER BY `a` ASC, `b` ASC +WITH values0 AS (SELECT * FROM (VALUES ROW(0, 28, 0, -1), ROW(0, 91, 1, -1), ROW(1, 43, 0, -2), ROW(NULL, 55, 1, -2), ROW(3, 19, 0, -3), ROW(2, 81, 0, -3), ROW(2, 53, 1, -4)) AS `_values` (`a`, `b`, `c`, `d`)), values1 AS (SELECT coalesce(`a`, CAST(-1 AS SIGNED)) AS `a`, `b`, `c`, `d` FROM values0) SELECT * FROM values1 ORDER BY `a` ASC, `b` ASC """ postgres = """ -WITH \ -values0 AS (SELECT * FROM (VALUES (0, 28, 0, -1), (0, 91, 1, -1), (1, 43, 0, -2), (NULL, 55, 1, -2), (3, 19, 0, -3), (2, 81, 0, -3), (2, 53, 1, -4)) AS "_values" ("a", "b", "c", "d")), \ -values1 AS (SELECT coalesce("a", -1) AS "a", "b", "c", "d" FROM values0) \ -SELECT * FROM values1 ORDER BY "a" ASC NULLS FIRST, "b" ASC NULLS FIRST +WITH values0 AS (SELECT * FROM (VALUES (0, 28, 0, -1), (0, 91, 1, -1), (1, 43, 0, -2), (NULL, 55, 1, -2), (3, 19, 0, -3), (2, 81, 0, -3), (2, 53, 1, -4)) AS "_values" ("a", "b", "c", "d")), values1 AS (SELECT coalesce("a", CAST(-1 AS BIGINT)) AS "a", "b", "c", "d" FROM values0) SELECT * FROM values1 ORDER BY "a" ASC NULLS FIRST, "b" ASC NULLS FIRST """ redshift = """ -WITH \ -values0 AS (SELECT 0 AS "a", 28 AS "b", 0 AS "c", -1 AS "d" UNION ALL SELECT 0 AS "a", 91 AS "b", 1 AS "c", -1 AS "d" UNION ALL SELECT 1 AS "a", 43 AS "b", 0 AS "c", -2 AS "d" UNION ALL SELECT NULL AS "a", 55 AS "b", 1 AS "c", -2 AS "d" UNION ALL SELECT 3 AS "a", 19 AS "b", 0 AS "c", -3 AS "d" UNION ALL SELECT 2 AS "a", 81 AS "b", 0 AS "c", -3 AS "d" UNION ALL SELECT 2 AS "a", 53 AS "b", 1 AS "c", -4 AS "d"), \ -values1 AS (SELECT coalesce("a", -1) AS "a", "b", "c", "d" FROM values0) \ -SELECT * FROM values1 ORDER BY "a" ASC NULLS FIRST, "b" ASC NULLS FIRST +WITH values0 AS (SELECT 0 AS "a", 28 AS "b", 0 AS "c", -1 AS "d" UNION ALL SELECT 0 AS "a", 91 AS "b", 1 AS "c", -1 AS "d" UNION ALL SELECT 1 AS "a", 43 AS "b", 0 AS "c", -2 AS "d" UNION ALL SELECT NULL AS "a", 55 AS "b", 1 AS "c", -2 AS "d" UNION ALL SELECT 3 AS "a", 19 AS "b", 0 AS "c", -3 AS "d" UNION ALL SELECT 2 AS "a", 81 AS "b", 0 AS "c", -3 AS "d" UNION ALL SELECT 2 AS "a", 53 AS "b", 1 AS "c", -4 AS "d"), values1 AS (SELECT coalesce("a", CASE WHEN -1 IS NOT NULL THEN CAST(-1 AS BIGINT) ELSE NULL END) AS "a", "b", "c", "d" FROM values0) SELECT * FROM values1 ORDER BY "a" ASC NULLS FIRST, "b" ASC NULLS FIRST """ snowflake = """ -WITH \ -values0 AS (SELECT "COLUMN1" AS "a", "COLUMN2" AS "b", "COLUMN3" AS "c", "COLUMN4" AS "d" FROM (VALUES (0, 28, 0, -1), (0, 91, 1, -1), (1, 43, 0, -2), (NULL, 55, 1, -2), (3, 19, 0, -3), (2, 81, 0, -3), (2, 53, 1, -4))), \ -values1 AS (SELECT coalesce("a", -1) AS "a", "b", "c", "d" FROM values0) \ -SELECT * FROM values1 ORDER BY "a" ASC NULLS FIRST, "b" ASC NULLS FIRST +WITH values0 AS (SELECT "COLUMN1" AS "a", "COLUMN2" AS "b", "COLUMN3" AS "c", "COLUMN4" AS "d" FROM (VALUES (0, 28, 0, -1), (0, 91, 1, -1), (1, 43, 0, 
-2), (NULL, 55, 1, -2), (3, 19, 0, -3), (2, 81, 0, -3), (2, 53, 1, -4))), values1 AS (SELECT coalesce("a", CAST(-1 AS BIGINT)) AS "a", "b", "c", "d" FROM values0) SELECT * FROM values1 ORDER BY "a" ASC NULLS FIRST, "b" ASC NULLS FIRST """ result = ''' +----+----+---+----+ @@ -75,64 +45,34 @@ result = ''' [unordered_one_group] athena = """ -WITH \ -values0 AS (SELECT * FROM (VALUES (0, 28, 0, -1), (0, 91, 1, -1), (1, 43, 0, -2), (NULL, 55, 1, -2), (3, 19, 0, -3), (2, 81, 0, -3), (2, 53, 1, -4)) AS "_values" ("a", "b", "c", "d")), \ -values1 AS (SELECT "a", coalesce("b", -1) AS "b", "c", "d" FROM (SELECT "a" FROM values0 WHERE "a" IS NOT NULL GROUP BY "a") AS _key CROSS JOIN (SELECT "c" FROM values0 GROUP BY "c") AS _groups LEFT JOIN values0 USING("a", "c")) \ -SELECT * FROM values1 ORDER BY "a" ASC NULLS FIRST, "b" ASC NULLS FIRST +WITH values0 AS (SELECT * FROM (VALUES (0, 28, 0, -1), (0, 91, 1, -1), (1, 43, 0, -2), (NULL, 55, 1, -2), (3, 19, 0, -3), (2, 81, 0, -3), (2, 53, 1, -4)) AS "_values" ("a", "b", "c", "d")), values1 AS (SELECT "a", coalesce("b", CAST(-1 AS BIGINT)) AS "b", "c", "d" FROM (SELECT "a" FROM values0 WHERE "a" IS NOT NULL GROUP BY "a") AS _key CROSS JOIN (SELECT "c" FROM values0 GROUP BY "c") AS _groups LEFT JOIN values0 USING("a", "c")) SELECT * FROM values1 ORDER BY "a" ASC NULLS FIRST, "b" ASC NULLS FIRST """ bigquery = """ -WITH \ -values0 AS (SELECT 0 AS `a`, 28 AS `b`, 0 AS `c`, -1 AS `d` UNION ALL SELECT 0 AS `a`, 91 AS `b`, 1 AS `c`, -1 AS `d` UNION ALL SELECT 1 AS `a`, 43 AS `b`, 0 AS `c`, -2 AS `d` UNION ALL SELECT NULL AS `a`, 55 AS `b`, 1 AS `c`, -2 AS `d` UNION ALL SELECT 3 AS `a`, 19 AS `b`, 0 AS `c`, -3 AS `d` UNION ALL SELECT 2 AS `a`, 81 AS `b`, 0 AS `c`, -3 AS `d` UNION ALL SELECT 2 AS `a`, 53 AS `b`, 1 AS `c`, -4 AS `d`), \ -values1 AS (SELECT `a`, coalesce(`b`, -1) AS `b`, `c`, `d` FROM (SELECT `a` FROM values0 WHERE `a` IS NOT NULL GROUP BY `a`) AS _key CROSS JOIN (SELECT `c` FROM values0 GROUP BY `c`) AS _groups LEFT JOIN values0 USING(`a`, `c`)) \ -SELECT * FROM values1 ORDER BY `a` ASC NULLS FIRST, `b` ASC NULLS FIRST +WITH values0 AS (SELECT 0 AS `a`, 28 AS `b`, 0 AS `c`, -1 AS `d` UNION ALL SELECT 0 AS `a`, 91 AS `b`, 1 AS `c`, -1 AS `d` UNION ALL SELECT 1 AS `a`, 43 AS `b`, 0 AS `c`, -2 AS `d` UNION ALL SELECT NULL AS `a`, 55 AS `b`, 1 AS `c`, -2 AS `d` UNION ALL SELECT 3 AS `a`, 19 AS `b`, 0 AS `c`, -3 AS `d` UNION ALL SELECT 2 AS `a`, 81 AS `b`, 0 AS `c`, -3 AS `d` UNION ALL SELECT 2 AS `a`, 53 AS `b`, 1 AS `c`, -4 AS `d`), values1 AS (SELECT `a`, coalesce(`b`, CAST(-1 AS INT)) AS `b`, `c`, `d` FROM (SELECT `a` FROM values0 WHERE `a` IS NOT NULL GROUP BY `a`) AS _key CROSS JOIN (SELECT `c` FROM values0 GROUP BY `c`) AS _groups LEFT JOIN values0 USING(`a`, `c`)) SELECT * FROM values1 ORDER BY `a` ASC NULLS FIRST, `b` ASC NULLS FIRST """ clickhouse = """ -WITH \ -values0 AS (SELECT 0 AS "a", 28 AS "b", 0 AS "c", -1 AS "d" UNION ALL SELECT 0 AS "a", 91 AS "b", 1 AS "c", -1 AS "d" UNION ALL SELECT 1 AS "a", 43 AS "b", 0 AS "c", -2 AS "d" UNION ALL SELECT NULL AS "a", 55 AS "b", 1 AS "c", -2 AS "d" UNION ALL SELECT 3 AS "a", 19 AS "b", 0 AS "c", -3 AS "d" UNION ALL SELECT 2 AS "a", 81 AS "b", 0 AS "c", -3 AS "d" UNION ALL SELECT 2 AS "a", 53 AS "b", 1 AS "c", -4 AS "d"), \ -values1 AS (SELECT "_key"."a" AS "a", coalesce("b", -1) AS "b", "_groups"."c" AS "c", "d" FROM (SELECT "a" FROM values0 WHERE "a" IS NOT NULL GROUP BY "a") AS _key CROSS JOIN (SELECT "c" FROM values0 GROUP BY "c") AS _groups LEFT JOIN values0 USING("a", "c")) \ -SELECT * FROM 
values1 ORDER BY "a" ASC NULLS FIRST, "b" ASC NULLS FIRST +WITH values0 AS (SELECT 0 AS "a", 28 AS "b", 0 AS "c", -1 AS "d" UNION ALL SELECT 0 AS "a", 91 AS "b", 1 AS "c", -1 AS "d" UNION ALL SELECT 1 AS "a", 43 AS "b", 0 AS "c", -2 AS "d" UNION ALL SELECT NULL AS "a", 55 AS "b", 1 AS "c", -2 AS "d" UNION ALL SELECT 3 AS "a", 19 AS "b", 0 AS "c", -3 AS "d" UNION ALL SELECT 2 AS "a", 81 AS "b", 0 AS "c", -3 AS "d" UNION ALL SELECT 2 AS "a", 53 AS "b", 1 AS "c", -4 AS "d"), values1 AS (SELECT "_key"."a" AS "a", coalesce("b", CASE WHEN -1 IS NOT NULL THEN CAST(-1 AS BIGINT) ELSE NULL END) AS "b", "_groups"."c" AS "c", "d" FROM (SELECT "a" FROM values0 WHERE "a" IS NOT NULL GROUP BY "a") AS _key CROSS JOIN (SELECT "c" FROM values0 GROUP BY "c") AS _groups LEFT JOIN values0 USING("a", "c")) SELECT * FROM values1 ORDER BY "a" ASC NULLS FIRST, "b" ASC NULLS FIRST """ databricks = """ -WITH \ -values0 AS (SELECT * FROM (VALUES (0, 28, 0, -1), (0, 91, 1, -1), (1, 43, 0, -2), (NULL, 55, 1, -2), (3, 19, 0, -3), (2, 81, 0, -3), (2, 53, 1, -4)) AS `_values` (`a`, `b`, `c`, `d`)), \ -values1 AS (SELECT `a`, coalesce(`b`, -1) AS `b`, `c`, `d` FROM (SELECT `a` FROM values0 WHERE `a` IS NOT NULL GROUP BY `a`) AS _key CROSS JOIN (SELECT `c` FROM values0 GROUP BY `c`) AS _groups LEFT JOIN values0 USING(`a`, `c`)) \ -SELECT * FROM values1 ORDER BY `a` ASC NULLS FIRST, `b` ASC NULLS FIRST +WITH values0 AS (SELECT * FROM (VALUES (0, 28, 0, -1), (0, 91, 1, -1), (1, 43, 0, -2), (NULL, 55, 1, -2), (3, 19, 0, -3), (2, 81, 0, -3), (2, 53, 1, -4)) AS `_values` (`a`, `b`, `c`, `d`)), values1 AS (SELECT `a`, coalesce(`b`, CAST(-1 AS BIGINT)) AS `b`, `c`, `d` FROM (SELECT `a` FROM values0 WHERE `a` IS NOT NULL GROUP BY `a`) AS _key CROSS JOIN (SELECT `c` FROM values0 GROUP BY `c`) AS _groups LEFT JOIN values0 USING(`a`, `c`)) SELECT * FROM values1 ORDER BY `a` ASC NULLS FIRST, `b` ASC NULLS FIRST """ datafusion = """ -WITH \ -values0 AS (SELECT * FROM (VALUES (0, 28, 0, -1), (0, 91, 1, -1), (1, 43, 0, -2), (NULL, 55, 1, -2), (3, 19, 0, -3), (2, 81, 0, -3), (2, 53, 1, -4)) AS "_values" ("a", "b", "c", "d")), \ -values1 AS (SELECT "a", coalesce("b", -1) AS "b", "c", "d" FROM (SELECT "a" FROM values0 WHERE "a" IS NOT NULL GROUP BY "a") AS _key CROSS JOIN (SELECT "c" FROM values0 GROUP BY "c") AS _groups LEFT JOIN values0 USING("a", "c")) \ -SELECT * FROM values1 ORDER BY "a" ASC NULLS FIRST, "b" ASC NULLS FIRST +WITH values0 AS (SELECT * FROM (VALUES (0, 28, 0, -1), (0, 91, 1, -1), (1, 43, 0, -2), (NULL, 55, 1, -2), (3, 19, 0, -3), (2, 81, 0, -3), (2, 53, 1, -4)) AS "_values" ("a", "b", "c", "d")), values1 AS (SELECT "a", coalesce("b", CAST(-1 AS BIGINT)) AS "b", "c", "d" FROM (SELECT "a" FROM values0 WHERE "a" IS NOT NULL GROUP BY "a") AS _key CROSS JOIN (SELECT "c" FROM values0 GROUP BY "c") AS _groups LEFT JOIN values0 USING("a", "c")) SELECT * FROM values1 ORDER BY "a" ASC NULLS FIRST, "b" ASC NULLS FIRST """ duckdb = """ -WITH \ -values0 AS (SELECT * FROM (VALUES (0, 28, 0, -1), (0, 91, 1, -1), (1, 43, 0, -2), (NULL, 55, 1, -2), (3, 19, 0, -3), (2, 81, 0, -3), (2, 53, 1, -4)) AS "_values" ("a", "b", "c", "d")), \ -values1 AS (SELECT "a", coalesce("b", -1) AS "b", "c", "d" FROM (SELECT "a" FROM values0 WHERE "a" IS NOT NULL GROUP BY "a") AS _key CROSS JOIN (SELECT "c" FROM values0 GROUP BY "c") AS _groups LEFT JOIN values0 USING("a", "c")) \ -SELECT * FROM values1 ORDER BY "a" ASC NULLS FIRST, "b" ASC NULLS FIRST +WITH values0 AS (SELECT * FROM (VALUES (0, 28, 0, -1), (0, 91, 1, -1), (1, 43, 0, -2), (NULL, 55, 1, -2), 
(3, 19, 0, -3), (2, 81, 0, -3), (2, 53, 1, -4)) AS "_values" ("a", "b", "c", "d")), values1 AS (SELECT "a", coalesce("b", CAST(-1 AS BIGINT)) AS "b", "c", "d" FROM (SELECT "a" FROM values0 WHERE "a" IS NOT NULL GROUP BY "a") AS _key CROSS JOIN (SELECT "c" FROM values0 GROUP BY "c") AS _groups LEFT JOIN values0 USING("a", "c")) SELECT * FROM values1 ORDER BY "a" ASC NULLS FIRST, "b" ASC NULLS FIRST """ mysql = """ -WITH \ -values0 AS (SELECT * FROM (VALUES ROW(0, 28, 0, -1), ROW(0, 91, 1, -1), ROW(1, 43, 0, -2), ROW(NULL, 55, 1, -2), ROW(3, 19, 0, -3), ROW(2, 81, 0, -3), ROW(2, 53, 1, -4)) AS `_values` (`a`, `b`, `c`, `d`)), \ -values1 AS (SELECT `a`, coalesce(`b`, -1) AS `b`, `c`, `d` FROM (SELECT `a` FROM values0 WHERE `a` IS NOT NULL GROUP BY `a`) AS _key CROSS JOIN (SELECT `c` FROM values0 GROUP BY `c`) AS _groups LEFT JOIN values0 USING(`a`, `c`)) \ -SELECT * FROM values1 ORDER BY `a` ASC, `b` ASC +WITH values0 AS (SELECT * FROM (VALUES ROW(0, 28, 0, -1), ROW(0, 91, 1, -1), ROW(1, 43, 0, -2), ROW(NULL, 55, 1, -2), ROW(3, 19, 0, -3), ROW(2, 81, 0, -3), ROW(2, 53, 1, -4)) AS `_values` (`a`, `b`, `c`, `d`)), values1 AS (SELECT `a`, coalesce(`b`, CAST(-1 AS SIGNED)) AS `b`, `c`, `d` FROM (SELECT `a` FROM values0 WHERE `a` IS NOT NULL GROUP BY `a`) AS _key CROSS JOIN (SELECT `c` FROM values0 GROUP BY `c`) AS _groups LEFT JOIN values0 USING(`a`, `c`)) SELECT * FROM values1 ORDER BY `a` ASC, `b` ASC """ postgres = """ -WITH \ -values0 AS (SELECT * FROM (VALUES (0, 28, 0, -1), (0, 91, 1, -1), (1, 43, 0, -2), (NULL, 55, 1, -2), (3, 19, 0, -3), (2, 81, 0, -3), (2, 53, 1, -4)) AS "_values" ("a", "b", "c", "d")), \ -values1 AS (SELECT "a", coalesce("b", -1) AS "b", "c", "d" FROM (SELECT "a" FROM values0 WHERE "a" IS NOT NULL GROUP BY "a") AS _key CROSS JOIN (SELECT "c" FROM values0 GROUP BY "c") AS _groups LEFT JOIN values0 USING("a", "c")) \ -SELECT * FROM values1 ORDER BY "a" ASC NULLS FIRST, "b" ASC NULLS FIRST +WITH values0 AS (SELECT * FROM (VALUES (0, 28, 0, -1), (0, 91, 1, -1), (1, 43, 0, -2), (NULL, 55, 1, -2), (3, 19, 0, -3), (2, 81, 0, -3), (2, 53, 1, -4)) AS "_values" ("a", "b", "c", "d")), values1 AS (SELECT "a", coalesce("b", CAST(-1 AS BIGINT)) AS "b", "c", "d" FROM (SELECT "a" FROM values0 WHERE "a" IS NOT NULL GROUP BY "a") AS _key CROSS JOIN (SELECT "c" FROM values0 GROUP BY "c") AS _groups LEFT JOIN values0 USING("a", "c")) SELECT * FROM values1 ORDER BY "a" ASC NULLS FIRST, "b" ASC NULLS FIRST """ redshift = """ -WITH \ -values0 AS (SELECT 0 AS "a", 28 AS "b", 0 AS "c", -1 AS "d" UNION ALL SELECT 0 AS "a", 91 AS "b", 1 AS "c", -1 AS "d" UNION ALL SELECT 1 AS "a", 43 AS "b", 0 AS "c", -2 AS "d" UNION ALL SELECT NULL AS "a", 55 AS "b", 1 AS "c", -2 AS "d" UNION ALL SELECT 3 AS "a", 19 AS "b", 0 AS "c", -3 AS "d" UNION ALL SELECT 2 AS "a", 81 AS "b", 0 AS "c", -3 AS "d" UNION ALL SELECT 2 AS "a", 53 AS "b", 1 AS "c", -4 AS "d"), \ -values1 AS (SELECT "a", coalesce("b", -1) AS "b", "c", "d" FROM (SELECT "a" FROM values0 WHERE "a" IS NOT NULL GROUP BY "a") AS _key CROSS JOIN (SELECT "c" FROM values0 GROUP BY "c") AS _groups LEFT JOIN values0 USING("a", "c")) \ -SELECT * FROM values1 ORDER BY "a" ASC NULLS FIRST, "b" ASC NULLS FIRST +WITH values0 AS (SELECT 0 AS "a", 28 AS "b", 0 AS "c", -1 AS "d" UNION ALL SELECT 0 AS "a", 91 AS "b", 1 AS "c", -1 AS "d" UNION ALL SELECT 1 AS "a", 43 AS "b", 0 AS "c", -2 AS "d" UNION ALL SELECT NULL AS "a", 55 AS "b", 1 AS "c", -2 AS "d" UNION ALL SELECT 3 AS "a", 19 AS "b", 0 AS "c", -3 AS "d" UNION ALL SELECT 2 AS "a", 81 AS "b", 0 AS "c", -3 AS "d" 
UNION ALL SELECT 2 AS "a", 53 AS "b", 1 AS "c", -4 AS "d"), values1 AS (SELECT "a", coalesce("b", CASE WHEN -1 IS NOT NULL THEN CAST(-1 AS BIGINT) ELSE NULL END) AS "b", "c", "d" FROM (SELECT "a" FROM values0 WHERE "a" IS NOT NULL GROUP BY "a") AS _key CROSS JOIN (SELECT "c" FROM values0 GROUP BY "c") AS _groups LEFT JOIN values0 USING("a", "c")) SELECT * FROM values1 ORDER BY "a" ASC NULLS FIRST, "b" ASC NULLS FIRST """ snowflake = """ -WITH \ -values0 AS (SELECT "COLUMN1" AS "a", "COLUMN2" AS "b", "COLUMN3" AS "c", "COLUMN4" AS "d" FROM (VALUES (0, 28, 0, -1), (0, 91, 1, -1), (1, 43, 0, -2), (NULL, 55, 1, -2), (3, 19, 0, -3), (2, 81, 0, -3), (2, 53, 1, -4))), \ -values1 AS (SELECT "a", coalesce("b", -1) AS "b", "c", "d" FROM (SELECT "a" FROM values0 WHERE "a" IS NOT NULL GROUP BY "a") AS _key CROSS JOIN (SELECT "c" FROM values0 GROUP BY "c") AS _groups LEFT JOIN values0 USING("a", "c")) \ -SELECT * FROM values1 ORDER BY "a" ASC NULLS FIRST, "b" ASC NULLS FIRST +WITH values0 AS (SELECT "COLUMN1" AS "a", "COLUMN2" AS "b", "COLUMN3" AS "c", "COLUMN4" AS "d" FROM (VALUES (0, 28, 0, -1), (0, 91, 1, -1), (1, 43, 0, -2), (NULL, 55, 1, -2), (3, 19, 0, -3), (2, 81, 0, -3), (2, 53, 1, -4))), values1 AS (SELECT "a", coalesce("b", CAST(-1 AS BIGINT)) AS "b", "c", "d" FROM (SELECT "a" FROM values0 WHERE "a" IS NOT NULL GROUP BY "a") AS _key CROSS JOIN (SELECT "c" FROM values0 GROUP BY "c") AS _groups LEFT JOIN values0 USING("a", "c")) SELECT * FROM values1 ORDER BY "a" ASC NULLS FIRST, "b" ASC NULLS FIRST """ result = ''' +---+----+---+----+ @@ -151,64 +91,34 @@ result = ''' [unordered_two_groups] athena = """ -WITH \ -values0 AS (SELECT * FROM (VALUES (0, 28, 0, -1), (0, 91, 1, -1), (1, 43, 0, -2), (NULL, 55, 1, -2), (3, 19, 0, -3), (2, 81, 0, -3), (2, 53, 1, -4)) AS "_values" ("a", "b", "c", "d")), \ -values1 AS (SELECT "a", coalesce("b", -1) AS "b", "c", "d" FROM (SELECT "a" FROM values0 WHERE "a" IS NOT NULL GROUP BY "a") AS _key CROSS JOIN (SELECT "c", "d" FROM values0 GROUP BY "c", "d") AS _groups LEFT JOIN values0 USING("a", "c", "d")) \ -SELECT * FROM values1 ORDER BY "a" ASC NULLS FIRST, "b" ASC NULLS FIRST, "c" ASC NULLS FIRST, "d" ASC NULLS FIRST +WITH values0 AS (SELECT * FROM (VALUES (0, 28, 0, -1), (0, 91, 1, -1), (1, 43, 0, -2), (NULL, 55, 1, -2), (3, 19, 0, -3), (2, 81, 0, -3), (2, 53, 1, -4)) AS "_values" ("a", "b", "c", "d")), values1 AS (SELECT "a", coalesce("b", CAST(-1 AS BIGINT)) AS "b", "c", "d" FROM (SELECT "a" FROM values0 WHERE "a" IS NOT NULL GROUP BY "a") AS _key CROSS JOIN (SELECT "c", "d" FROM values0 GROUP BY "c", "d") AS _groups LEFT JOIN values0 USING("a", "c", "d")) SELECT * FROM values1 ORDER BY "a" ASC NULLS FIRST, "b" ASC NULLS FIRST, "c" ASC NULLS FIRST, "d" ASC NULLS FIRST """ bigquery = """ -WITH \ -values0 AS (SELECT 0 AS `a`, 28 AS `b`, 0 AS `c`, -1 AS `d` UNION ALL SELECT 0 AS `a`, 91 AS `b`, 1 AS `c`, -1 AS `d` UNION ALL SELECT 1 AS `a`, 43 AS `b`, 0 AS `c`, -2 AS `d` UNION ALL SELECT NULL AS `a`, 55 AS `b`, 1 AS `c`, -2 AS `d` UNION ALL SELECT 3 AS `a`, 19 AS `b`, 0 AS `c`, -3 AS `d` UNION ALL SELECT 2 AS `a`, 81 AS `b`, 0 AS `c`, -3 AS `d` UNION ALL SELECT 2 AS `a`, 53 AS `b`, 1 AS `c`, -4 AS `d`), \ -values1 AS (SELECT `a`, coalesce(`b`, -1) AS `b`, `c`, `d` FROM (SELECT `a` FROM values0 WHERE `a` IS NOT NULL GROUP BY `a`) AS _key CROSS JOIN (SELECT `c`, `d` FROM values0 GROUP BY `c`, `d`) AS _groups LEFT JOIN values0 USING(`a`, `c`, `d`)) \ -SELECT * FROM values1 ORDER BY `a` ASC NULLS FIRST, `b` ASC NULLS FIRST, `c` ASC NULLS FIRST, `d` ASC NULLS FIRST 
+WITH values0 AS (SELECT 0 AS `a`, 28 AS `b`, 0 AS `c`, -1 AS `d` UNION ALL SELECT 0 AS `a`, 91 AS `b`, 1 AS `c`, -1 AS `d` UNION ALL SELECT 1 AS `a`, 43 AS `b`, 0 AS `c`, -2 AS `d` UNION ALL SELECT NULL AS `a`, 55 AS `b`, 1 AS `c`, -2 AS `d` UNION ALL SELECT 3 AS `a`, 19 AS `b`, 0 AS `c`, -3 AS `d` UNION ALL SELECT 2 AS `a`, 81 AS `b`, 0 AS `c`, -3 AS `d` UNION ALL SELECT 2 AS `a`, 53 AS `b`, 1 AS `c`, -4 AS `d`), values1 AS (SELECT `a`, coalesce(`b`, CAST(-1 AS INT)) AS `b`, `c`, `d` FROM (SELECT `a` FROM values0 WHERE `a` IS NOT NULL GROUP BY `a`) AS _key CROSS JOIN (SELECT `c`, `d` FROM values0 GROUP BY `c`, `d`) AS _groups LEFT JOIN values0 USING(`a`, `c`, `d`)) SELECT * FROM values1 ORDER BY `a` ASC NULLS FIRST, `b` ASC NULLS FIRST, `c` ASC NULLS FIRST, `d` ASC NULLS FIRST """ clickhouse = """ -WITH \ -values0 AS (SELECT 0 AS "a", 28 AS "b", 0 AS "c", -1 AS "d" UNION ALL SELECT 0 AS "a", 91 AS "b", 1 AS "c", -1 AS "d" UNION ALL SELECT 1 AS "a", 43 AS "b", 0 AS "c", -2 AS "d" UNION ALL SELECT NULL AS "a", 55 AS "b", 1 AS "c", -2 AS "d" UNION ALL SELECT 3 AS "a", 19 AS "b", 0 AS "c", -3 AS "d" UNION ALL SELECT 2 AS "a", 81 AS "b", 0 AS "c", -3 AS "d" UNION ALL SELECT 2 AS "a", 53 AS "b", 1 AS "c", -4 AS "d"), \ -values1 AS (SELECT "_key"."a" AS "a", coalesce("b", -1) AS "b", "_groups"."c" AS "c", "_groups"."d" AS "d" FROM (SELECT "a" FROM values0 WHERE "a" IS NOT NULL GROUP BY "a") AS _key CROSS JOIN (SELECT "c", "d" FROM values0 GROUP BY "c", "d") AS _groups LEFT JOIN values0 USING("a", "c", "d")) \ -SELECT * FROM values1 ORDER BY "a" ASC NULLS FIRST, "b" ASC NULLS FIRST, "c" ASC NULLS FIRST, "d" ASC NULLS FIRST +WITH values0 AS (SELECT 0 AS "a", 28 AS "b", 0 AS "c", -1 AS "d" UNION ALL SELECT 0 AS "a", 91 AS "b", 1 AS "c", -1 AS "d" UNION ALL SELECT 1 AS "a", 43 AS "b", 0 AS "c", -2 AS "d" UNION ALL SELECT NULL AS "a", 55 AS "b", 1 AS "c", -2 AS "d" UNION ALL SELECT 3 AS "a", 19 AS "b", 0 AS "c", -3 AS "d" UNION ALL SELECT 2 AS "a", 81 AS "b", 0 AS "c", -3 AS "d" UNION ALL SELECT 2 AS "a", 53 AS "b", 1 AS "c", -4 AS "d"), values1 AS (SELECT "_key"."a" AS "a", coalesce("b", CASE WHEN -1 IS NOT NULL THEN CAST(-1 AS BIGINT) ELSE NULL END) AS "b", "_groups"."c" AS "c", "_groups"."d" AS "d" FROM (SELECT "a" FROM values0 WHERE "a" IS NOT NULL GROUP BY "a") AS _key CROSS JOIN (SELECT "c", "d" FROM values0 GROUP BY "c", "d") AS _groups LEFT JOIN values0 USING("a", "c", "d")) SELECT * FROM values1 ORDER BY "a" ASC NULLS FIRST, "b" ASC NULLS FIRST, "c" ASC NULLS FIRST, "d" ASC NULLS FIRST """ databricks = """ -WITH \ -values0 AS (SELECT * FROM (VALUES (0, 28, 0, -1), (0, 91, 1, -1), (1, 43, 0, -2), (NULL, 55, 1, -2), (3, 19, 0, -3), (2, 81, 0, -3), (2, 53, 1, -4)) AS `_values` (`a`, `b`, `c`, `d`)), \ -values1 AS (SELECT `a`, coalesce(`b`, -1) AS `b`, `c`, `d` FROM (SELECT `a` FROM values0 WHERE `a` IS NOT NULL GROUP BY `a`) AS _key CROSS JOIN (SELECT `c`, `d` FROM values0 GROUP BY `c`, `d`) AS _groups LEFT JOIN values0 USING(`a`, `c`, `d`)) \ -SELECT * FROM values1 ORDER BY `a` ASC NULLS FIRST, `b` ASC NULLS FIRST, `c` ASC NULLS FIRST, `d` ASC NULLS FIRST +WITH values0 AS (SELECT * FROM (VALUES (0, 28, 0, -1), (0, 91, 1, -1), (1, 43, 0, -2), (NULL, 55, 1, -2), (3, 19, 0, -3), (2, 81, 0, -3), (2, 53, 1, -4)) AS `_values` (`a`, `b`, `c`, `d`)), values1 AS (SELECT `a`, coalesce(`b`, CAST(-1 AS BIGINT)) AS `b`, `c`, `d` FROM (SELECT `a` FROM values0 WHERE `a` IS NOT NULL GROUP BY `a`) AS _key CROSS JOIN (SELECT `c`, `d` FROM values0 GROUP BY `c`, `d`) AS _groups LEFT JOIN values0 USING(`a`, `c`, 
`d`)) SELECT * FROM values1 ORDER BY `a` ASC NULLS FIRST, `b` ASC NULLS FIRST, `c` ASC NULLS FIRST, `d` ASC NULLS FIRST """ datafusion = """ -WITH \ -values0 AS (SELECT * FROM (VALUES (0, 28, 0, -1), (0, 91, 1, -1), (1, 43, 0, -2), (NULL, 55, 1, -2), (3, 19, 0, -3), (2, 81, 0, -3), (2, 53, 1, -4)) AS "_values" ("a", "b", "c", "d")), \ -values1 AS (SELECT "a", coalesce("b", -1) AS "b", "c", "d" FROM (SELECT "a" FROM values0 WHERE "a" IS NOT NULL GROUP BY "a") AS _key CROSS JOIN (SELECT "c", "d" FROM values0 GROUP BY "c", "d") AS _groups LEFT JOIN values0 USING("a", "c", "d")) \ -SELECT * FROM values1 ORDER BY "a" ASC NULLS FIRST, "b" ASC NULLS FIRST, "c" ASC NULLS FIRST, "d" ASC NULLS FIRST +WITH values0 AS (SELECT * FROM (VALUES (0, 28, 0, -1), (0, 91, 1, -1), (1, 43, 0, -2), (NULL, 55, 1, -2), (3, 19, 0, -3), (2, 81, 0, -3), (2, 53, 1, -4)) AS "_values" ("a", "b", "c", "d")), values1 AS (SELECT "a", coalesce("b", CAST(-1 AS BIGINT)) AS "b", "c", "d" FROM (SELECT "a" FROM values0 WHERE "a" IS NOT NULL GROUP BY "a") AS _key CROSS JOIN (SELECT "c", "d" FROM values0 GROUP BY "c", "d") AS _groups LEFT JOIN values0 USING("a", "c", "d")) SELECT * FROM values1 ORDER BY "a" ASC NULLS FIRST, "b" ASC NULLS FIRST, "c" ASC NULLS FIRST, "d" ASC NULLS FIRST """ duckdb = """ -WITH \ -values0 AS (SELECT * FROM (VALUES (0, 28, 0, -1), (0, 91, 1, -1), (1, 43, 0, -2), (NULL, 55, 1, -2), (3, 19, 0, -3), (2, 81, 0, -3), (2, 53, 1, -4)) AS "_values" ("a", "b", "c", "d")), \ -values1 AS (SELECT "a", coalesce("b", -1) AS "b", "c", "d" FROM (SELECT "a" FROM values0 WHERE "a" IS NOT NULL GROUP BY "a") AS _key CROSS JOIN (SELECT "c", "d" FROM values0 GROUP BY "c", "d") AS _groups LEFT JOIN values0 USING("a", "c", "d")) \ -SELECT * FROM values1 ORDER BY "a" ASC NULLS FIRST, "b" ASC NULLS FIRST, "c" ASC NULLS FIRST, "d" ASC NULLS FIRST +WITH values0 AS (SELECT * FROM (VALUES (0, 28, 0, -1), (0, 91, 1, -1), (1, 43, 0, -2), (NULL, 55, 1, -2), (3, 19, 0, -3), (2, 81, 0, -3), (2, 53, 1, -4)) AS "_values" ("a", "b", "c", "d")), values1 AS (SELECT "a", coalesce("b", CAST(-1 AS BIGINT)) AS "b", "c", "d" FROM (SELECT "a" FROM values0 WHERE "a" IS NOT NULL GROUP BY "a") AS _key CROSS JOIN (SELECT "c", "d" FROM values0 GROUP BY "c", "d") AS _groups LEFT JOIN values0 USING("a", "c", "d")) SELECT * FROM values1 ORDER BY "a" ASC NULLS FIRST, "b" ASC NULLS FIRST, "c" ASC NULLS FIRST, "d" ASC NULLS FIRST """ mysql = """ -WITH \ -values0 AS (SELECT * FROM (VALUES ROW(0, 28, 0, -1), ROW(0, 91, 1, -1), ROW(1, 43, 0, -2), ROW(NULL, 55, 1, -2), ROW(3, 19, 0, -3), ROW(2, 81, 0, -3), ROW(2, 53, 1, -4)) AS `_values` (`a`, `b`, `c`, `d`)), \ -values1 AS (SELECT `a`, coalesce(`b`, -1) AS `b`, `c`, `d` FROM (SELECT `a` FROM values0 WHERE `a` IS NOT NULL GROUP BY `a`) AS _key CROSS JOIN (SELECT `c`, `d` FROM values0 GROUP BY `c`, `d`) AS _groups LEFT JOIN values0 USING(`a`, `c`, `d`)) \ -SELECT * FROM values1 ORDER BY `a` ASC, `b` ASC, `c` ASC, `d` ASC +WITH values0 AS (SELECT * FROM (VALUES ROW(0, 28, 0, -1), ROW(0, 91, 1, -1), ROW(1, 43, 0, -2), ROW(NULL, 55, 1, -2), ROW(3, 19, 0, -3), ROW(2, 81, 0, -3), ROW(2, 53, 1, -4)) AS `_values` (`a`, `b`, `c`, `d`)), values1 AS (SELECT `a`, coalesce(`b`, CAST(-1 AS SIGNED)) AS `b`, `c`, `d` FROM (SELECT `a` FROM values0 WHERE `a` IS NOT NULL GROUP BY `a`) AS _key CROSS JOIN (SELECT `c`, `d` FROM values0 GROUP BY `c`, `d`) AS _groups LEFT JOIN values0 USING(`a`, `c`, `d`)) SELECT * FROM values1 ORDER BY `a` ASC, `b` ASC, `c` ASC, `d` ASC """ postgres = """ -WITH \ -values0 AS (SELECT * FROM (VALUES (0, 
28, 0, -1), (0, 91, 1, -1), (1, 43, 0, -2), (NULL, 55, 1, -2), (3, 19, 0, -3), (2, 81, 0, -3), (2, 53, 1, -4)) AS "_values" ("a", "b", "c", "d")), \ -values1 AS (SELECT "a", coalesce("b", -1) AS "b", "c", "d" FROM (SELECT "a" FROM values0 WHERE "a" IS NOT NULL GROUP BY "a") AS _key CROSS JOIN (SELECT "c", "d" FROM values0 GROUP BY "c", "d") AS _groups LEFT JOIN values0 USING("a", "c", "d")) \ -SELECT * FROM values1 ORDER BY "a" ASC NULLS FIRST, "b" ASC NULLS FIRST, "c" ASC NULLS FIRST, "d" ASC NULLS FIRST +WITH values0 AS (SELECT * FROM (VALUES (0, 28, 0, -1), (0, 91, 1, -1), (1, 43, 0, -2), (NULL, 55, 1, -2), (3, 19, 0, -3), (2, 81, 0, -3), (2, 53, 1, -4)) AS "_values" ("a", "b", "c", "d")), values1 AS (SELECT "a", coalesce("b", CAST(-1 AS BIGINT)) AS "b", "c", "d" FROM (SELECT "a" FROM values0 WHERE "a" IS NOT NULL GROUP BY "a") AS _key CROSS JOIN (SELECT "c", "d" FROM values0 GROUP BY "c", "d") AS _groups LEFT JOIN values0 USING("a", "c", "d")) SELECT * FROM values1 ORDER BY "a" ASC NULLS FIRST, "b" ASC NULLS FIRST, "c" ASC NULLS FIRST, "d" ASC NULLS FIRST """ redshift = """ -WITH \ -values0 AS (SELECT 0 AS "a", 28 AS "b", 0 AS "c", -1 AS "d" UNION ALL SELECT 0 AS "a", 91 AS "b", 1 AS "c", -1 AS "d" UNION ALL SELECT 1 AS "a", 43 AS "b", 0 AS "c", -2 AS "d" UNION ALL SELECT NULL AS "a", 55 AS "b", 1 AS "c", -2 AS "d" UNION ALL SELECT 3 AS "a", 19 AS "b", 0 AS "c", -3 AS "d" UNION ALL SELECT 2 AS "a", 81 AS "b", 0 AS "c", -3 AS "d" UNION ALL SELECT 2 AS "a", 53 AS "b", 1 AS "c", -4 AS "d"), \ -values1 AS (SELECT "a", coalesce("b", -1) AS "b", "c", "d" FROM (SELECT "a" FROM values0 WHERE "a" IS NOT NULL GROUP BY "a") AS _key CROSS JOIN (SELECT "c", "d" FROM values0 GROUP BY "c", "d") AS _groups LEFT JOIN values0 USING("a", "c", "d")) \ -SELECT * FROM values1 ORDER BY "a" ASC NULLS FIRST, "b" ASC NULLS FIRST, "c" ASC NULLS FIRST, "d" ASC NULLS FIRST +WITH values0 AS (SELECT 0 AS "a", 28 AS "b", 0 AS "c", -1 AS "d" UNION ALL SELECT 0 AS "a", 91 AS "b", 1 AS "c", -1 AS "d" UNION ALL SELECT 1 AS "a", 43 AS "b", 0 AS "c", -2 AS "d" UNION ALL SELECT NULL AS "a", 55 AS "b", 1 AS "c", -2 AS "d" UNION ALL SELECT 3 AS "a", 19 AS "b", 0 AS "c", -3 AS "d" UNION ALL SELECT 2 AS "a", 81 AS "b", 0 AS "c", -3 AS "d" UNION ALL SELECT 2 AS "a", 53 AS "b", 1 AS "c", -4 AS "d"), values1 AS (SELECT "a", coalesce("b", CASE WHEN -1 IS NOT NULL THEN CAST(-1 AS BIGINT) ELSE NULL END) AS "b", "c", "d" FROM (SELECT "a" FROM values0 WHERE "a" IS NOT NULL GROUP BY "a") AS _key CROSS JOIN (SELECT "c", "d" FROM values0 GROUP BY "c", "d") AS _groups LEFT JOIN values0 USING("a", "c", "d")) SELECT * FROM values1 ORDER BY "a" ASC NULLS FIRST, "b" ASC NULLS FIRST, "c" ASC NULLS FIRST, "d" ASC NULLS FIRST """ snowflake = """ -WITH \ -values0 AS (SELECT "COLUMN1" AS "a", "COLUMN2" AS "b", "COLUMN3" AS "c", "COLUMN4" AS "d" FROM (VALUES (0, 28, 0, -1), (0, 91, 1, -1), (1, 43, 0, -2), (NULL, 55, 1, -2), (3, 19, 0, -3), (2, 81, 0, -3), (2, 53, 1, -4))), \ -values1 AS (SELECT "a", coalesce("b", -1) AS "b", "c", "d" FROM (SELECT "a" FROM values0 WHERE "a" IS NOT NULL GROUP BY "a") AS _key CROSS JOIN (SELECT "c", "d" FROM values0 GROUP BY "c", "d") AS _groups LEFT JOIN values0 USING("a", "c", "d")) \ -SELECT * FROM values1 ORDER BY "a" ASC NULLS FIRST, "b" ASC NULLS FIRST, "c" ASC NULLS FIRST, "d" ASC NULLS FIRST +WITH values0 AS (SELECT "COLUMN1" AS "a", "COLUMN2" AS "b", "COLUMN3" AS "c", "COLUMN4" AS "d" FROM (VALUES (0, 28, 0, -1), (0, 91, 1, -1), (1, 43, 0, -2), (NULL, 55, 1, -2), (3, 19, 0, -3), (2, 81, 0, -3), (2, 53, 1, 
-4))), values1 AS (SELECT "a", coalesce("b", CAST(-1 AS BIGINT)) AS "b", "c", "d" FROM (SELECT "a" FROM values0 WHERE "a" IS NOT NULL GROUP BY "a") AS _key CROSS JOIN (SELECT "c", "d" FROM values0 GROUP BY "c", "d") AS _groups LEFT JOIN values0 USING("a", "c", "d")) SELECT * FROM values1 ORDER BY "a" ASC NULLS FIRST, "b" ASC NULLS FIRST, "c" ASC NULLS FIRST, "d" ASC NULLS FIRST """ result = ''' +---+----+---+----+ @@ -243,64 +153,34 @@ result = ''' [ordered_no_groups] athena = """ -WITH \ -values0 AS (SELECT * FROM (VALUES (1, 0, 28, 0, -1), (2, 0, 91, 1, -1), (3, 1, 43, 0, -2), (4, NULL, 55, 1, -2), (5, 3, 19, 0, -3), (6, 2, 81, 0, -3), (7, 2, 53, 1, -4)) AS "_values" ("_order", "a", "b", "c", "d")), \ -values1 AS (SELECT "_order", coalesce("a", -1) AS "a", "b", "c", "d" FROM values0) \ -SELECT * FROM values1 ORDER BY "_order" ASC NULLS FIRST +WITH values0 AS (SELECT * FROM (VALUES (1, 0, 28, 0, -1), (2, 0, 91, 1, -1), (3, 1, 43, 0, -2), (4, NULL, 55, 1, -2), (5, 3, 19, 0, -3), (6, 2, 81, 0, -3), (7, 2, 53, 1, -4)) AS "_values" ("_order", "a", "b", "c", "d")), values1 AS (SELECT "_order", coalesce("a", CAST(-1 AS BIGINT)) AS "a", "b", "c", "d" FROM values0) SELECT * FROM values1 ORDER BY "_order" ASC NULLS FIRST """ bigquery = """ -WITH \ -values0 AS (SELECT 1 AS `_order`, 0 AS `a`, 28 AS `b`, 0 AS `c`, -1 AS `d` UNION ALL SELECT 2 AS `_order`, 0 AS `a`, 91 AS `b`, 1 AS `c`, -1 AS `d` UNION ALL SELECT 3 AS `_order`, 1 AS `a`, 43 AS `b`, 0 AS `c`, -2 AS `d` UNION ALL SELECT 4 AS `_order`, NULL AS `a`, 55 AS `b`, 1 AS `c`, -2 AS `d` UNION ALL SELECT 5 AS `_order`, 3 AS `a`, 19 AS `b`, 0 AS `c`, -3 AS `d` UNION ALL SELECT 6 AS `_order`, 2 AS `a`, 81 AS `b`, 0 AS `c`, -3 AS `d` UNION ALL SELECT 7 AS `_order`, 2 AS `a`, 53 AS `b`, 1 AS `c`, -4 AS `d`), \ -values1 AS (SELECT `_order`, coalesce(`a`, -1) AS `a`, `b`, `c`, `d` FROM values0) \ -SELECT * FROM values1 ORDER BY `_order` ASC NULLS FIRST +WITH values0 AS (SELECT 1 AS `_order`, 0 AS `a`, 28 AS `b`, 0 AS `c`, -1 AS `d` UNION ALL SELECT 2 AS `_order`, 0 AS `a`, 91 AS `b`, 1 AS `c`, -1 AS `d` UNION ALL SELECT 3 AS `_order`, 1 AS `a`, 43 AS `b`, 0 AS `c`, -2 AS `d` UNION ALL SELECT 4 AS `_order`, NULL AS `a`, 55 AS `b`, 1 AS `c`, -2 AS `d` UNION ALL SELECT 5 AS `_order`, 3 AS `a`, 19 AS `b`, 0 AS `c`, -3 AS `d` UNION ALL SELECT 6 AS `_order`, 2 AS `a`, 81 AS `b`, 0 AS `c`, -3 AS `d` UNION ALL SELECT 7 AS `_order`, 2 AS `a`, 53 AS `b`, 1 AS `c`, -4 AS `d`), values1 AS (SELECT `_order`, coalesce(`a`, CAST(-1 AS INT)) AS `a`, `b`, `c`, `d` FROM values0) SELECT * FROM values1 ORDER BY `_order` ASC NULLS FIRST """ clickhouse = """ -WITH \ -values0 AS (SELECT 1 AS "_order", 0 AS "a", 28 AS "b", 0 AS "c", -1 AS "d" UNION ALL SELECT 2 AS "_order", 0 AS "a", 91 AS "b", 1 AS "c", -1 AS "d" UNION ALL SELECT 3 AS "_order", 1 AS "a", 43 AS "b", 0 AS "c", -2 AS "d" UNION ALL SELECT 4 AS "_order", NULL AS "a", 55 AS "b", 1 AS "c", -2 AS "d" UNION ALL SELECT 5 AS "_order", 3 AS "a", 19 AS "b", 0 AS "c", -3 AS "d" UNION ALL SELECT 6 AS "_order", 2 AS "a", 81 AS "b", 0 AS "c", -3 AS "d" UNION ALL SELECT 7 AS "_order", 2 AS "a", 53 AS "b", 1 AS "c", -4 AS "d"), \ -values1 AS (SELECT "_order", coalesce("a", -1) AS "a", "b", "c", "d" FROM values0) \ -SELECT * FROM values1 ORDER BY "_order" ASC NULLS FIRST +WITH values0 AS (SELECT 1 AS "_order", 0 AS "a", 28 AS "b", 0 AS "c", -1 AS "d" UNION ALL SELECT 2 AS "_order", 0 AS "a", 91 AS "b", 1 AS "c", -1 AS "d" UNION ALL SELECT 3 AS "_order", 1 AS "a", 43 AS "b", 0 AS "c", -2 AS "d" UNION ALL SELECT 4 AS "_order", 
NULL AS "a", 55 AS "b", 1 AS "c", -2 AS "d" UNION ALL SELECT 5 AS "_order", 3 AS "a", 19 AS "b", 0 AS "c", -3 AS "d" UNION ALL SELECT 6 AS "_order", 2 AS "a", 81 AS "b", 0 AS "c", -3 AS "d" UNION ALL SELECT 7 AS "_order", 2 AS "a", 53 AS "b", 1 AS "c", -4 AS "d"), values1 AS (SELECT "_order", coalesce("a", CASE WHEN -1 IS NOT NULL THEN CAST(-1 AS BIGINT) ELSE NULL END) AS "a", "b", "c", "d" FROM values0) SELECT * FROM values1 ORDER BY "_order" ASC NULLS FIRST """ databricks = """ -WITH \ -values0 AS (SELECT * FROM (VALUES (1, 0, 28, 0, -1), (2, 0, 91, 1, -1), (3, 1, 43, 0, -2), (4, NULL, 55, 1, -2), (5, 3, 19, 0, -3), (6, 2, 81, 0, -3), (7, 2, 53, 1, -4)) AS `_values` (`_order`, `a`, `b`, `c`, `d`)), \ -values1 AS (SELECT `_order`, coalesce(`a`, -1) AS `a`, `b`, `c`, `d` FROM values0) \ -SELECT * FROM values1 ORDER BY `_order` ASC NULLS FIRST +WITH values0 AS (SELECT * FROM (VALUES (1, 0, 28, 0, -1), (2, 0, 91, 1, -1), (3, 1, 43, 0, -2), (4, NULL, 55, 1, -2), (5, 3, 19, 0, -3), (6, 2, 81, 0, -3), (7, 2, 53, 1, -4)) AS `_values` (`_order`, `a`, `b`, `c`, `d`)), values1 AS (SELECT `_order`, coalesce(`a`, CAST(-1 AS BIGINT)) AS `a`, `b`, `c`, `d` FROM values0) SELECT * FROM values1 ORDER BY `_order` ASC NULLS FIRST """ datafusion = """ -WITH \ -values0 AS (SELECT * FROM (VALUES (1, 0, 28, 0, -1), (2, 0, 91, 1, -1), (3, 1, 43, 0, -2), (4, NULL, 55, 1, -2), (5, 3, 19, 0, -3), (6, 2, 81, 0, -3), (7, 2, 53, 1, -4)) AS "_values" ("_order", "a", "b", "c", "d")), \ -values1 AS (SELECT "_order", coalesce("a", -1) AS "a", "b", "c", "d" FROM values0) \ -SELECT * FROM values1 ORDER BY "_order" ASC NULLS FIRST +WITH values0 AS (SELECT * FROM (VALUES (1, 0, 28, 0, -1), (2, 0, 91, 1, -1), (3, 1, 43, 0, -2), (4, NULL, 55, 1, -2), (5, 3, 19, 0, -3), (6, 2, 81, 0, -3), (7, 2, 53, 1, -4)) AS "_values" ("_order", "a", "b", "c", "d")), values1 AS (SELECT "_order", coalesce("a", CAST(-1 AS BIGINT)) AS "a", "b", "c", "d" FROM values0) SELECT * FROM values1 ORDER BY "_order" ASC NULLS FIRST """ duckdb = """ -WITH \ -values0 AS (SELECT * FROM (VALUES (1, 0, 28, 0, -1), (2, 0, 91, 1, -1), (3, 1, 43, 0, -2), (4, NULL, 55, 1, -2), (5, 3, 19, 0, -3), (6, 2, 81, 0, -3), (7, 2, 53, 1, -4)) AS "_values" ("_order", "a", "b", "c", "d")), \ -values1 AS (SELECT "_order", coalesce("a", -1) AS "a", "b", "c", "d" FROM values0) \ -SELECT * FROM values1 ORDER BY "_order" ASC NULLS FIRST +WITH values0 AS (SELECT * FROM (VALUES (1, 0, 28, 0, -1), (2, 0, 91, 1, -1), (3, 1, 43, 0, -2), (4, NULL, 55, 1, -2), (5, 3, 19, 0, -3), (6, 2, 81, 0, -3), (7, 2, 53, 1, -4)) AS "_values" ("_order", "a", "b", "c", "d")), values1 AS (SELECT "_order", coalesce("a", CAST(-1 AS BIGINT)) AS "a", "b", "c", "d" FROM values0) SELECT * FROM values1 ORDER BY "_order" ASC NULLS FIRST """ mysql = """ -WITH \ -values0 AS (SELECT * FROM (VALUES ROW(1, 0, 28, 0, -1), ROW(2, 0, 91, 1, -1), ROW(3, 1, 43, 0, -2), ROW(4, NULL, 55, 1, -2), ROW(5, 3, 19, 0, -3), ROW(6, 2, 81, 0, -3), ROW(7, 2, 53, 1, -4)) AS `_values` (`_order`, `a`, `b`, `c`, `d`)), \ -values1 AS (SELECT `_order`, coalesce(`a`, -1) AS `a`, `b`, `c`, `d` FROM values0) \ -SELECT * FROM values1 ORDER BY `_order` ASC +WITH values0 AS (SELECT * FROM (VALUES ROW(1, 0, 28, 0, -1), ROW(2, 0, 91, 1, -1), ROW(3, 1, 43, 0, -2), ROW(4, NULL, 55, 1, -2), ROW(5, 3, 19, 0, -3), ROW(6, 2, 81, 0, -3), ROW(7, 2, 53, 1, -4)) AS `_values` (`_order`, `a`, `b`, `c`, `d`)), values1 AS (SELECT `_order`, coalesce(`a`, CAST(-1 AS SIGNED)) AS `a`, `b`, `c`, `d` FROM values0) SELECT * FROM values1 ORDER BY `_order` ASC """ 
postgres = """ -WITH \ -values0 AS (SELECT * FROM (VALUES (1, 0, 28, 0, -1), (2, 0, 91, 1, -1), (3, 1, 43, 0, -2), (4, NULL, 55, 1, -2), (5, 3, 19, 0, -3), (6, 2, 81, 0, -3), (7, 2, 53, 1, -4)) AS "_values" ("_order", "a", "b", "c", "d")), \ -values1 AS (SELECT "_order", coalesce("a", -1) AS "a", "b", "c", "d" FROM values0) \ -SELECT * FROM values1 ORDER BY "_order" ASC NULLS FIRST +WITH values0 AS (SELECT * FROM (VALUES (1, 0, 28, 0, -1), (2, 0, 91, 1, -1), (3, 1, 43, 0, -2), (4, NULL, 55, 1, -2), (5, 3, 19, 0, -3), (6, 2, 81, 0, -3), (7, 2, 53, 1, -4)) AS "_values" ("_order", "a", "b", "c", "d")), values1 AS (SELECT "_order", coalesce("a", CAST(-1 AS BIGINT)) AS "a", "b", "c", "d" FROM values0) SELECT * FROM values1 ORDER BY "_order" ASC NULLS FIRST """ redshift = """ -WITH \ -values0 AS (SELECT 1 AS "_order", 0 AS "a", 28 AS "b", 0 AS "c", -1 AS "d" UNION ALL SELECT 2 AS "_order", 0 AS "a", 91 AS "b", 1 AS "c", -1 AS "d" UNION ALL SELECT 3 AS "_order", 1 AS "a", 43 AS "b", 0 AS "c", -2 AS "d" UNION ALL SELECT 4 AS "_order", NULL AS "a", 55 AS "b", 1 AS "c", -2 AS "d" UNION ALL SELECT 5 AS "_order", 3 AS "a", 19 AS "b", 0 AS "c", -3 AS "d" UNION ALL SELECT 6 AS "_order", 2 AS "a", 81 AS "b", 0 AS "c", -3 AS "d" UNION ALL SELECT 7 AS "_order", 2 AS "a", 53 AS "b", 1 AS "c", -4 AS "d"), \ -values1 AS (SELECT "_order", coalesce("a", -1) AS "a", "b", "c", "d" FROM values0) \ -SELECT * FROM values1 ORDER BY "_order" ASC NULLS FIRST +WITH values0 AS (SELECT 1 AS "_order", 0 AS "a", 28 AS "b", 0 AS "c", -1 AS "d" UNION ALL SELECT 2 AS "_order", 0 AS "a", 91 AS "b", 1 AS "c", -1 AS "d" UNION ALL SELECT 3 AS "_order", 1 AS "a", 43 AS "b", 0 AS "c", -2 AS "d" UNION ALL SELECT 4 AS "_order", NULL AS "a", 55 AS "b", 1 AS "c", -2 AS "d" UNION ALL SELECT 5 AS "_order", 3 AS "a", 19 AS "b", 0 AS "c", -3 AS "d" UNION ALL SELECT 6 AS "_order", 2 AS "a", 81 AS "b", 0 AS "c", -3 AS "d" UNION ALL SELECT 7 AS "_order", 2 AS "a", 53 AS "b", 1 AS "c", -4 AS "d"), values1 AS (SELECT "_order", coalesce("a", CASE WHEN -1 IS NOT NULL THEN CAST(-1 AS BIGINT) ELSE NULL END) AS "a", "b", "c", "d" FROM values0) SELECT * FROM values1 ORDER BY "_order" ASC NULLS FIRST """ snowflake = """ -WITH \ -values0 AS (SELECT "COLUMN1" AS "_order", "COLUMN2" AS "a", "COLUMN3" AS "b", "COLUMN4" AS "c", "COLUMN5" AS "d" FROM (VALUES (1, 0, 28, 0, -1), (2, 0, 91, 1, -1), (3, 1, 43, 0, -2), (4, NULL, 55, 1, -2), (5, 3, 19, 0, -3), (6, 2, 81, 0, -3), (7, 2, 53, 1, -4))), \ -values1 AS (SELECT "_order", coalesce("a", -1) AS "a", "b", "c", "d" FROM values0) \ -SELECT * FROM values1 ORDER BY "_order" ASC NULLS FIRST +WITH values0 AS (SELECT "COLUMN1" AS "_order", "COLUMN2" AS "a", "COLUMN3" AS "b", "COLUMN4" AS "c", "COLUMN5" AS "d" FROM (VALUES (1, 0, 28, 0, -1), (2, 0, 91, 1, -1), (3, 1, 43, 0, -2), (4, NULL, 55, 1, -2), (5, 3, 19, 0, -3), (6, 2, 81, 0, -3), (7, 2, 53, 1, -4))), values1 AS (SELECT "_order", coalesce("a", CAST(-1 AS BIGINT)) AS "a", "b", "c", "d" FROM values0) SELECT * FROM values1 ORDER BY "_order" ASC NULLS FIRST """ result = ''' +--------+----+----+---+----+ @@ -318,81 +198,34 @@ result = ''' [ordered_one_group] athena = """ -WITH \ -values0 AS (SELECT * FROM (VALUES (1, 0, 28, 0, -1), (2, 0, 91, 1, -1), (3, 1, 43, 0, -2), (4, NULL, 55, 1, -2), (5, 3, 19, 0, -3), (6, 2, 81, 0, -3), (7, 2, 53, 1, -4)) AS "_values" ("_order", "a", "b", "c", "d")), \ -values1 AS (SELECT "_order", "a", coalesce("b", -1) AS "b", "c", "d", "_order_key", "_order_groups" FROM (SELECT "a", min("_order") AS "_order_key" FROM values0 WHERE "a" 
IS NOT NULL GROUP BY "a") AS _key CROSS JOIN (SELECT "c", min("_order") AS "_order_groups" FROM values0 GROUP BY "c") AS _groups LEFT JOIN values0 USING("a", "c")), \ -values2 AS (SELECT row_number() OVER (ORDER BY "_order" ASC NULLS LAST, "_order_groups" ASC NULLS FIRST, "_order_key" ASC NULLS FIRST) AS "_order", "a", "b", "c", "d" FROM values1) \ -SELECT * FROM values2 ORDER BY "_order" ASC NULLS FIRST +WITH values0 AS (SELECT * FROM (VALUES (1, 0, 28, 0, -1), (2, 0, 91, 1, -1), (3, 1, 43, 0, -2), (4, NULL, 55, 1, -2), (5, 3, 19, 0, -3), (6, 2, 81, 0, -3), (7, 2, 53, 1, -4)) AS "_values" ("_order", "a", "b", "c", "d")), values1 AS (SELECT "_order", "a", coalesce("b", CAST(-1 AS BIGINT)) AS "b", "c", "d", "_order_key", "_order_groups" FROM (SELECT "a", min("_order") AS "_order_key" FROM values0 WHERE "a" IS NOT NULL GROUP BY "a") AS _key CROSS JOIN (SELECT "c", min("_order") AS "_order_groups" FROM values0 GROUP BY "c") AS _groups LEFT JOIN values0 USING("a", "c")), values2 AS (SELECT row_number() OVER (ORDER BY "_order" ASC NULLS LAST, "_order_groups" ASC NULLS FIRST, "_order_key" ASC NULLS FIRST) AS "_order", "a", "b", "c", "d" FROM values1) SELECT * FROM values2 ORDER BY "_order" ASC NULLS FIRST """ bigquery = """ -WITH \ -values0 AS (SELECT 1 AS `_order`, 0 AS `a`, 28 AS `b`, 0 AS `c`, -1 AS `d` UNION ALL SELECT 2 AS `_order`, 0 AS `a`, 91 AS `b`, 1 AS `c`, -1 AS `d` UNION ALL SELECT 3 AS `_order`, 1 AS `a`, 43 AS `b`, 0 AS `c`, -2 AS `d` UNION ALL SELECT 4 AS `_order`, NULL AS `a`, 55 AS `b`, 1 AS `c`, -2 AS `d` UNION ALL SELECT 5 AS `_order`, 3 AS `a`, 19 AS `b`, 0 AS `c`, -3 AS `d` UNION ALL SELECT 6 AS `_order`, 2 AS `a`, 81 AS `b`, 0 AS `c`, -3 AS `d` UNION ALL SELECT 7 AS `_order`, 2 AS `a`, 53 AS `b`, 1 AS `c`, -4 AS `d`), \ -values1 AS (SELECT `_order`, `a`, coalesce(`b`, -1) AS `b`, `c`, `d`, `_order_key`, `_order_groups` FROM (SELECT `a`, min(`_order`) AS `_order_key` FROM values0 WHERE `a` IS NOT NULL GROUP BY `a`) AS _key CROSS JOIN (SELECT `c`, min(`_order`) AS `_order_groups` FROM values0 GROUP BY `c`) AS _groups LEFT JOIN values0 USING(`a`, `c`)), \ -values2 AS (SELECT row_number() OVER (ORDER BY `_order` ASC NULLS LAST, `_order_groups` ASC NULLS FIRST, `_order_key` ASC NULLS FIRST) AS `_order`, `a`, `b`, `c`, `d` FROM values1) \ -SELECT * FROM values2 ORDER BY `_order` ASC NULLS FIRST - +WITH values0 AS (SELECT 1 AS `_order`, 0 AS `a`, 28 AS `b`, 0 AS `c`, -1 AS `d` UNION ALL SELECT 2 AS `_order`, 0 AS `a`, 91 AS `b`, 1 AS `c`, -1 AS `d` UNION ALL SELECT 3 AS `_order`, 1 AS `a`, 43 AS `b`, 0 AS `c`, -2 AS `d` UNION ALL SELECT 4 AS `_order`, NULL AS `a`, 55 AS `b`, 1 AS `c`, -2 AS `d` UNION ALL SELECT 5 AS `_order`, 3 AS `a`, 19 AS `b`, 0 AS `c`, -3 AS `d` UNION ALL SELECT 6 AS `_order`, 2 AS `a`, 81 AS `b`, 0 AS `c`, -3 AS `d` UNION ALL SELECT 7 AS `_order`, 2 AS `a`, 53 AS `b`, 1 AS `c`, -4 AS `d`), values1 AS (SELECT `_order`, `a`, coalesce(`b`, CAST(-1 AS INT)) AS `b`, `c`, `d`, `_order_key`, `_order_groups` FROM (SELECT `a`, min(`_order`) AS `_order_key` FROM values0 WHERE `a` IS NOT NULL GROUP BY `a`) AS _key CROSS JOIN (SELECT `c`, min(`_order`) AS `_order_groups` FROM values0 GROUP BY `c`) AS _groups LEFT JOIN values0 USING(`a`, `c`)), values2 AS (SELECT row_number() OVER (ORDER BY `_order` ASC NULLS LAST, `_order_groups` ASC NULLS FIRST, `_order_key` ASC NULLS FIRST) AS `_order`, `a`, `b`, `c`, `d` FROM values1) SELECT * FROM values2 ORDER BY `_order` ASC NULLS FIRST """ clickhouse = """ -WITH \ -values0 AS (SELECT 1 AS "_order", 0 AS "a", 28 AS "b", 0 AS "c", 
-1 AS "d" UNION ALL SELECT 2 AS "_order", 0 AS "a", 91 AS "b", 1 AS "c", -1 AS "d" UNION ALL SELECT 3 AS "_order", 1 AS "a", 43 AS "b", 0 AS "c", -2 AS "d" UNION ALL SELECT 4 AS "_order", NULL AS "a", 55 AS "b", 1 AS "c", -2 AS "d" UNION ALL SELECT 5 AS "_order", 3 AS "a", 19 AS "b", 0 AS "c", -3 AS "d" UNION ALL SELECT 6 AS "_order", 2 AS "a", 81 AS "b", 0 AS "c", -3 AS "d" UNION ALL SELECT 7 AS "_order", 2 AS "a", 53 AS "b", 1 AS "c", -4 AS "d"), \ -values1 AS (SELECT "_order", "_key"."a" AS "a", coalesce("b", -1) AS "b", "_groups"."c" AS "c", "d", "_order_key", "_order_groups" FROM (SELECT "a", min("_order") AS "_order_key" FROM values0 WHERE "a" IS NOT NULL GROUP BY "a") AS _key CROSS JOIN (SELECT "c", min("_order") AS "_order_groups" FROM values0 GROUP BY "c") AS _groups LEFT JOIN values0 USING("a", "c")), \ -values2 AS (SELECT row_number() OVER (ORDER BY "_order" ASC NULLS LAST, "_order_groups" ASC NULLS FIRST, "_order_key" ASC NULLS FIRST) AS "_order", "a", "b", "c", "d" FROM values1) \ -SELECT * FROM values2 ORDER BY "_order" ASC NULLS FIRST - +WITH values0 AS (SELECT 1 AS "_order", 0 AS "a", 28 AS "b", 0 AS "c", -1 AS "d" UNION ALL SELECT 2 AS "_order", 0 AS "a", 91 AS "b", 1 AS "c", -1 AS "d" UNION ALL SELECT 3 AS "_order", 1 AS "a", 43 AS "b", 0 AS "c", -2 AS "d" UNION ALL SELECT 4 AS "_order", NULL AS "a", 55 AS "b", 1 AS "c", -2 AS "d" UNION ALL SELECT 5 AS "_order", 3 AS "a", 19 AS "b", 0 AS "c", -3 AS "d" UNION ALL SELECT 6 AS "_order", 2 AS "a", 81 AS "b", 0 AS "c", -3 AS "d" UNION ALL SELECT 7 AS "_order", 2 AS "a", 53 AS "b", 1 AS "c", -4 AS "d"), values1 AS (SELECT "_order", "_key"."a" AS "a", coalesce("b", CASE WHEN -1 IS NOT NULL THEN CAST(-1 AS BIGINT) ELSE NULL END) AS "b", "_groups"."c" AS "c", "d", "_order_key", "_order_groups" FROM (SELECT "a", min("_order") AS "_order_key" FROM values0 WHERE "a" IS NOT NULL GROUP BY "a") AS _key CROSS JOIN (SELECT "c", min("_order") AS "_order_groups" FROM values0 GROUP BY "c") AS _groups LEFT JOIN values0 USING("a", "c")), values2 AS (SELECT row_number() OVER (ORDER BY "_order" ASC NULLS LAST, "_order_groups" ASC NULLS FIRST, "_order_key" ASC NULLS FIRST) AS "_order", "a", "b", "c", "d" FROM values1) SELECT * FROM values2 ORDER BY "_order" ASC NULLS FIRST """ databricks = """ -WITH \ -values0 AS (SELECT * FROM (VALUES (1, 0, 28, 0, -1), (2, 0, 91, 1, -1), (3, 1, 43, 0, -2), (4, NULL, 55, 1, -2), (5, 3, 19, 0, -3), (6, 2, 81, 0, -3), (7, 2, 53, 1, -4)) AS `_values` (`_order`, `a`, `b`, `c`, `d`)), \ -values1 AS (SELECT `_order`, `a`, coalesce(`b`, -1) AS `b`, `c`, `d`, `_order_key`, `_order_groups` FROM (SELECT `a`, min(`_order`) AS `_order_key` FROM values0 WHERE `a` IS NOT NULL GROUP BY `a`) AS _key CROSS JOIN (SELECT `c`, min(`_order`) AS `_order_groups` FROM values0 GROUP BY `c`) AS _groups LEFT JOIN values0 USING(`a`, `c`)), \ -values2 AS (SELECT row_number() OVER (ORDER BY `_order` ASC NULLS LAST, `_order_groups` ASC NULLS FIRST, `_order_key` ASC NULLS FIRST) AS `_order`, `a`, `b`, `c`, `d` FROM values1) \ -SELECT * FROM values2 ORDER BY `_order` ASC NULLS FIRST - +WITH values0 AS (SELECT * FROM (VALUES (1, 0, 28, 0, -1), (2, 0, 91, 1, -1), (3, 1, 43, 0, -2), (4, NULL, 55, 1, -2), (5, 3, 19, 0, -3), (6, 2, 81, 0, -3), (7, 2, 53, 1, -4)) AS `_values` (`_order`, `a`, `b`, `c`, `d`)), values1 AS (SELECT `_order`, `a`, coalesce(`b`, CAST(-1 AS BIGINT)) AS `b`, `c`, `d`, `_order_key`, `_order_groups` FROM (SELECT `a`, min(`_order`) AS `_order_key` FROM values0 WHERE `a` IS NOT NULL GROUP BY `a`) AS _key CROSS JOIN (SELECT `c`, 
min(`_order`) AS `_order_groups` FROM values0 GROUP BY `c`) AS _groups LEFT JOIN values0 USING(`a`, `c`)), values2 AS (SELECT row_number() OVER (ORDER BY `_order` ASC NULLS LAST, `_order_groups` ASC NULLS FIRST, `_order_key` ASC NULLS FIRST) AS `_order`, `a`, `b`, `c`, `d` FROM values1) SELECT * FROM values2 ORDER BY `_order` ASC NULLS FIRST """ datafusion = """ -WITH \ -values0 AS (SELECT * FROM (VALUES (1, 0, 28, 0, -1), (2, 0, 91, 1, -1), (3, 1, 43, 0, -2), (4, NULL, 55, 1, -2), (5, 3, 19, 0, -3), (6, 2, 81, 0, -3), (7, 2, 53, 1, -4)) AS "_values" ("_order", "a", "b", "c", "d")), \ -values1 AS (SELECT "_order", "a", coalesce("b", -1) AS "b", "c", "d", "_order_key", "_order_groups" FROM (SELECT "a", min("_order") AS "_order_key" FROM values0 WHERE "a" IS NOT NULL GROUP BY "a") AS _key CROSS JOIN (SELECT "c", min("_order") AS "_order_groups" FROM values0 GROUP BY "c") AS _groups LEFT JOIN values0 USING("a", "c")), \ -values2 AS (SELECT row_number() OVER (ORDER BY "_order" ASC NULLS LAST, "_order_groups" ASC NULLS FIRST, "_order_key" ASC NULLS FIRST ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) AS "_order", "a", "b", "c", "d" FROM values1) \ -SELECT * FROM values2 ORDER BY "_order" ASC NULLS FIRST +WITH values0 AS (SELECT * FROM (VALUES (1, 0, 28, 0, -1), (2, 0, 91, 1, -1), (3, 1, 43, 0, -2), (4, NULL, 55, 1, -2), (5, 3, 19, 0, -3), (6, 2, 81, 0, -3), (7, 2, 53, 1, -4)) AS "_values" ("_order", "a", "b", "c", "d")), values1 AS (SELECT "_order", "a", coalesce("b", CAST(-1 AS BIGINT)) AS "b", "c", "d", "_order_key", "_order_groups" FROM (SELECT "a", min("_order") AS "_order_key" FROM values0 WHERE "a" IS NOT NULL GROUP BY "a") AS _key CROSS JOIN (SELECT "c", min("_order") AS "_order_groups" FROM values0 GROUP BY "c") AS _groups LEFT JOIN values0 USING("a", "c")), values2 AS (SELECT row_number() OVER (ORDER BY "_order" ASC NULLS LAST, "_order_groups" ASC NULLS FIRST, "_order_key" ASC NULLS FIRST ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) AS "_order", "a", "b", "c", "d" FROM values1) SELECT * FROM values2 ORDER BY "_order" ASC NULLS FIRST """ duckdb = """ -WITH \ -values0 AS (SELECT * FROM (VALUES (1, 0, 28, 0, -1), (2, 0, 91, 1, -1), (3, 1, 43, 0, -2), (4, NULL, 55, 1, -2), (5, 3, 19, 0, -3), (6, 2, 81, 0, -3), (7, 2, 53, 1, -4)) AS "_values" ("_order", "a", "b", "c", "d")), \ -values1 AS (SELECT "_order", "a", coalesce("b", -1) AS "b", "c", "d", "_order_key", "_order_groups" FROM (SELECT "a", min("_order") AS "_order_key" FROM values0 WHERE "a" IS NOT NULL GROUP BY "a") AS _key CROSS JOIN (SELECT "c", min("_order") AS "_order_groups" FROM values0 GROUP BY "c") AS _groups LEFT JOIN values0 USING("a", "c")), \ -values2 AS (SELECT row_number() OVER (ORDER BY "_order" ASC NULLS LAST, "_order_groups" ASC NULLS FIRST, "_order_key" ASC NULLS FIRST) AS "_order", "a", "b", "c", "d" FROM values1) \ -SELECT * FROM values2 ORDER BY "_order" ASC NULLS FIRST - +WITH values0 AS (SELECT * FROM (VALUES (1, 0, 28, 0, -1), (2, 0, 91, 1, -1), (3, 1, 43, 0, -2), (4, NULL, 55, 1, -2), (5, 3, 19, 0, -3), (6, 2, 81, 0, -3), (7, 2, 53, 1, -4)) AS "_values" ("_order", "a", "b", "c", "d")), values1 AS (SELECT "_order", "a", coalesce("b", CAST(-1 AS BIGINT)) AS "b", "c", "d", "_order_key", "_order_groups" FROM (SELECT "a", min("_order") AS "_order_key" FROM values0 WHERE "a" IS NOT NULL GROUP BY "a") AS _key CROSS JOIN (SELECT "c", min("_order") AS "_order_groups" FROM values0 GROUP BY "c") AS _groups LEFT JOIN values0 USING("a", "c")), values2 AS (SELECT row_number() OVER (ORDER BY "_order" ASC NULLS LAST, 
"_order_groups" ASC NULLS FIRST, "_order_key" ASC NULLS FIRST) AS "_order", "a", "b", "c", "d" FROM values1) SELECT * FROM values2 ORDER BY "_order" ASC NULLS FIRST """ mysql = """ -WITH \ -values0 AS (SELECT * FROM (VALUES ROW(1, 0, 28, 0, -1), ROW(2, 0, 91, 1, -1), ROW(3, 1, 43, 0, -2), ROW(4, NULL, 55, 1, -2), ROW(5, 3, 19, 0, -3), ROW(6, 2, 81, 0, -3), ROW(7, 2, 53, 1, -4)) AS `_values` (`_order`, `a`, `b`, `c`, `d`)), \ -values1 AS (SELECT `_order`, `a`, coalesce(`b`, -1) AS `b`, `c`, `d`, `_order_key`, `_order_groups` FROM (SELECT `a`, min(`_order`) AS `_order_key` FROM values0 WHERE `a` IS NOT NULL GROUP BY `a`) AS _key CROSS JOIN (SELECT `c`, min(`_order`) AS `_order_groups` FROM values0 GROUP BY `c`) AS _groups LEFT JOIN values0 USING(`a`, `c`)), \ -values2 AS (SELECT row_number() OVER (ORDER BY `_order` IS NULL ASC, `_order` ASC, `_order_groups` ASC, `_order_key` ASC) AS `_order`, `a`, `b`, `c`, `d` FROM values1) \ -SELECT * FROM values2 ORDER BY `_order` ASC +WITH values0 AS (SELECT * FROM (VALUES ROW(1, 0, 28, 0, -1), ROW(2, 0, 91, 1, -1), ROW(3, 1, 43, 0, -2), ROW(4, NULL, 55, 1, -2), ROW(5, 3, 19, 0, -3), ROW(6, 2, 81, 0, -3), ROW(7, 2, 53, 1, -4)) AS `_values` (`_order`, `a`, `b`, `c`, `d`)), values1 AS (SELECT `_order`, `a`, coalesce(`b`, CAST(-1 AS SIGNED)) AS `b`, `c`, `d`, `_order_key`, `_order_groups` FROM (SELECT `a`, min(`_order`) AS `_order_key` FROM values0 WHERE `a` IS NOT NULL GROUP BY `a`) AS _key CROSS JOIN (SELECT `c`, min(`_order`) AS `_order_groups` FROM values0 GROUP BY `c`) AS _groups LEFT JOIN values0 USING(`a`, `c`)), values2 AS (SELECT row_number() OVER (ORDER BY `_order` IS NULL ASC, `_order` ASC, `_order_groups` ASC, `_order_key` ASC) AS `_order`, `a`, `b`, `c`, `d` FROM values1) SELECT * FROM values2 ORDER BY `_order` ASC """ postgres = """ -WITH \ -values0 AS (SELECT * FROM (VALUES (1, 0, 28, 0, -1), (2, 0, 91, 1, -1), (3, 1, 43, 0, -2), (4, NULL, 55, 1, -2), (5, 3, 19, 0, -3), (6, 2, 81, 0, -3), (7, 2, 53, 1, -4)) AS "_values" ("_order", "a", "b", "c", "d")), \ -values1 AS (SELECT "_order", "a", coalesce("b", -1) AS "b", "c", "d", "_order_key", "_order_groups" FROM (SELECT "a", min("_order") AS "_order_key" FROM values0 WHERE "a" IS NOT NULL GROUP BY "a") AS _key CROSS JOIN (SELECT "c", min("_order") AS "_order_groups" FROM values0 GROUP BY "c") AS _groups LEFT JOIN values0 USING("a", "c")), \ -values2 AS (SELECT row_number() OVER (ORDER BY "_order" ASC NULLS LAST, "_order_groups" ASC NULLS FIRST, "_order_key" ASC NULLS FIRST) AS "_order", "a", "b", "c", "d" FROM values1) \ -SELECT * FROM values2 ORDER BY "_order" ASC NULLS FIRST - +WITH values0 AS (SELECT * FROM (VALUES (1, 0, 28, 0, -1), (2, 0, 91, 1, -1), (3, 1, 43, 0, -2), (4, NULL, 55, 1, -2), (5, 3, 19, 0, -3), (6, 2, 81, 0, -3), (7, 2, 53, 1, -4)) AS "_values" ("_order", "a", "b", "c", "d")), values1 AS (SELECT "_order", "a", coalesce("b", CAST(-1 AS BIGINT)) AS "b", "c", "d", "_order_key", "_order_groups" FROM (SELECT "a", min("_order") AS "_order_key" FROM values0 WHERE "a" IS NOT NULL GROUP BY "a") AS _key CROSS JOIN (SELECT "c", min("_order") AS "_order_groups" FROM values0 GROUP BY "c") AS _groups LEFT JOIN values0 USING("a", "c")), values2 AS (SELECT row_number() OVER (ORDER BY "_order" ASC NULLS LAST, "_order_groups" ASC NULLS FIRST, "_order_key" ASC NULLS FIRST) AS "_order", "a", "b", "c", "d" FROM values1) SELECT * FROM values2 ORDER BY "_order" ASC NULLS FIRST """ redshift = """ -WITH \ -values0 AS (SELECT 1 AS "_order", 0 AS "a", 28 AS "b", 0 AS "c", -1 AS "d" UNION ALL SELECT 2 AS 
"_order", 0 AS "a", 91 AS "b", 1 AS "c", -1 AS "d" UNION ALL SELECT 3 AS "_order", 1 AS "a", 43 AS "b", 0 AS "c", -2 AS "d" UNION ALL SELECT 4 AS "_order", NULL AS "a", 55 AS "b", 1 AS "c", -2 AS "d" UNION ALL SELECT 5 AS "_order", 3 AS "a", 19 AS "b", 0 AS "c", -3 AS "d" UNION ALL SELECT 6 AS "_order", 2 AS "a", 81 AS "b", 0 AS "c", -3 AS "d" UNION ALL SELECT 7 AS "_order", 2 AS "a", 53 AS "b", 1 AS "c", -4 AS "d"), \ -values1 AS (SELECT "_order", "a", coalesce("b", -1) AS "b", "c", "d", "_order_key", "_order_groups" FROM (SELECT "a", min("_order") AS "_order_key" FROM values0 WHERE "a" IS NOT NULL GROUP BY "a") AS _key CROSS JOIN (SELECT "c", min("_order") AS "_order_groups" FROM values0 GROUP BY "c") AS _groups LEFT JOIN values0 USING("a", "c")), \ -values2 AS (SELECT row_number() OVER (ORDER BY "_order" ASC NULLS LAST, "_order_groups" ASC NULLS FIRST, "_order_key" ASC NULLS FIRST) AS "_order", "a", "b", "c", "d" FROM values1) \ -SELECT * FROM values2 ORDER BY "_order" ASC NULLS FIRST - +WITH values0 AS (SELECT 1 AS "_order", 0 AS "a", 28 AS "b", 0 AS "c", -1 AS "d" UNION ALL SELECT 2 AS "_order", 0 AS "a", 91 AS "b", 1 AS "c", -1 AS "d" UNION ALL SELECT 3 AS "_order", 1 AS "a", 43 AS "b", 0 AS "c", -2 AS "d" UNION ALL SELECT 4 AS "_order", NULL AS "a", 55 AS "b", 1 AS "c", -2 AS "d" UNION ALL SELECT 5 AS "_order", 3 AS "a", 19 AS "b", 0 AS "c", -3 AS "d" UNION ALL SELECT 6 AS "_order", 2 AS "a", 81 AS "b", 0 AS "c", -3 AS "d" UNION ALL SELECT 7 AS "_order", 2 AS "a", 53 AS "b", 1 AS "c", -4 AS "d"), values1 AS (SELECT "_order", "a", coalesce("b", CASE WHEN -1 IS NOT NULL THEN CAST(-1 AS BIGINT) ELSE NULL END) AS "b", "c", "d", "_order_key", "_order_groups" FROM (SELECT "a", min("_order") AS "_order_key" FROM values0 WHERE "a" IS NOT NULL GROUP BY "a") AS _key CROSS JOIN (SELECT "c", min("_order") AS "_order_groups" FROM values0 GROUP BY "c") AS _groups LEFT JOIN values0 USING("a", "c")), values2 AS (SELECT row_number() OVER (ORDER BY "_order" ASC NULLS LAST, "_order_groups" ASC NULLS FIRST, "_order_key" ASC NULLS FIRST) AS "_order", "a", "b", "c", "d" FROM values1) SELECT * FROM values2 ORDER BY "_order" ASC NULLS FIRST """ snowflake = """ -WITH \ -values0 AS (SELECT "COLUMN1" AS "_order", "COLUMN2" AS "a", "COLUMN3" AS "b", "COLUMN4" AS "c", "COLUMN5" AS "d" FROM (VALUES (1, 0, 28, 0, -1), (2, 0, 91, 1, -1), (3, 1, 43, 0, -2), (4, NULL, 55, 1, -2), (5, 3, 19, 0, -3), (6, 2, 81, 0, -3), (7, 2, 53, 1, -4))), \ -values1 AS (SELECT "_order", "a", coalesce("b", -1) AS "b", "c", "d", "_order_key", "_order_groups" FROM (SELECT "a", min("_order") AS "_order_key" FROM values0 WHERE "a" IS NOT NULL GROUP BY "a") AS _key CROSS JOIN (SELECT "c", min("_order") AS "_order_groups" FROM values0 GROUP BY "c") AS _groups LEFT JOIN values0 USING("a", "c")), \ -values2 AS (SELECT row_number() OVER (ORDER BY "_order" ASC NULLS LAST, "_order_groups" ASC NULLS FIRST, "_order_key" ASC NULLS FIRST) AS "_order", "a", "b", "c", "d" FROM values1) \ -SELECT * FROM values2 ORDER BY "_order" ASC NULLS FIRST - +WITH values0 AS (SELECT "COLUMN1" AS "_order", "COLUMN2" AS "a", "COLUMN3" AS "b", "COLUMN4" AS "c", "COLUMN5" AS "d" FROM (VALUES (1, 0, 28, 0, -1), (2, 0, 91, 1, -1), (3, 1, 43, 0, -2), (4, NULL, 55, 1, -2), (5, 3, 19, 0, -3), (6, 2, 81, 0, -3), (7, 2, 53, 1, -4))), values1 AS (SELECT "_order", "a", coalesce("b", CAST(-1 AS BIGINT)) AS "b", "c", "d", "_order_key", "_order_groups" FROM (SELECT "a", min("_order") AS "_order_key" FROM values0 WHERE "a" IS NOT NULL GROUP BY "a") AS _key CROSS JOIN (SELECT "c", 
min("_order") AS "_order_groups" FROM values0 GROUP BY "c") AS _groups LEFT JOIN values0 USING("a", "c")), values2 AS (SELECT row_number() OVER (ORDER BY "_order" ASC NULLS LAST, "_order_groups" ASC NULLS FIRST, "_order_key" ASC NULLS FIRST) AS "_order", "a", "b", "c", "d" FROM values1) SELECT * FROM values2 ORDER BY "_order" ASC NULLS FIRST """ result = ''' +--------+---+----+---+----+ @@ -411,82 +244,34 @@ result = ''' [ordered_two_groups] athena = """ -WITH \ -values0 AS (SELECT * FROM (VALUES (1, 0, 28, 0, -1), (2, 0, 91, 1, -1), (3, 1, 43, 0, -2), (4, NULL, 55, 1, -2), (5, 3, 19, 0, -3), (6, 2, 81, 0, -3), (7, 2, 53, 1, -4)) AS "_values" ("_order", "a", "b", "c", "d")), \ -values1 AS (SELECT "_order", "a", coalesce("b", -1) AS "b", "c", "d", "_order_key", "_order_groups" FROM (SELECT "a", min("_order") AS "_order_key" FROM values0 WHERE "a" IS NOT NULL GROUP BY "a") AS _key CROSS JOIN (SELECT "c", "d", min("_order") AS "_order_groups" FROM values0 GROUP BY "c", "d") AS _groups LEFT JOIN values0 USING("a", "c", "d")), \ -values2 AS (SELECT row_number() OVER (ORDER BY "_order" ASC NULLS LAST, "_order_groups" ASC NULLS FIRST, "_order_key" ASC NULLS FIRST) AS "_order", "a", "b", "c", "d" FROM values1) \ -SELECT * FROM values2 ORDER BY "_order" ASC NULLS FIRST - +WITH values0 AS (SELECT * FROM (VALUES (1, 0, 28, 0, -1), (2, 0, 91, 1, -1), (3, 1, 43, 0, -2), (4, NULL, 55, 1, -2), (5, 3, 19, 0, -3), (6, 2, 81, 0, -3), (7, 2, 53, 1, -4)) AS "_values" ("_order", "a", "b", "c", "d")), values1 AS (SELECT "_order", "a", coalesce("b", CAST(-1 AS BIGINT)) AS "b", "c", "d", "_order_key", "_order_groups" FROM (SELECT "a", min("_order") AS "_order_key" FROM values0 WHERE "a" IS NOT NULL GROUP BY "a") AS _key CROSS JOIN (SELECT "c", "d", min("_order") AS "_order_groups" FROM values0 GROUP BY "c", "d") AS _groups LEFT JOIN values0 USING("a", "c", "d")), values2 AS (SELECT row_number() OVER (ORDER BY "_order" ASC NULLS LAST, "_order_groups" ASC NULLS FIRST, "_order_key" ASC NULLS FIRST) AS "_order", "a", "b", "c", "d" FROM values1) SELECT * FROM values2 ORDER BY "_order" ASC NULLS FIRST """ bigquery = """ -WITH \ -values0 AS (SELECT 1 AS `_order`, 0 AS `a`, 28 AS `b`, 0 AS `c`, -1 AS `d` UNION ALL SELECT 2 AS `_order`, 0 AS `a`, 91 AS `b`, 1 AS `c`, -1 AS `d` UNION ALL SELECT 3 AS `_order`, 1 AS `a`, 43 AS `b`, 0 AS `c`, -2 AS `d` UNION ALL SELECT 4 AS `_order`, NULL AS `a`, 55 AS `b`, 1 AS `c`, -2 AS `d` UNION ALL SELECT 5 AS `_order`, 3 AS `a`, 19 AS `b`, 0 AS `c`, -3 AS `d` UNION ALL SELECT 6 AS `_order`, 2 AS `a`, 81 AS `b`, 0 AS `c`, -3 AS `d` UNION ALL SELECT 7 AS `_order`, 2 AS `a`, 53 AS `b`, 1 AS `c`, -4 AS `d`), \ -values1 AS (SELECT `_order`, `a`, coalesce(`b`, -1) AS `b`, `c`, `d`, `_order_key`, `_order_groups` FROM (SELECT `a`, min(`_order`) AS `_order_key` FROM values0 WHERE `a` IS NOT NULL GROUP BY `a`) AS _key CROSS JOIN (SELECT `c`, `d`, min(`_order`) AS `_order_groups` FROM values0 GROUP BY `c`, `d`) AS _groups LEFT JOIN values0 USING(`a`, `c`, `d`)), \ -values2 AS (SELECT row_number() OVER (ORDER BY `_order` ASC NULLS LAST, `_order_groups` ASC NULLS FIRST, `_order_key` ASC NULLS FIRST) AS `_order`, `a`, `b`, `c`, `d` FROM values1) \ -SELECT * FROM values2 ORDER BY `_order` ASC NULLS FIRST - +WITH values0 AS (SELECT 1 AS `_order`, 0 AS `a`, 28 AS `b`, 0 AS `c`, -1 AS `d` UNION ALL SELECT 2 AS `_order`, 0 AS `a`, 91 AS `b`, 1 AS `c`, -1 AS `d` UNION ALL SELECT 3 AS `_order`, 1 AS `a`, 43 AS `b`, 0 AS `c`, -2 AS `d` UNION ALL SELECT 4 AS `_order`, NULL AS `a`, 55 AS `b`, 1 AS `c`, -2 AS 
`d` UNION ALL SELECT 5 AS `_order`, 3 AS `a`, 19 AS `b`, 0 AS `c`, -3 AS `d` UNION ALL SELECT 6 AS `_order`, 2 AS `a`, 81 AS `b`, 0 AS `c`, -3 AS `d` UNION ALL SELECT 7 AS `_order`, 2 AS `a`, 53 AS `b`, 1 AS `c`, -4 AS `d`), values1 AS (SELECT `_order`, `a`, coalesce(`b`, CAST(-1 AS INT)) AS `b`, `c`, `d`, `_order_key`, `_order_groups` FROM (SELECT `a`, min(`_order`) AS `_order_key` FROM values0 WHERE `a` IS NOT NULL GROUP BY `a`) AS _key CROSS JOIN (SELECT `c`, `d`, min(`_order`) AS `_order_groups` FROM values0 GROUP BY `c`, `d`) AS _groups LEFT JOIN values0 USING(`a`, `c`, `d`)), values2 AS (SELECT row_number() OVER (ORDER BY `_order` ASC NULLS LAST, `_order_groups` ASC NULLS FIRST, `_order_key` ASC NULLS FIRST) AS `_order`, `a`, `b`, `c`, `d` FROM values1) SELECT * FROM values2 ORDER BY `_order` ASC NULLS FIRST """ clickhouse = """ -WITH \ -values0 AS (SELECT 1 AS "_order", 0 AS "a", 28 AS "b", 0 AS "c", -1 AS "d" UNION ALL SELECT 2 AS "_order", 0 AS "a", 91 AS "b", 1 AS "c", -1 AS "d" UNION ALL SELECT 3 AS "_order", 1 AS "a", 43 AS "b", 0 AS "c", -2 AS "d" UNION ALL SELECT 4 AS "_order", NULL AS "a", 55 AS "b", 1 AS "c", -2 AS "d" UNION ALL SELECT 5 AS "_order", 3 AS "a", 19 AS "b", 0 AS "c", -3 AS "d" UNION ALL SELECT 6 AS "_order", 2 AS "a", 81 AS "b", 0 AS "c", -3 AS "d" UNION ALL SELECT 7 AS "_order", 2 AS "a", 53 AS "b", 1 AS "c", -4 AS "d"), \ -values1 AS (SELECT "_order", "_key"."a" AS "a", coalesce("b", -1) AS "b", "_groups"."c" AS "c", "_groups"."d" AS "d", "_order_key", "_order_groups" FROM (SELECT "a", min("_order") AS "_order_key" FROM values0 WHERE "a" IS NOT NULL GROUP BY "a") AS _key CROSS JOIN (SELECT "c", "d", min("_order") AS "_order_groups" FROM values0 GROUP BY "c", "d") AS _groups LEFT JOIN values0 USING("a", "c", "d")), \ -values2 AS (SELECT row_number() OVER (ORDER BY "_order" ASC NULLS LAST, "_order_groups" ASC NULLS FIRST, "_order_key" ASC NULLS FIRST) AS "_order", "a", "b", "c", "d" FROM values1) \ -SELECT * FROM values2 ORDER BY "_order" ASC NULLS FIRST - +WITH values0 AS (SELECT 1 AS "_order", 0 AS "a", 28 AS "b", 0 AS "c", -1 AS "d" UNION ALL SELECT 2 AS "_order", 0 AS "a", 91 AS "b", 1 AS "c", -1 AS "d" UNION ALL SELECT 3 AS "_order", 1 AS "a", 43 AS "b", 0 AS "c", -2 AS "d" UNION ALL SELECT 4 AS "_order", NULL AS "a", 55 AS "b", 1 AS "c", -2 AS "d" UNION ALL SELECT 5 AS "_order", 3 AS "a", 19 AS "b", 0 AS "c", -3 AS "d" UNION ALL SELECT 6 AS "_order", 2 AS "a", 81 AS "b", 0 AS "c", -3 AS "d" UNION ALL SELECT 7 AS "_order", 2 AS "a", 53 AS "b", 1 AS "c", -4 AS "d"), values1 AS (SELECT "_order", "_key"."a" AS "a", coalesce("b", CASE WHEN -1 IS NOT NULL THEN CAST(-1 AS BIGINT) ELSE NULL END) AS "b", "_groups"."c" AS "c", "_groups"."d" AS "d", "_order_key", "_order_groups" FROM (SELECT "a", min("_order") AS "_order_key" FROM values0 WHERE "a" IS NOT NULL GROUP BY "a") AS _key CROSS JOIN (SELECT "c", "d", min("_order") AS "_order_groups" FROM values0 GROUP BY "c", "d") AS _groups LEFT JOIN values0 USING("a", "c", "d")), values2 AS (SELECT row_number() OVER (ORDER BY "_order" ASC NULLS LAST, "_order_groups" ASC NULLS FIRST, "_order_key" ASC NULLS FIRST) AS "_order", "a", "b", "c", "d" FROM values1) SELECT * FROM values2 ORDER BY "_order" ASC NULLS FIRST """ databricks = """ -WITH \ -values0 AS (SELECT * FROM (VALUES (1, 0, 28, 0, -1), (2, 0, 91, 1, -1), (3, 1, 43, 0, -2), (4, NULL, 55, 1, -2), (5, 3, 19, 0, -3), (6, 2, 81, 0, -3), (7, 2, 53, 1, -4)) AS `_values` (`_order`, `a`, `b`, `c`, `d`)), \ -values1 AS (SELECT `_order`, `a`, coalesce(`b`, -1) AS `b`, `c`, 
`d`, `_order_key`, `_order_groups` FROM (SELECT `a`, min(`_order`) AS `_order_key` FROM values0 WHERE `a` IS NOT NULL GROUP BY `a`) AS _key CROSS JOIN (SELECT `c`, `d`, min(`_order`) AS `_order_groups` FROM values0 GROUP BY `c`, `d`) AS _groups LEFT JOIN values0 USING(`a`, `c`, `d`)), \ -values2 AS (SELECT row_number() OVER (ORDER BY `_order` ASC NULLS LAST, `_order_groups` ASC NULLS FIRST, `_order_key` ASC NULLS FIRST) AS `_order`, `a`, `b`, `c`, `d` FROM values1) \ -SELECT * FROM values2 ORDER BY `_order` ASC NULLS FIRST - +WITH values0 AS (SELECT * FROM (VALUES (1, 0, 28, 0, -1), (2, 0, 91, 1, -1), (3, 1, 43, 0, -2), (4, NULL, 55, 1, -2), (5, 3, 19, 0, -3), (6, 2, 81, 0, -3), (7, 2, 53, 1, -4)) AS `_values` (`_order`, `a`, `b`, `c`, `d`)), values1 AS (SELECT `_order`, `a`, coalesce(`b`, CAST(-1 AS BIGINT)) AS `b`, `c`, `d`, `_order_key`, `_order_groups` FROM (SELECT `a`, min(`_order`) AS `_order_key` FROM values0 WHERE `a` IS NOT NULL GROUP BY `a`) AS _key CROSS JOIN (SELECT `c`, `d`, min(`_order`) AS `_order_groups` FROM values0 GROUP BY `c`, `d`) AS _groups LEFT JOIN values0 USING(`a`, `c`, `d`)), values2 AS (SELECT row_number() OVER (ORDER BY `_order` ASC NULLS LAST, `_order_groups` ASC NULLS FIRST, `_order_key` ASC NULLS FIRST) AS `_order`, `a`, `b`, `c`, `d` FROM values1) SELECT * FROM values2 ORDER BY `_order` ASC NULLS FIRST """ datafusion = """ -WITH \ -values0 AS (SELECT * FROM (VALUES (1, 0, 28, 0, -1), (2, 0, 91, 1, -1), (3, 1, 43, 0, -2), (4, NULL, 55, 1, -2), (5, 3, 19, 0, -3), (6, 2, 81, 0, -3), (7, 2, 53, 1, -4)) AS "_values" ("_order", "a", "b", "c", "d")), \ -values1 AS (SELECT "_order", "a", coalesce("b", -1) AS "b", "c", "d", "_order_key", "_order_groups" FROM (SELECT "a", min("_order") AS "_order_key" FROM values0 WHERE "a" IS NOT NULL GROUP BY "a") AS _key CROSS JOIN (SELECT "c", "d", min("_order") AS "_order_groups" FROM values0 GROUP BY "c", "d") AS _groups LEFT JOIN values0 USING("a", "c", "d")), \ -values2 AS (SELECT row_number() OVER (ORDER BY "_order" ASC NULLS LAST, "_order_groups" ASC NULLS FIRST, "_order_key" ASC NULLS FIRST ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) AS "_order", "a", "b", "c", "d" FROM values1) \ -SELECT * FROM values2 ORDER BY "_order" ASC NULLS FIRST +WITH values0 AS (SELECT * FROM (VALUES (1, 0, 28, 0, -1), (2, 0, 91, 1, -1), (3, 1, 43, 0, -2), (4, NULL, 55, 1, -2), (5, 3, 19, 0, -3), (6, 2, 81, 0, -3), (7, 2, 53, 1, -4)) AS "_values" ("_order", "a", "b", "c", "d")), values1 AS (SELECT "_order", "a", coalesce("b", CAST(-1 AS BIGINT)) AS "b", "c", "d", "_order_key", "_order_groups" FROM (SELECT "a", min("_order") AS "_order_key" FROM values0 WHERE "a" IS NOT NULL GROUP BY "a") AS _key CROSS JOIN (SELECT "c", "d", min("_order") AS "_order_groups" FROM values0 GROUP BY "c", "d") AS _groups LEFT JOIN values0 USING("a", "c", "d")), values2 AS (SELECT row_number() OVER (ORDER BY "_order" ASC NULLS LAST, "_order_groups" ASC NULLS FIRST, "_order_key" ASC NULLS FIRST ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) AS "_order", "a", "b", "c", "d" FROM values1) SELECT * FROM values2 ORDER BY "_order" ASC NULLS FIRST """ duckdb = """ -WITH \ -values0 AS (SELECT * FROM (VALUES (1, 0, 28, 0, -1), (2, 0, 91, 1, -1), (3, 1, 43, 0, -2), (4, NULL, 55, 1, -2), (5, 3, 19, 0, -3), (6, 2, 81, 0, -3), (7, 2, 53, 1, -4)) AS "_values" ("_order", "a", "b", "c", "d")), \ -values1 AS (SELECT "_order", "a", coalesce("b", -1) AS "b", "c", "d", "_order_key", "_order_groups" FROM (SELECT "a", min("_order") AS "_order_key" FROM values0 WHERE "a" IS NOT NULL 
GROUP BY "a") AS _key CROSS JOIN (SELECT "c", "d", min("_order") AS "_order_groups" FROM values0 GROUP BY "c", "d") AS _groups LEFT JOIN values0 USING("a", "c", "d")), \ -values2 AS (SELECT row_number() OVER (ORDER BY "_order" ASC NULLS LAST, "_order_groups" ASC NULLS FIRST, "_order_key" ASC NULLS FIRST) AS "_order", "a", "b", "c", "d" FROM values1) \ -SELECT * FROM values2 ORDER BY "_order" ASC NULLS FIRST - +WITH values0 AS (SELECT * FROM (VALUES (1, 0, 28, 0, -1), (2, 0, 91, 1, -1), (3, 1, 43, 0, -2), (4, NULL, 55, 1, -2), (5, 3, 19, 0, -3), (6, 2, 81, 0, -3), (7, 2, 53, 1, -4)) AS "_values" ("_order", "a", "b", "c", "d")), values1 AS (SELECT "_order", "a", coalesce("b", CAST(-1 AS BIGINT)) AS "b", "c", "d", "_order_key", "_order_groups" FROM (SELECT "a", min("_order") AS "_order_key" FROM values0 WHERE "a" IS NOT NULL GROUP BY "a") AS _key CROSS JOIN (SELECT "c", "d", min("_order") AS "_order_groups" FROM values0 GROUP BY "c", "d") AS _groups LEFT JOIN values0 USING("a", "c", "d")), values2 AS (SELECT row_number() OVER (ORDER BY "_order" ASC NULLS LAST, "_order_groups" ASC NULLS FIRST, "_order_key" ASC NULLS FIRST) AS "_order", "a", "b", "c", "d" FROM values1) SELECT * FROM values2 ORDER BY "_order" ASC NULLS FIRST """ mysql = """ -WITH \ -values0 AS (SELECT * FROM (VALUES ROW(1, 0, 28, 0, -1), ROW(2, 0, 91, 1, -1), ROW(3, 1, 43, 0, -2), ROW(4, NULL, 55, 1, -2), ROW(5, 3, 19, 0, -3), ROW(6, 2, 81, 0, -3), ROW(7, 2, 53, 1, -4)) AS `_values` (`_order`, `a`, `b`, `c`, `d`)), \ -values1 AS (SELECT `_order`, `a`, coalesce(`b`, -1) AS `b`, `c`, `d`, `_order_key`, `_order_groups` FROM (SELECT `a`, min(`_order`) AS `_order_key` FROM values0 WHERE `a` IS NOT NULL GROUP BY `a`) AS _key CROSS JOIN (SELECT `c`, `d`, min(`_order`) AS `_order_groups` FROM values0 GROUP BY `c`, `d`) AS _groups LEFT JOIN values0 USING(`a`, `c`, `d`)), \ -values2 AS (SELECT row_number() OVER (ORDER BY `_order` IS NULL ASC, `_order` ASC, `_order_groups` ASC, `_order_key` ASC) AS `_order`, `a`, `b`, `c`, `d` FROM values1) \ -SELECT * FROM values2 ORDER BY `_order` ASC +WITH values0 AS (SELECT * FROM (VALUES ROW(1, 0, 28, 0, -1), ROW(2, 0, 91, 1, -1), ROW(3, 1, 43, 0, -2), ROW(4, NULL, 55, 1, -2), ROW(5, 3, 19, 0, -3), ROW(6, 2, 81, 0, -3), ROW(7, 2, 53, 1, -4)) AS `_values` (`_order`, `a`, `b`, `c`, `d`)), values1 AS (SELECT `_order`, `a`, coalesce(`b`, CAST(-1 AS SIGNED)) AS `b`, `c`, `d`, `_order_key`, `_order_groups` FROM (SELECT `a`, min(`_order`) AS `_order_key` FROM values0 WHERE `a` IS NOT NULL GROUP BY `a`) AS _key CROSS JOIN (SELECT `c`, `d`, min(`_order`) AS `_order_groups` FROM values0 GROUP BY `c`, `d`) AS _groups LEFT JOIN values0 USING(`a`, `c`, `d`)), values2 AS (SELECT row_number() OVER (ORDER BY `_order` IS NULL ASC, `_order` ASC, `_order_groups` ASC, `_order_key` ASC) AS `_order`, `a`, `b`, `c`, `d` FROM values1) SELECT * FROM values2 ORDER BY `_order` ASC """ postgres = """ -WITH \ -values0 AS (SELECT * FROM (VALUES (1, 0, 28, 0, -1), (2, 0, 91, 1, -1), (3, 1, 43, 0, -2), (4, NULL, 55, 1, -2), (5, 3, 19, 0, -3), (6, 2, 81, 0, -3), (7, 2, 53, 1, -4)) AS "_values" ("_order", "a", "b", "c", "d")), \ -values1 AS (SELECT "_order", "a", coalesce("b", -1) AS "b", "c", "d", "_order_key", "_order_groups" FROM (SELECT "a", min("_order") AS "_order_key" FROM values0 WHERE "a" IS NOT NULL GROUP BY "a") AS _key CROSS JOIN (SELECT "c", "d", min("_order") AS "_order_groups" FROM values0 GROUP BY "c", "d") AS _groups LEFT JOIN values0 USING("a", "c", "d")), \ -values2 AS (SELECT row_number() OVER (ORDER BY "_order" 
ASC NULLS LAST, "_order_groups" ASC NULLS FIRST, "_order_key" ASC NULLS FIRST) AS "_order", "a", "b", "c", "d" FROM values1) \ -SELECT * FROM values2 ORDER BY "_order" ASC NULLS FIRST - +WITH values0 AS (SELECT * FROM (VALUES (1, 0, 28, 0, -1), (2, 0, 91, 1, -1), (3, 1, 43, 0, -2), (4, NULL, 55, 1, -2), (5, 3, 19, 0, -3), (6, 2, 81, 0, -3), (7, 2, 53, 1, -4)) AS "_values" ("_order", "a", "b", "c", "d")), values1 AS (SELECT "_order", "a", coalesce("b", CAST(-1 AS BIGINT)) AS "b", "c", "d", "_order_key", "_order_groups" FROM (SELECT "a", min("_order") AS "_order_key" FROM values0 WHERE "a" IS NOT NULL GROUP BY "a") AS _key CROSS JOIN (SELECT "c", "d", min("_order") AS "_order_groups" FROM values0 GROUP BY "c", "d") AS _groups LEFT JOIN values0 USING("a", "c", "d")), values2 AS (SELECT row_number() OVER (ORDER BY "_order" ASC NULLS LAST, "_order_groups" ASC NULLS FIRST, "_order_key" ASC NULLS FIRST) AS "_order", "a", "b", "c", "d" FROM values1) SELECT * FROM values2 ORDER BY "_order" ASC NULLS FIRST """ redshift = """ -WITH \ -values0 AS (SELECT 1 AS "_order", 0 AS "a", 28 AS "b", 0 AS "c", -1 AS "d" UNION ALL SELECT 2 AS "_order", 0 AS "a", 91 AS "b", 1 AS "c", -1 AS "d" UNION ALL SELECT 3 AS "_order", 1 AS "a", 43 AS "b", 0 AS "c", -2 AS "d" UNION ALL SELECT 4 AS "_order", NULL AS "a", 55 AS "b", 1 AS "c", -2 AS "d" UNION ALL SELECT 5 AS "_order", 3 AS "a", 19 AS "b", 0 AS "c", -3 AS "d" UNION ALL SELECT 6 AS "_order", 2 AS "a", 81 AS "b", 0 AS "c", -3 AS "d" UNION ALL SELECT 7 AS "_order", 2 AS "a", 53 AS "b", 1 AS "c", -4 AS "d"), \ -values1 AS (SELECT "_order", "a", coalesce("b", -1) AS "b", "c", "d", "_order_key", "_order_groups" FROM (SELECT "a", min("_order") AS "_order_key" FROM values0 WHERE "a" IS NOT NULL GROUP BY "a") AS _key CROSS JOIN (SELECT "c", "d", min("_order") AS "_order_groups" FROM values0 GROUP BY "c", "d") AS _groups LEFT JOIN values0 USING("a", "c", "d")), \ -values2 AS (SELECT row_number() OVER (ORDER BY "_order" ASC NULLS LAST, "_order_groups" ASC NULLS FIRST, "_order_key" ASC NULLS FIRST) AS "_order", "a", "b", "c", "d" FROM values1) \ -SELECT * FROM values2 ORDER BY "_order" ASC NULLS FIRST - +WITH values0 AS (SELECT 1 AS "_order", 0 AS "a", 28 AS "b", 0 AS "c", -1 AS "d" UNION ALL SELECT 2 AS "_order", 0 AS "a", 91 AS "b", 1 AS "c", -1 AS "d" UNION ALL SELECT 3 AS "_order", 1 AS "a", 43 AS "b", 0 AS "c", -2 AS "d" UNION ALL SELECT 4 AS "_order", NULL AS "a", 55 AS "b", 1 AS "c", -2 AS "d" UNION ALL SELECT 5 AS "_order", 3 AS "a", 19 AS "b", 0 AS "c", -3 AS "d" UNION ALL SELECT 6 AS "_order", 2 AS "a", 81 AS "b", 0 AS "c", -3 AS "d" UNION ALL SELECT 7 AS "_order", 2 AS "a", 53 AS "b", 1 AS "c", -4 AS "d"), values1 AS (SELECT "_order", "a", coalesce("b", CASE WHEN -1 IS NOT NULL THEN CAST(-1 AS BIGINT) ELSE NULL END) AS "b", "c", "d", "_order_key", "_order_groups" FROM (SELECT "a", min("_order") AS "_order_key" FROM values0 WHERE "a" IS NOT NULL GROUP BY "a") AS _key CROSS JOIN (SELECT "c", "d", min("_order") AS "_order_groups" FROM values0 GROUP BY "c", "d") AS _groups LEFT JOIN values0 USING("a", "c", "d")), values2 AS (SELECT row_number() OVER (ORDER BY "_order" ASC NULLS LAST, "_order_groups" ASC NULLS FIRST, "_order_key" ASC NULLS FIRST) AS "_order", "a", "b", "c", "d" FROM values1) SELECT * FROM values2 ORDER BY "_order" ASC NULLS FIRST """ snowflake = """ -WITH \ -values0 AS (SELECT "COLUMN1" AS "_order", "COLUMN2" AS "a", "COLUMN3" AS "b", "COLUMN4" AS "c", "COLUMN5" AS "d" FROM (VALUES (1, 0, 28, 0, -1), (2, 0, 91, 1, -1), (3, 1, 43, 0, -2), (4, NULL, 55, 
1, -2), (5, 3, 19, 0, -3), (6, 2, 81, 0, -3), (7, 2, 53, 1, -4))), \ -values1 AS (SELECT "_order", "a", coalesce("b", -1) AS "b", "c", "d", "_order_key", "_order_groups" FROM (SELECT "a", min("_order") AS "_order_key" FROM values0 WHERE "a" IS NOT NULL GROUP BY "a") AS _key CROSS JOIN (SELECT "c", "d", min("_order") AS "_order_groups" FROM values0 GROUP BY "c", "d") AS _groups LEFT JOIN values0 USING("a", "c", "d")), \ -values2 AS (SELECT row_number() OVER (ORDER BY "_order" ASC NULLS LAST, "_order_groups" ASC NULLS FIRST, "_order_key" ASC NULLS FIRST) AS "_order", "a", "b", "c", "d" FROM values1) \ -SELECT * FROM values2 ORDER BY "_order" ASC NULLS FIRST - +WITH values0 AS (SELECT "COLUMN1" AS "_order", "COLUMN2" AS "a", "COLUMN3" AS "b", "COLUMN4" AS "c", "COLUMN5" AS "d" FROM (VALUES (1, 0, 28, 0, -1), (2, 0, 91, 1, -1), (3, 1, 43, 0, -2), (4, NULL, 55, 1, -2), (5, 3, 19, 0, -3), (6, 2, 81, 0, -3), (7, 2, 53, 1, -4))), values1 AS (SELECT "_order", "a", coalesce("b", CAST(-1 AS BIGINT)) AS "b", "c", "d", "_order_key", "_order_groups" FROM (SELECT "a", min("_order") AS "_order_key" FROM values0 WHERE "a" IS NOT NULL GROUP BY "a") AS _key CROSS JOIN (SELECT "c", "d", min("_order") AS "_order_groups" FROM values0 GROUP BY "c", "d") AS _groups LEFT JOIN values0 USING("a", "c", "d")), values2 AS (SELECT row_number() OVER (ORDER BY "_order" ASC NULLS LAST, "_order_groups" ASC NULLS FIRST, "_order_key" ASC NULLS FIRST) AS "_order", "a", "b", "c", "d" FROM values1) SELECT * FROM values2 ORDER BY "_order" ASC NULLS FIRST """ result = ''' +--------+---+----+---+----+ diff --git a/vegafusion-sql/tests/expected/select.toml b/vegafusion-sql/tests/expected/select.toml index df2139879..26d747680 100644 --- a/vegafusion-sql/tests/expected/select.toml +++ b/vegafusion-sql/tests/expected/select.toml @@ -332,34 +332,34 @@ result = ''' [scalar_math_functions] athena = """ -WITH values0 AS (SELECT * FROM (VALUES (0, -1.8, 0.1), (1, -1.0, 0.2), (2, 0.0, 0.4), (3, 1.0, 0.6), (4, 1.8, 0.8), (5, NULL, NULL)) AS "_values" ("a", "b", "c")), values1 AS (SELECT "a", abs("b") AS "abs", acos("c") AS "acos", asin("c") AS "asin", atan("c") AS "atan", atan2("c", "a") AS "atan2", CEIL("b") AS "ceil", cos("b") AS "cos", exp("b") AS "exp", FLOOR("b") AS "floor", ln("c") AS "ln", log10("c") AS "log", log10("c") AS "log10", log2("c") AS "log2", pow("b", "a") AS "power", round("b") AS "round", sin("b") AS "sin", sqrt("c") AS "sqrt", tan("b") AS "tan" FROM values0) SELECT * FROM values1 ORDER BY "a" ASC NULLS FIRST +WITH values0 AS (SELECT * FROM (VALUES (0, -1.8, 0.1), (1, -1.0, 0.2), (2, 0.0, 0.4), (3, 1.0, 0.6), (4, 1.8, 0.8), (5, NULL, NULL)) AS "_values" ("a", "b", "c")), values1 AS (SELECT "a", abs("b") AS "abs", acos("c") AS "acos", asin("c") AS "asin", atan("c") AS "atan", atan2("c", "a") AS "atan2", CEIL("b") AS "ceil", cos("b") AS "cos", exp("b") AS "exp", FLOOR("b") AS "floor", ln("c") AS "ln", log10("c") AS "log10", log2("c") AS "log2", pow("b", "a") AS "power", round("b") AS "round", sin("b") AS "sin", sqrt("c") AS "sqrt", tan("b") AS "tan" FROM values0) SELECT * FROM values1 ORDER BY "a" ASC NULLS FIRST """ bigquery = """ -WITH values0 AS (SELECT 0 AS `a`, -1.8 AS `b`, 0.1 AS `c` UNION ALL SELECT 1 AS `a`, -1.0 AS `b`, 0.2 AS `c` UNION ALL SELECT 2 AS `a`, 0.0 AS `b`, 0.4 AS `c` UNION ALL SELECT 3 AS `a`, 1.0 AS `b`, 0.6 AS `c` UNION ALL SELECT 4 AS `a`, 1.8 AS `b`, 0.8 AS `c` UNION ALL SELECT 5 AS `a`, NULL AS `b`, NULL AS `c`), values1 AS (SELECT `a`, abs(`b`) AS `abs`, acos(`c`) AS `acos`, asin(`c`) AS `asin`, 
atan(`c`) AS `atan`, atan2(`c`, `a`) AS `atan2`, CEIL(`b`) AS `ceil`, cos(`b`) AS `cos`, exp(`b`) AS `exp`, FLOOR(`b`) AS `floor`, ln(`c`) AS `ln`, log10(`c`) AS `log`, log10(`c`) AS `log10`, log(`c`, 2) AS `log2`, pow(`b`, `a`) AS `power`, round(`b`) AS `round`, sin(`b`) AS `sin`, sqrt(`c`) AS `sqrt`, tan(`b`) AS `tan` FROM values0) SELECT * FROM values1 ORDER BY `a` ASC NULLS FIRST +WITH values0 AS (SELECT 0 AS `a`, -1.8 AS `b`, 0.1 AS `c` UNION ALL SELECT 1 AS `a`, -1.0 AS `b`, 0.2 AS `c` UNION ALL SELECT 2 AS `a`, 0.0 AS `b`, 0.4 AS `c` UNION ALL SELECT 3 AS `a`, 1.0 AS `b`, 0.6 AS `c` UNION ALL SELECT 4 AS `a`, 1.8 AS `b`, 0.8 AS `c` UNION ALL SELECT 5 AS `a`, NULL AS `b`, NULL AS `c`), values1 AS (SELECT `a`, abs(`b`) AS `abs`, acos(`c`) AS `acos`, asin(`c`) AS `asin`, atan(`c`) AS `atan`, atan2(`c`, `a`) AS `atan2`, CEIL(`b`) AS `ceil`, cos(`b`) AS `cos`, exp(`b`) AS `exp`, FLOOR(`b`) AS `floor`, ln(`c`) AS `ln`, log10(`c`) AS `log10`, log(`c`, 2) AS `log2`, pow(`b`, `a`) AS `power`, round(`b`) AS `round`, sin(`b`) AS `sin`, sqrt(`c`) AS `sqrt`, tan(`b`) AS `tan` FROM values0) SELECT * FROM values1 ORDER BY `a` ASC NULLS FIRST """ clickhouse = """ -WITH values0 AS (SELECT 0 AS "a", -1.8 AS "b", 0.1 AS "c" UNION ALL SELECT 1 AS "a", -1.0 AS "b", 0.2 AS "c" UNION ALL SELECT 2 AS "a", 0.0 AS "b", 0.4 AS "c" UNION ALL SELECT 3 AS "a", 1.0 AS "b", 0.6 AS "c" UNION ALL SELECT 4 AS "a", 1.8 AS "b", 0.8 AS "c" UNION ALL SELECT 5 AS "a", NULL AS "b", NULL AS "c"), values1 AS (SELECT "a", abs("b") AS "abs", acos("c") AS "acos", asin("c") AS "asin", atan("c") AS "atan", atan2("c", "a") AS "atan2", CEIL("b") AS "ceil", cos("b") AS "cos", exp("b") AS "exp", FLOOR("b") AS "floor", ln("c") AS "ln", log10("c") AS "log", log10("c") AS "log10", log2("c") AS "log2", pow("b", "a") AS "power", round("b") AS "round", sin("b") AS "sin", sqrt("c") AS "sqrt", tan("b") AS "tan" FROM values0) SELECT * FROM values1 ORDER BY "a" ASC NULLS FIRST +WITH values0 AS (SELECT 0 AS "a", -1.8 AS "b", 0.1 AS "c" UNION ALL SELECT 1 AS "a", -1.0 AS "b", 0.2 AS "c" UNION ALL SELECT 2 AS "a", 0.0 AS "b", 0.4 AS "c" UNION ALL SELECT 3 AS "a", 1.0 AS "b", 0.6 AS "c" UNION ALL SELECT 4 AS "a", 1.8 AS "b", 0.8 AS "c" UNION ALL SELECT 5 AS "a", NULL AS "b", NULL AS "c"), values1 AS (SELECT "a", abs("b") AS "abs", acos("c") AS "acos", asin("c") AS "asin", atan("c") AS "atan", atan2("c", "a") AS "atan2", CEIL("b") AS "ceil", cos("b") AS "cos", exp("b") AS "exp", FLOOR("b") AS "floor", ln("c") AS "ln", log10("c") AS "log10", log2("c") AS "log2", pow("b", "a") AS "power", round("b") AS "round", sin("b") AS "sin", sqrt("c") AS "sqrt", tan("b") AS "tan" FROM values0) SELECT * FROM values1 ORDER BY "a" ASC NULLS FIRST """ databricks = """ -WITH values0 AS (SELECT * FROM (VALUES (0, -1.8, 0.1), (1, -1.0, 0.2), (2, 0.0, 0.4), (3, 1.0, 0.6), (4, 1.8, 0.8), (5, NULL, NULL)) AS `_values` (`a`, `b`, `c`)), values1 AS (SELECT `a`, abs(`b`) AS `abs`, acos(`c`) AS `acos`, asin(`c`) AS `asin`, atan(`c`) AS `atan`, atan2(`c`, `a`) AS `atan2`, CEIL(`b`) AS `ceil`, cos(`b`) AS `cos`, exp(`b`) AS `exp`, FLOOR(`b`) AS `floor`, ln(`c`) AS `ln`, log10(`c`) AS `log`, log10(`c`) AS `log10`, log2(`c`) AS `log2`, pow(`b`, `a`) AS `power`, round(`b`) AS `round`, sin(`b`) AS `sin`, sqrt(`c`) AS `sqrt`, tan(`b`) AS `tan` FROM values0) SELECT * FROM values1 ORDER BY `a` ASC NULLS FIRST +WITH values0 AS (SELECT * FROM (VALUES (0, -1.8, 0.1), (1, -1.0, 0.2), (2, 0.0, 0.4), (3, 1.0, 0.6), (4, 1.8, 0.8), (5, NULL, NULL)) AS `_values` (`a`, `b`, `c`)), values1 AS 
(SELECT `a`, abs(`b`) AS `abs`, acos(`c`) AS `acos`, asin(`c`) AS `asin`, atan(`c`) AS `atan`, atan2(`c`, `a`) AS `atan2`, CEIL(`b`) AS `ceil`, cos(`b`) AS `cos`, exp(`b`) AS `exp`, FLOOR(`b`) AS `floor`, ln(`c`) AS `ln`, log10(`c`) AS `log10`, log2(`c`) AS `log2`, pow(`b`, `a`) AS `power`, round(`b`) AS `round`, sin(`b`) AS `sin`, sqrt(`c`) AS `sqrt`, tan(`b`) AS `tan` FROM values0) SELECT * FROM values1 ORDER BY `a` ASC NULLS FIRST """ datafusion = """ -WITH values0 AS (SELECT * FROM (VALUES (0, -1.8, 0.1), (1, -1.0, 0.2), (2, 0.0, 0.4), (3, 1.0, 0.6), (4, 1.8, 0.8), (5, NULL, NULL)) AS "_values" ("a", "b", "c")), values1 AS (SELECT "a", abs("b") AS "abs", acos("c") AS "acos", asin("c") AS "asin", atan("c") AS "atan", atan2("c", "a") AS "atan2", CEIL("b") AS "ceil", cos("b") AS "cos", exp("b") AS "exp", FLOOR("b") AS "floor", ln("c") AS "ln", log("c") AS "log", log10("c") AS "log10", log2("c") AS "log2", pow("b", "a") AS "power", round("b") AS "round", sin("b") AS "sin", sqrt("c") AS "sqrt", tan("b") AS "tan" FROM values0) SELECT * FROM values1 ORDER BY "a" ASC NULLS FIRST +WITH values0 AS (SELECT * FROM (VALUES (0, -1.8, 0.1), (1, -1.0, 0.2), (2, 0.0, 0.4), (3, 1.0, 0.6), (4, 1.8, 0.8), (5, NULL, NULL)) AS "_values" ("a", "b", "c")), values1 AS (SELECT "a", abs("b") AS "abs", acos("c") AS "acos", asin("c") AS "asin", atan("c") AS "atan", atan2("c", "a") AS "atan2", CEIL("b") AS "ceil", cos("b") AS "cos", exp("b") AS "exp", FLOOR("b") AS "floor", ln("c") AS "ln", log10("c") AS "log10", log2("c") AS "log2", pow("b", "a") AS "power", round("b") AS "round", sin("b") AS "sin", sqrt("c") AS "sqrt", tan("b") AS "tan" FROM values0) SELECT * FROM values1 ORDER BY "a" ASC NULLS FIRST """ duckdb = """ -WITH values0 AS (SELECT * FROM (VALUES (0, -1.8, 0.1), (1, -1.0, 0.2), (2, 0.0, 0.4), (3, 1.0, 0.6), (4, 1.8, 0.8), (5, NULL, NULL)) AS "_values" ("a", "b", "c")), values1 AS (SELECT "a", abs("b") AS "abs", acos("c") AS "acos", asin("c") AS "asin", atan("c") AS "atan", atan2("c", "a") AS "atan2", CEIL("b") AS "ceil", cos("b") AS "cos", pow(2.718281828459045, "b") AS "exp", FLOOR("b") AS "floor", ln("c") AS "ln", log("c") AS "log", log10("c") AS "log10", log2("c") AS "log2", pow("b", "a") AS "power", round("b") AS "round", sin("b") AS "sin", sqrt("c") AS "sqrt", tan("b") AS "tan" FROM values0) SELECT * FROM values1 ORDER BY "a" ASC NULLS FIRST +WITH values0 AS (SELECT * FROM (VALUES (0, -1.8, 0.1), (1, -1.0, 0.2), (2, 0.0, 0.4), (3, 1.0, 0.6), (4, 1.8, 0.8), (5, NULL, NULL)) AS "_values" ("a", "b", "c")), values1 AS (SELECT "a", abs("b") AS "abs", acos("c") AS "acos", asin("c") AS "asin", atan("c") AS "atan", atan2("c", "a") AS "atan2", CEIL("b") AS "ceil", cos("b") AS "cos", pow(2.718281828459045, "b") AS "exp", FLOOR("b") AS "floor", ln("c") AS "ln", log10("c") AS "log10", log2("c") AS "log2", pow("b", "a") AS "power", round("b") AS "round", sin("b") AS "sin", sqrt("c") AS "sqrt", tan("b") AS "tan" FROM values0) SELECT * FROM values1 ORDER BY "a" ASC NULLS FIRST """ mysql = """ -WITH values0 AS (SELECT * FROM (VALUES ROW(0, -1.8, 0.1), ROW(1, -1.0, 0.2), ROW(2, 0.0, 0.4), ROW(3, 1.0, 0.6), ROW(4, 1.8, 0.8), ROW(5, NULL, NULL)) AS `_values` (`a`, `b`, `c`)), values1 AS (SELECT `a`, abs(`b`) AS `abs`, acos(`c`) AS `acos`, asin(`c`) AS `asin`, atan(`c`) AS `atan`, atan2(`c`, `a`) AS `atan2`, CEIL(`b`) AS `ceil`, cos(`b`) AS `cos`, exp(`b`) AS `exp`, FLOOR(`b`) AS `floor`, ln(`c`) AS `ln`, log10(`c`) AS `log`, log10(`c`) AS `log10`, log2(`c`) AS `log2`, pow(`b`, `a`) AS `power`, round(`b`) AS `round`, 
sin(`b`) AS `sin`, sqrt(`c`) AS `sqrt`, tan(`b`) AS `tan` FROM values0) SELECT * FROM values1 ORDER BY `a` ASC +WITH values0 AS (SELECT * FROM (VALUES ROW(0, -1.8, 0.1), ROW(1, -1.0, 0.2), ROW(2, 0.0, 0.4), ROW(3, 1.0, 0.6), ROW(4, 1.8, 0.8), ROW(5, NULL, NULL)) AS `_values` (`a`, `b`, `c`)), values1 AS (SELECT `a`, abs(`b`) AS `abs`, acos(`c`) AS `acos`, asin(`c`) AS `asin`, atan(`c`) AS `atan`, atan2(`c`, `a`) AS `atan2`, CEIL(`b`) AS `ceil`, cos(`b`) AS `cos`, exp(`b`) AS `exp`, FLOOR(`b`) AS `floor`, ln(`c`) AS `ln`, log10(`c`) AS `log10`, log2(`c`) AS `log2`, pow(`b`, `a`) AS `power`, round(`b`) AS `round`, sin(`b`) AS `sin`, sqrt(`c`) AS `sqrt`, tan(`b`) AS `tan` FROM values0) SELECT * FROM values1 ORDER BY `a` ASC """ postgres = """ -WITH values0 AS (SELECT * FROM (VALUES (0, -1.8, 0.1), (1, -1.0, 0.2), (2, 0.0, 0.4), (3, 1.0, 0.6), (4, 1.8, 0.8), (5, NULL, NULL)) AS "_values" ("a", "b", "c")), values1 AS (SELECT "a", abs("b") AS "abs", acos("c") AS "acos", asin("c") AS "asin", atan("c") AS "atan", atan2("c", "a") AS "atan2", CEIL("b") AS "ceil", cos("b") AS "cos", exp("b") AS "exp", FLOOR("b") AS "floor", ln("c") AS "ln", log("c") AS "log", log(10, "c") AS "log10", log(2, "c") AS "log2", pow("b", "a") AS "power", round("b") AS "round", sin("b") AS "sin", sqrt("c") AS "sqrt", tan("b") AS "tan" FROM values0) SELECT * FROM values1 ORDER BY "a" ASC NULLS FIRST +WITH values0 AS (SELECT * FROM (VALUES (0, -1.8, 0.1), (1, -1.0, 0.2), (2, 0.0, 0.4), (3, 1.0, 0.6), (4, 1.8, 0.8), (5, NULL, NULL)) AS "_values" ("a", "b", "c")), values1 AS (SELECT "a", abs("b") AS "abs", acos("c") AS "acos", asin("c") AS "asin", atan("c") AS "atan", atan2("c", "a") AS "atan2", CEIL("b") AS "ceil", cos("b") AS "cos", exp("b") AS "exp", FLOOR("b") AS "floor", ln("c") AS "ln", log(10, "c") AS "log10", log(2, "c") AS "log2", pow("b", "a") AS "power", round("b") AS "round", sin("b") AS "sin", sqrt("c") AS "sqrt", tan("b") AS "tan" FROM values0) SELECT * FROM values1 ORDER BY "a" ASC NULLS FIRST """ redshift = """ -WITH values0 AS (SELECT 0 AS "a", -1.8 AS "b", 0.1 AS "c" UNION ALL SELECT 1 AS "a", -1.0 AS "b", 0.2 AS "c" UNION ALL SELECT 2 AS "a", 0.0 AS "b", 0.4 AS "c" UNION ALL SELECT 3 AS "a", 1.0 AS "b", 0.6 AS "c" UNION ALL SELECT 4 AS "a", 1.8 AS "b", 0.8 AS "c" UNION ALL SELECT 5 AS "a", NULL AS "b", NULL AS "c"), values1 AS (SELECT "a", abs("b") AS "abs", acos("c") AS "acos", asin("c") AS "asin", atan("c") AS "atan", atan2("c", "a") AS "atan2", CEIL("b") AS "ceil", cos("b") AS "cos", exp("b") AS "exp", FLOOR("b") AS "floor", ln(CAST("c" AS DOUBLE PRECISION)) AS "ln", log(CAST("c" AS DOUBLE PRECISION)) AS "log", log(CAST("c" AS DOUBLE PRECISION)) AS "log10", ln(CAST("c" AS DOUBLE PRECISION)) / ln(2) AS "log2", pow("b", "a") AS "power", round("b") AS "round", sin("b") AS "sin", sqrt("c") AS "sqrt", tan("b") AS "tan" FROM values0) SELECT * FROM values1 ORDER BY "a" ASC NULLS FIRST +WITH values0 AS (SELECT 0 AS "a", -1.8 AS "b", 0.1 AS "c" UNION ALL SELECT 1 AS "a", -1.0 AS "b", 0.2 AS "c" UNION ALL SELECT 2 AS "a", 0.0 AS "b", 0.4 AS "c" UNION ALL SELECT 3 AS "a", 1.0 AS "b", 0.6 AS "c" UNION ALL SELECT 4 AS "a", 1.8 AS "b", 0.8 AS "c" UNION ALL SELECT 5 AS "a", NULL AS "b", NULL AS "c"), values1 AS (SELECT "a", abs("b") AS "abs", acos("c") AS "acos", asin("c") AS "asin", atan("c") AS "atan", atan2("c", "a") AS "atan2", CEIL("b") AS "ceil", cos("b") AS "cos", exp("b") AS "exp", FLOOR("b") AS "floor", ln(CAST("c" AS DOUBLE PRECISION)) AS "ln", log(CAST("c" AS DOUBLE PRECISION)) AS "log10", ln(CAST("c" AS 
DOUBLE PRECISION)) / ln(2) AS "log2", pow("b", "a") AS "power", round("b") AS "round", sin("b") AS "sin", sqrt("c") AS "sqrt", tan("b") AS "tan" FROM values0) SELECT * FROM values1 ORDER BY "a" ASC NULLS FIRST
"""
snowflake = """
-WITH values0 AS (SELECT "COLUMN1" AS "a", "COLUMN2" AS "b", "COLUMN3" AS "c" FROM (VALUES (0, -1.8, 0.1), (1, -1.0, 0.2), (2, 0.0, 0.4), (3, 1.0, 0.6), (4, 1.8, 0.8), (5, NULL, NULL))), values1 AS (SELECT "a", abs("b") AS "abs", acos("c") AS "acos", asin("c") AS "asin", atan("c") AS "atan", atan2("c", "a") AS "atan2", CEIL("b") AS "ceil", cos("b") AS "cos", exp("b") AS "exp", FLOOR("b") AS "floor", ln("c") AS "ln", log(10, "c") AS "log", log(10, "c") AS "log10", log(2, "c") AS "log2", pow("b", "a") AS "power", round("b") AS "round", sin("b") AS "sin", sqrt("c") AS "sqrt", tan("b") AS "tan" FROM values0) SELECT * FROM values1 ORDER BY "a" ASC NULLS FIRST
+WITH values0 AS (SELECT "COLUMN1" AS "a", "COLUMN2" AS "b", "COLUMN3" AS "c" FROM (VALUES (0, -1.8, 0.1), (1, -1.0, 0.2), (2, 0.0, 0.4), (3, 1.0, 0.6), (4, 1.8, 0.8), (5, NULL, NULL))), values1 AS (SELECT "a", abs("b") AS "abs", acos("c") AS "acos", asin("c") AS "asin", atan("c") AS "atan", atan2("c", "a") AS "atan2", CEIL("b") AS "ceil", cos("b") AS "cos", exp("b") AS "exp", FLOOR("b") AS "floor", ln("c") AS "ln", log(10, "c") AS "log10", log(2, "c") AS "log2", pow("b", "a") AS "power", round("b") AS "round", sin("b") AS "sin", sqrt("c") AS "sqrt", tan("b") AS "tan" FROM values0) SELECT * FROM values1 ORDER BY "a" ASC NULLS FIRST
"""
result = '''
+---+-----+--------------------+---------------------+---------------------+---------------------+------+---------------------+---------------------+-------+---------------------+----------------------+----------------------+---------------------+---------+-------+---------------------+---------------------+--------------------+
diff --git a/vegafusion-sql/tests/test_aggregate.rs b/vegafusion-sql/tests/test_aggregate.rs
index 3ff95e2eb..e61df58d9 100644
--- a/vegafusion-sql/tests/test_aggregate.rs
+++ b/vegafusion-sql/tests/test_aggregate.rs
@@ -2,7 +2,7 @@
extern crate lazy_static;
mod utils;
-use datafusion_expr::{avg, count, expr, lit, max, min, round, sum, AggregateFunction, Expr};
+use datafusion_expr::{avg, count, expr, lit, max, min, sum, AggregateFunction, Expr};
use rstest::rstest;
use rstest_reuse::{self, *};
use serde_json::json;
@@ -77,6 +77,8 @@ mod test_median_agg {
#[apply(dialect_names)]
async fn test(dialect_name: &str) {
+ use sqlparser::ast::NullTreatment;
+
println!("{dialect_name}");
let (conn, evaluable) = TOKIO_RUNTIME.block_on(make_connection(dialect_name));
@@ -101,6 +103,7 @@ mod test_median_agg {
distinct: false,
filter: None,
order_by: None,
+ null_treatment: Some(NullTreatment::IgnoreNulls),
})
.alias("median_a"),
],
@@ -128,6 +131,9 @@ mod test_variance_aggs {
#[apply(dialect_names)]
async fn test(dialect_name: &str) {
+ use datafusion_functions::expr_fn::round;
+ use sqlparser::ast::NullTreatment;
+
println!("{dialect_name}");
let (conn, evaluable) = TOKIO_RUNTIME.block_on(make_connection(dialect_name));
@@ -151,6 +157,7 @@ mod test_variance_aggs {
distinct: false,
filter: None,
order_by: None,
+ null_treatment: Some(NullTreatment::IgnoreNulls),
})
.mul(lit(100))])
.div(lit(100))
@@ -163,6 +170,7 @@ mod test_variance_aggs {
distinct: false,
filter: None,
order_by: None,
+ null_treatment: Some(NullTreatment::IgnoreNulls),
})
.mul(lit(100))])
.div(lit(100))
@@ -173,6 +181,7 @@ mod test_variance_aggs {
distinct: false,
filter: None,
order_by: None,
+ null_treatment: Some(NullTreatment::IgnoreNulls),
})
.mul(lit(100))])
.div(lit(100))
@@ -185,6 +194,7 @@ mod test_variance_aggs {
distinct: false,
filter: None,
order_by: None,
+ null_treatment: Some(NullTreatment::IgnoreNulls),
})
.mul(lit(100))])
.div(lit(100))
diff --git a/vegafusion-sql/tests/test_select.rs b/vegafusion-sql/tests/test_select.rs
index 7132c2b14..1d5311e75 100644
--- a/vegafusion-sql/tests/test_select.rs
+++ b/vegafusion-sql/tests/test_select.rs
@@ -485,27 +485,16 @@ mod test_non_finite_numbers {
#[cfg(test)]
mod test_scalar_math_functions {
use crate::*;
- use datafusion_expr::{expr, BuiltinScalarFunction, Expr, ScalarFunctionDefinition};
+ use datafusion_expr::{expr, Expr};
+ use datafusion_functions::math::expr_fn::{abs, acos, asin, tan};
use vegafusion_common::column::flat_col;
- fn make_scalar_fn1(fun: BuiltinScalarFunction, arg: &str, alias: &str) -> Expr {
- Expr::ScalarFunction(expr::ScalarFunction {
- func_def: ScalarFunctionDefinition::BuiltIn(fun),
- args: vec![flat_col(arg)],
- })
- .alias(alias)
- }
-
- fn make_scalar_fn2(fun: BuiltinScalarFunction, arg1: &str, arg2: &str, alias: &str) -> Expr {
- Expr::ScalarFunction(expr::ScalarFunction {
- func_def: ScalarFunctionDefinition::BuiltIn(fun),
- args: vec![flat_col(arg1), flat_col(arg2)],
- })
- .alias(alias)
- }
-
#[apply(dialect_names)]
async fn test(dialect_name: &str) {
+ use datafusion_functions::expr_fn::{
+ atan, atan2, ceil, cos, exp, floor, ln, log10, log2, power, round, sin, sqrt,
+ };
+
println!("{dialect_name}");
let (conn, _evaluable) = TOKIO_RUNTIME.block_on(make_connection(dialect_name));
@@ -523,24 +512,23 @@ mod test_scalar_math_functions {
let df_result = df
.select(vec![
flat_col("a"),
- make_scalar_fn1(BuiltinScalarFunction::Abs, "b", "abs"),
- make_scalar_fn1(BuiltinScalarFunction::Acos, "c", "acos"),
- make_scalar_fn1(BuiltinScalarFunction::Asin, "c", "asin"),
- make_scalar_fn1(BuiltinScalarFunction::Atan, "c", "atan"),
- make_scalar_fn2(BuiltinScalarFunction::Atan2, "c", "a", "atan2"),
- make_scalar_fn1(BuiltinScalarFunction::Ceil, "b", "ceil"),
- make_scalar_fn1(BuiltinScalarFunction::Cos, "b", "cos"),
- make_scalar_fn1(BuiltinScalarFunction::Exp, "b", "exp"),
- make_scalar_fn1(BuiltinScalarFunction::Floor, "b", "floor"),
- make_scalar_fn1(BuiltinScalarFunction::Ln, "c", "ln"),
- make_scalar_fn1(BuiltinScalarFunction::Log, "c", "log"),
- make_scalar_fn1(BuiltinScalarFunction::Log10, "c", "log10"),
- make_scalar_fn1(BuiltinScalarFunction::Log2, "c", "log2"),
- make_scalar_fn2(BuiltinScalarFunction::Power, "b", "a", "power"),
- make_scalar_fn1(BuiltinScalarFunction::Round, "b", "round"),
- make_scalar_fn1(BuiltinScalarFunction::Sin, "b", "sin"),
- make_scalar_fn1(BuiltinScalarFunction::Sqrt, "c", "sqrt"),
- make_scalar_fn1(BuiltinScalarFunction::Tan, "b", "tan"),
+ abs(flat_col("b")).alias("abs"),
+ acos(flat_col("c")).alias("acos"),
+ asin(flat_col("c")).alias("asin"),
+ atan(flat_col("c")).alias("atan"),
+ atan2(flat_col("c"), flat_col("a")).alias("atan2"),
+ ceil(flat_col("b")).alias("ceil"),
+ cos(flat_col("b")).alias("cos"),
+ exp(flat_col("b")).alias("exp"),
+ floor(flat_col("b")).alias("floor"),
+ ln(flat_col("c")).alias("ln"),
+ log10(flat_col("c")).alias("log10"),
+ log2(flat_col("c")).alias("log2"),
+ power(flat_col("b"), flat_col("a")).alias("power"),
+ round(vec![flat_col("b")]).alias("round"),
+ sin(flat_col("b")).alias("sin"),
+ sqrt(flat_col("c")).alias("sqrt"),
+ tan(flat_col("b")).alias("tan"),
])
.await;
@@ -1467,11 +1455,14 @@ mod test_timestamp_to_utc_timestamp
{ #[cfg(test)] mod test_string_ops { use crate::*; - use datafusion_expr::{expr, lit, BuiltinScalarFunction, Expr, ScalarFunctionDefinition}; + use datafusion_expr::{expr, lit, Expr}; + use datafusion_functions::string::expr_fn::{concat, lower, upper}; use vegafusion_common::column::flat_col; #[apply(dialect_names)] async fn test(dialect_name: &str) { + use datafusion_functions::expr_fn::substring; + println!("{dialect_name}"); let (conn, evaluable) = TOKIO_RUNTIME.block_on(make_connection(dialect_name)); @@ -1489,26 +1480,10 @@ mod test_string_ops { flat_col("a"), flat_col("b"), flat_col("c"), - Expr::ScalarFunction(expr::ScalarFunction { - func_def: ScalarFunctionDefinition::BuiltIn(BuiltinScalarFunction::Substr), - args: vec![flat_col("b"), lit(2), lit(2)], - }) - .alias("b_substr"), - Expr::ScalarFunction(expr::ScalarFunction { - func_def: ScalarFunctionDefinition::BuiltIn(BuiltinScalarFunction::Concat), - args: vec![flat_col("b"), lit(" "), flat_col("c")], - }) - .alias("bc_concat"), - Expr::ScalarFunction(expr::ScalarFunction { - func_def: ScalarFunctionDefinition::BuiltIn(BuiltinScalarFunction::Upper), - args: vec![flat_col("b")], - }) - .alias("b_upper"), - Expr::ScalarFunction(expr::ScalarFunction { - func_def: ScalarFunctionDefinition::BuiltIn(BuiltinScalarFunction::Lower), - args: vec![flat_col("b")], - }) - .alias("b_lower"), + substring(flat_col("b"), lit(2), lit(2)).alias("b_substr"), + concat(vec![flat_col("b"), lit(" "), flat_col("c")]).alias("bc_concat"), + upper(flat_col("b")).alias("b_upper"), + lower(flat_col("b")).alias("b_lower"), ]) .await; diff --git a/vegafusion-sql/tests/test_stack.rs b/vegafusion-sql/tests/test_stack.rs index fcd9660c2..513459c37 100644 --- a/vegafusion-sql/tests/test_stack.rs +++ b/vegafusion-sql/tests/test_stack.rs @@ -2,7 +2,7 @@ extern crate lazy_static; mod utils; -use datafusion_expr::{expr, lit, round, Expr}; +use datafusion_expr::{expr, lit, Expr}; use rstest::rstest; use rstest_reuse::{self, *}; use serde_json::json; @@ -107,6 +107,8 @@ mod test_mode_normalized { #[apply(dialect_names)] async fn test(dialect_name: &str) { + use datafusion_functions::expr_fn::round; + println!("{dialect_name}"); let (conn, evaluable) = TOKIO_RUNTIME.block_on(make_connection(dialect_name)); let df = stack_data(conn); diff --git a/vegafusion-sql/tests/test_window.rs b/vegafusion-sql/tests/test_window.rs index 524033b01..278c55d66 100644 --- a/vegafusion-sql/tests/test_window.rs +++ b/vegafusion-sql/tests/test_window.rs @@ -22,6 +22,8 @@ mod test_simple_aggs_unbounded { #[apply(dialect_names)] async fn test(dialect_name: &str) { + use sqlparser::ast::NullTreatment; + println!("{dialect_name}"); let (conn, evaluable) = TOKIO_RUNTIME.block_on(make_connection(dialect_name)); @@ -52,6 +54,7 @@ mod test_simple_aggs_unbounded { partition_by: vec![], order_by: order_by.clone(), window_frame: window_frame.clone(), + null_treatment: Some(NullTreatment::IgnoreNulls), }) .alias("sum_b"), Expr::WindowFunction(expr::WindowFunction { @@ -60,6 +63,7 @@ mod test_simple_aggs_unbounded { partition_by: vec![flat_col("c")], order_by: order_by.clone(), window_frame: window_frame.clone(), + null_treatment: Some(NullTreatment::IgnoreNulls), }) .alias("count_part_b"), Expr::WindowFunction(expr::WindowFunction { @@ -68,6 +72,7 @@ mod test_simple_aggs_unbounded { partition_by: vec![], order_by: order_by.clone(), window_frame: window_frame.clone(), + null_treatment: Some(NullTreatment::IgnoreNulls), }) .alias("cume_mean_b"), Expr::WindowFunction(expr::WindowFunction { @@ -76,6 +81,7 
@@ mod test_simple_aggs_unbounded { partition_by: vec![flat_col("c")], order_by: order_by.clone(), window_frame: window_frame.clone(), + null_treatment: Some(NullTreatment::IgnoreNulls), }) .alias("min_b"), Expr::WindowFunction(expr::WindowFunction { @@ -84,6 +90,7 @@ mod test_simple_aggs_unbounded { partition_by: vec![], order_by: order_by.clone(), window_frame, + null_treatment: Some(NullTreatment::IgnoreNulls), }) .alias("max_b"), ]) @@ -115,6 +122,8 @@ mod test_simple_aggs_unbounded_groups { #[apply(dialect_names)] async fn test(dialect_name: &str) { + use sqlparser::ast::NullTreatment; + println!("{dialect_name}"); let (conn, evaluable) = TOKIO_RUNTIME.block_on(make_connection(dialect_name)); @@ -149,6 +158,7 @@ mod test_simple_aggs_unbounded_groups { partition_by: vec![], order_by: order_by.clone(), window_frame: window_frame.clone(), + null_treatment: Some(NullTreatment::IgnoreNulls), }) .alias("sum_b"), Expr::WindowFunction(expr::WindowFunction { @@ -157,6 +167,7 @@ mod test_simple_aggs_unbounded_groups { partition_by: vec![flat_col("c")], order_by: order_by.clone(), window_frame: window_frame.clone(), + null_treatment: Some(NullTreatment::IgnoreNulls), }) .alias("count_part_b"), Expr::WindowFunction(expr::WindowFunction { @@ -165,6 +176,7 @@ mod test_simple_aggs_unbounded_groups { partition_by: vec![], order_by: order_by.clone(), window_frame: window_frame.clone(), + null_treatment: Some(NullTreatment::IgnoreNulls), }) .alias("cume_mean_b"), Expr::WindowFunction(expr::WindowFunction { @@ -173,6 +185,7 @@ mod test_simple_aggs_unbounded_groups { partition_by: vec![flat_col("c")], order_by: order_by.clone(), window_frame: window_frame.clone(), + null_treatment: Some(NullTreatment::IgnoreNulls), }) .alias("min_b"), Expr::WindowFunction(expr::WindowFunction { @@ -181,6 +194,7 @@ mod test_simple_aggs_unbounded_groups { partition_by: vec![], order_by: order_by.clone(), window_frame, + null_treatment: Some(NullTreatment::IgnoreNulls), }) .alias("max_b"), ]) @@ -213,6 +227,8 @@ mod test_simple_aggs_bounded { #[apply(dialect_names)] async fn test(dialect_name: &str) { + use sqlparser::ast::NullTreatment; + println!("{dialect_name}"); let (conn, evaluable) = TOKIO_RUNTIME.block_on(make_connection(dialect_name)); @@ -247,6 +263,7 @@ mod test_simple_aggs_bounded { partition_by: vec![], order_by: order_by.clone(), window_frame: window_frame.clone(), + null_treatment: Some(NullTreatment::IgnoreNulls), }) .alias("sum_b"), Expr::WindowFunction(expr::WindowFunction { @@ -255,6 +272,7 @@ mod test_simple_aggs_bounded { partition_by: vec![col("c")], order_by: order_by.clone(), window_frame: window_frame.clone(), + null_treatment: Some(NullTreatment::IgnoreNulls), }) .alias("count_part_b"), Expr::WindowFunction(expr::WindowFunction { @@ -263,6 +281,7 @@ mod test_simple_aggs_bounded { partition_by: vec![], order_by: order_by.clone(), window_frame: window_frame.clone(), + null_treatment: Some(NullTreatment::IgnoreNulls), }) .alias("cume_mean_b"), Expr::WindowFunction(expr::WindowFunction { @@ -271,6 +290,7 @@ mod test_simple_aggs_bounded { partition_by: vec![col("c")], order_by: order_by.clone(), window_frame: window_frame.clone(), + null_treatment: Some(NullTreatment::IgnoreNulls), }) .alias("min_b"), Expr::WindowFunction(expr::WindowFunction { @@ -279,6 +299,7 @@ mod test_simple_aggs_bounded { partition_by: vec![], order_by: order_by.clone(), window_frame, + null_treatment: Some(NullTreatment::IgnoreNulls), }) .alias("max_b"), ]) @@ -310,6 +331,8 @@ mod test_simple_aggs_bounded_groups { 
#[apply(dialect_names)] async fn test(dialect_name: &str) { + use sqlparser::ast::NullTreatment; + println!("{dialect_name}"); let (conn, evaluable) = TOKIO_RUNTIME.block_on(make_connection(dialect_name)); @@ -344,6 +367,7 @@ mod test_simple_aggs_bounded_groups { partition_by: vec![], order_by: order_by.clone(), window_frame: window_frame.clone(), + null_treatment: Some(NullTreatment::IgnoreNulls), }) .alias("sum_b"), Expr::WindowFunction(expr::WindowFunction { @@ -352,6 +376,7 @@ mod test_simple_aggs_bounded_groups { partition_by: vec![col("c")], order_by: order_by.clone(), window_frame: window_frame.clone(), + null_treatment: Some(NullTreatment::IgnoreNulls), }) .alias("count_part_b"), Expr::WindowFunction(expr::WindowFunction { @@ -360,6 +385,7 @@ mod test_simple_aggs_bounded_groups { partition_by: vec![], order_by: order_by.clone(), window_frame: window_frame.clone(), + null_treatment: Some(NullTreatment::IgnoreNulls), }) .alias("cume_mean_b"), Expr::WindowFunction(expr::WindowFunction { @@ -368,6 +394,7 @@ mod test_simple_aggs_bounded_groups { partition_by: vec![col("c")], order_by: order_by.clone(), window_frame: window_frame.clone(), + null_treatment: Some(NullTreatment::IgnoreNulls), }) .alias("min_b"), Expr::WindowFunction(expr::WindowFunction { @@ -376,6 +403,7 @@ mod test_simple_aggs_bounded_groups { partition_by: vec![], order_by: order_by.clone(), window_frame, + null_treatment: Some(NullTreatment::IgnoreNulls), }) .alias("max_b"), ]) @@ -422,6 +450,8 @@ mod test_simple_window_fns { #[apply(dialect_names)] async fn test(dialect_name: &str) { + use sqlparser::ast::NullTreatment; + println!("{dialect_name}"); let (conn, evaluable) = TOKIO_RUNTIME.block_on(make_connection(dialect_name)); @@ -454,6 +484,7 @@ mod test_simple_window_fns { partition_by: vec![], order_by: order_by.clone(), window_frame: window_frame.clone(), + null_treatment: Some(NullTreatment::IgnoreNulls), }) .alias("row_num"), Expr::WindowFunction(expr::WindowFunction { @@ -464,6 +495,7 @@ mod test_simple_window_fns { partition_by: vec![col("c")], order_by: order_by.clone(), window_frame: window_frame.clone(), + null_treatment: Some(NullTreatment::IgnoreNulls), }) .alias("rank"), Expr::WindowFunction(expr::WindowFunction { @@ -474,6 +506,7 @@ mod test_simple_window_fns { partition_by: vec![], order_by: order_by.clone(), window_frame: window_frame.clone(), + null_treatment: Some(NullTreatment::IgnoreNulls), }) .alias("d_rank"), Expr::WindowFunction(expr::WindowFunction { @@ -484,6 +517,7 @@ mod test_simple_window_fns { partition_by: vec![col("c")], order_by: order_by.clone(), window_frame: window_frame.clone(), + null_treatment: Some(NullTreatment::IgnoreNulls), }) .alias("first"), Expr::WindowFunction(expr::WindowFunction { @@ -494,6 +528,7 @@ mod test_simple_window_fns { partition_by: vec![], order_by: order_by.clone(), window_frame, + null_treatment: Some(NullTreatment::IgnoreNulls), }) .alias("last"), ]) @@ -525,6 +560,8 @@ mod test_advanced_window_fns { #[apply(dialect_names)] async fn test(dialect_name: &str) { + use sqlparser::ast::NullTreatment; + println!("{dialect_name}"); let (conn, evaluable) = TOKIO_RUNTIME.block_on(make_connection(dialect_name)); @@ -557,6 +594,7 @@ mod test_advanced_window_fns { partition_by: vec![], order_by: order_by.clone(), window_frame: window_frame.clone(), + null_treatment: Some(NullTreatment::IgnoreNulls), }) .alias("nth1"), Expr::WindowFunction(expr::WindowFunction { @@ -567,6 +605,7 @@ mod test_advanced_window_fns { partition_by: vec![col("c")], order_by: order_by.clone(), 
window_frame: window_frame.clone(), + null_treatment: Some(NullTreatment::IgnoreNulls), }) .alias("cdist"), Expr::WindowFunction(expr::WindowFunction { @@ -577,6 +616,7 @@ mod test_advanced_window_fns { partition_by: vec![], order_by: order_by.clone(), window_frame: window_frame.clone(), + null_treatment: Some(NullTreatment::IgnoreNulls), }) .alias("lag_b"), Expr::WindowFunction(expr::WindowFunction { @@ -587,6 +627,7 @@ mod test_advanced_window_fns { partition_by: vec![col("c")], order_by: order_by.clone(), window_frame: window_frame.clone(), + null_treatment: Some(NullTreatment::IgnoreNulls), }) .alias("lead_b"), Expr::WindowFunction(expr::WindowFunction { @@ -597,6 +638,7 @@ mod test_advanced_window_fns { partition_by: vec![], order_by: order_by.clone(), window_frame, + null_treatment: Some(NullTreatment::IgnoreNulls), }) .alias("ntile"), ]) @@ -628,6 +670,8 @@ mod test_unordered_row_number { #[apply(dialect_names)] async fn test(dialect_name: &str) { + use sqlparser::ast::NullTreatment; + println!("{dialect_name}"); let (conn, evaluable) = TOKIO_RUNTIME.block_on(make_connection(dialect_name)); @@ -660,6 +704,7 @@ mod test_unordered_row_number { partition_by: vec![], order_by: vec![], window_frame: window_frame.clone(), + null_treatment: Some(NullTreatment::IgnoreNulls), }) .alias("row_num"), ])