diff --git a/.github/workflows/assign_milestone.yml b/.github/workflows/assign_milestone.yml index 686655b9284..0e771fdaf0d 100644 --- a/.github/workflows/assign_milestone.yml +++ b/.github/workflows/assign_milestone.yml @@ -20,7 +20,7 @@ jobs: - name: Set up Go uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Checkout code uses: actions/checkout@v3 diff --git a/.github/workflows/check_label.yml b/.github/workflows/check_label.yml index c3c89273df8..9475a962097 100644 --- a/.github/workflows/check_label.yml +++ b/.github/workflows/check_label.yml @@ -3,10 +3,6 @@ on: pull_request: types: [opened, labeled, unlabeled, synchronize] -concurrency: - group: format('{0}-{1}', ${{ github.ref }}, 'Check Pull Request labels') - cancel-in-progress: true - permissions: read-all jobs: diff --git a/.github/workflows/check_make_vtadmin_authz_testgen.yml b/.github/workflows/check_make_vtadmin_authz_testgen.yml index 064a700d833..13bce6d26bb 100644 --- a/.github/workflows/check_make_vtadmin_authz_testgen.yml +++ b/.github/workflows/check_make_vtadmin_authz_testgen.yml @@ -31,7 +31,7 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' @@ -50,7 +50,7 @@ jobs: uses: actions/setup-go@v4 if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.vtadmin_changes == 'true' with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Tune the OS if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.vtadmin_changes == 'true' diff --git a/.github/workflows/check_make_vtadmin_web_proto.yml b/.github/workflows/check_make_vtadmin_web_proto.yml index 7db6bceeeeb..fbe1d29f619 100644 --- a/.github/workflows/check_make_vtadmin_web_proto.yml +++ b/.github/workflows/check_make_vtadmin_web_proto.yml @@ -31,7 +31,7 @@ jobs: - name: Check for changes in relevant files if: 
steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' @@ -52,7 +52,7 @@ jobs: uses: actions/setup-go@v4 if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.proto_changes == 'true' with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Setup Node if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.proto_changes == 'true' diff --git a/.github/workflows/cluster_endtoend_12.yml b/.github/workflows/cluster_endtoend_12.yml index 7496577ef0d..af92f74d523 100644 --- a/.github/workflows/cluster_endtoend_12.yml +++ b/.github/workflows/cluster_endtoend_12.yml @@ -49,13 +49,15 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ -71,7 +73,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_13.yml b/.github/workflows/cluster_endtoend_13.yml index c93f7e5526d..0e7e2b22e2a 100644 --- a/.github/workflows/cluster_endtoend_13.yml +++ b/.github/workflows/cluster_endtoend_13.yml @@ -49,13 +49,15 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 
'test.go' - 'Makefile' - 'build.env' @@ -71,7 +73,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_15.yml b/.github/workflows/cluster_endtoend_15.yml index 469cfaf3080..b60c5efb1ef 100644 --- a/.github/workflows/cluster_endtoend_15.yml +++ b/.github/workflows/cluster_endtoend_15.yml @@ -49,13 +49,15 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ -71,7 +73,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_18.yml b/.github/workflows/cluster_endtoend_18.yml index d76a5ada83d..5722ef60e47 100644 --- a/.github/workflows/cluster_endtoend_18.yml +++ b/.github/workflows/cluster_endtoend_18.yml @@ -49,13 +49,15 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ -71,7 +73,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && 
steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_21.yml b/.github/workflows/cluster_endtoend_21.yml index 23d570a2b98..518e7804d4c 100644 --- a/.github/workflows/cluster_endtoend_21.yml +++ b/.github/workflows/cluster_endtoend_21.yml @@ -49,13 +49,15 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ -71,7 +73,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_22.yml b/.github/workflows/cluster_endtoend_22.yml index 9cba996f309..bd70bafcfc2 100644 --- a/.github/workflows/cluster_endtoend_22.yml +++ b/.github/workflows/cluster_endtoend_22.yml @@ -49,13 +49,15 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ -71,7 +73,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - 
name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_backup_pitr.yml b/.github/workflows/cluster_endtoend_backup_pitr.yml index df34a64a562..31c88d0d2cb 100644 --- a/.github/workflows/cluster_endtoend_backup_pitr.yml +++ b/.github/workflows/cluster_endtoend_backup_pitr.yml @@ -49,13 +49,15 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ -71,7 +73,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_backup_pitr_mysql57.yml b/.github/workflows/cluster_endtoend_backup_pitr_mysql57.yml index 3ec659c5e59..142391ba006 100644 --- a/.github/workflows/cluster_endtoend_backup_pitr_mysql57.yml +++ b/.github/workflows/cluster_endtoend_backup_pitr_mysql57.yml @@ -53,13 +53,15 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ -75,7 +77,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up 
python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_backup_pitr_xtrabackup.yml b/.github/workflows/cluster_endtoend_backup_pitr_xtrabackup.yml index e1f66a29eb9..2fa246db39b 100644 --- a/.github/workflows/cluster_endtoend_backup_pitr_xtrabackup.yml +++ b/.github/workflows/cluster_endtoend_backup_pitr_xtrabackup.yml @@ -49,13 +49,15 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ -71,7 +73,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_backup_pitr_xtrabackup_mysql57.yml b/.github/workflows/cluster_endtoend_backup_pitr_xtrabackup_mysql57.yml index e9ae31c02c9..ee9e27de2b4 100644 --- a/.github/workflows/cluster_endtoend_backup_pitr_xtrabackup_mysql57.yml +++ b/.github/workflows/cluster_endtoend_backup_pitr_xtrabackup_mysql57.yml @@ -53,13 +53,15 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ -75,7 +77,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: 
actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_ers_prs_newfeatures_heavy.yml b/.github/workflows/cluster_endtoend_ers_prs_newfeatures_heavy.yml index 5c0e1e3a012..68483b6dec2 100644 --- a/.github/workflows/cluster_endtoend_ers_prs_newfeatures_heavy.yml +++ b/.github/workflows/cluster_endtoend_ers_prs_newfeatures_heavy.yml @@ -49,13 +49,15 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ -71,7 +73,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_mysql80.yml b/.github/workflows/cluster_endtoend_mysql80.yml index ea82738337d..48b03f43f0d 100644 --- a/.github/workflows/cluster_endtoend_mysql80.yml +++ b/.github/workflows/cluster_endtoend_mysql80.yml @@ -49,13 +49,15 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ -71,7 +73,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: 
actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_mysql_server_vault.yml b/.github/workflows/cluster_endtoend_mysql_server_vault.yml index 5d14faee1af..5ccccbdb0d5 100644 --- a/.github/workflows/cluster_endtoend_mysql_server_vault.yml +++ b/.github/workflows/cluster_endtoend_mysql_server_vault.yml @@ -49,13 +49,15 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ -71,7 +73,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_onlineddl_ghost.yml b/.github/workflows/cluster_endtoend_onlineddl_ghost.yml index f51032f2a34..2a6b55d6078 100644 --- a/.github/workflows/cluster_endtoend_onlineddl_ghost.yml +++ b/.github/workflows/cluster_endtoend_onlineddl_ghost.yml @@ -49,13 +49,15 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ -72,7 +74,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: 
actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_onlineddl_ghost_mysql57.yml b/.github/workflows/cluster_endtoend_onlineddl_ghost_mysql57.yml index d69865221e0..1d7c39b1f32 100644 --- a/.github/workflows/cluster_endtoend_onlineddl_ghost_mysql57.yml +++ b/.github/workflows/cluster_endtoend_onlineddl_ghost_mysql57.yml @@ -49,13 +49,15 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ -72,7 +74,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_onlineddl_revert.yml b/.github/workflows/cluster_endtoend_onlineddl_revert.yml index 42fcb0ac100..b1a3c2f1df7 100644 --- a/.github/workflows/cluster_endtoend_onlineddl_revert.yml +++ b/.github/workflows/cluster_endtoend_onlineddl_revert.yml @@ -49,13 +49,15 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ -72,7 +74,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && 
steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_onlineddl_revert_mysql57.yml b/.github/workflows/cluster_endtoend_onlineddl_revert_mysql57.yml index e3b4e0960dc..ce473d95275 100644 --- a/.github/workflows/cluster_endtoend_onlineddl_revert_mysql57.yml +++ b/.github/workflows/cluster_endtoend_onlineddl_revert_mysql57.yml @@ -49,13 +49,15 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ -72,7 +74,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_onlineddl_scheduler.yml b/.github/workflows/cluster_endtoend_onlineddl_scheduler.yml index 087d33bf9b6..9c880c44477 100644 --- a/.github/workflows/cluster_endtoend_onlineddl_scheduler.yml +++ b/.github/workflows/cluster_endtoend_onlineddl_scheduler.yml @@ -49,13 +49,15 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ -72,7 +74,7 @@ jobs: if: 
steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_onlineddl_scheduler_mysql57.yml b/.github/workflows/cluster_endtoend_onlineddl_scheduler_mysql57.yml index 5e21df5ac38..8900aacf26b 100644 --- a/.github/workflows/cluster_endtoend_onlineddl_scheduler_mysql57.yml +++ b/.github/workflows/cluster_endtoend_onlineddl_scheduler_mysql57.yml @@ -49,13 +49,15 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ -72,7 +74,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_onlineddl_vrepl.yml b/.github/workflows/cluster_endtoend_onlineddl_vrepl.yml index fb1b980793e..a123d180863 100644 --- a/.github/workflows/cluster_endtoend_onlineddl_vrepl.yml +++ b/.github/workflows/cluster_endtoend_onlineddl_vrepl.yml @@ -49,13 +49,15 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 
'build.env' @@ -72,7 +74,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_onlineddl_vrepl_mysql57.yml b/.github/workflows/cluster_endtoend_onlineddl_vrepl_mysql57.yml index 1fc42939924..9ed3c136cfe 100644 --- a/.github/workflows/cluster_endtoend_onlineddl_vrepl_mysql57.yml +++ b/.github/workflows/cluster_endtoend_onlineddl_vrepl_mysql57.yml @@ -49,13 +49,15 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ -72,7 +74,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_onlineddl_vrepl_stress.yml b/.github/workflows/cluster_endtoend_onlineddl_vrepl_stress.yml index 38f09912b51..014b01f7afa 100644 --- a/.github/workflows/cluster_endtoend_onlineddl_vrepl_stress.yml +++ b/.github/workflows/cluster_endtoend_onlineddl_vrepl_stress.yml @@ -49,13 +49,15 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 
'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ -72,7 +74,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_onlineddl_vrepl_stress_mysql57.yml b/.github/workflows/cluster_endtoend_onlineddl_vrepl_stress_mysql57.yml index dbd670e82b5..c7c08467ba8 100644 --- a/.github/workflows/cluster_endtoend_onlineddl_vrepl_stress_mysql57.yml +++ b/.github/workflows/cluster_endtoend_onlineddl_vrepl_stress_mysql57.yml @@ -49,13 +49,15 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ -72,7 +74,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_onlineddl_vrepl_stress_suite.yml b/.github/workflows/cluster_endtoend_onlineddl_vrepl_stress_suite.yml index 86f22cf8610..caca4ff40e7 100644 --- a/.github/workflows/cluster_endtoend_onlineddl_vrepl_stress_suite.yml +++ b/.github/workflows/cluster_endtoend_onlineddl_vrepl_stress_suite.yml @@ -49,13 +49,15 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: 
changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ -72,7 +74,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_onlineddl_vrepl_stress_suite_mysql57.yml b/.github/workflows/cluster_endtoend_onlineddl_vrepl_stress_suite_mysql57.yml index f0c8d0b7bda..868dcded11b 100644 --- a/.github/workflows/cluster_endtoend_onlineddl_vrepl_stress_suite_mysql57.yml +++ b/.github/workflows/cluster_endtoend_onlineddl_vrepl_stress_suite_mysql57.yml @@ -49,13 +49,15 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ -72,7 +74,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_onlineddl_vrepl_suite.yml b/.github/workflows/cluster_endtoend_onlineddl_vrepl_suite.yml index b20cc1f901d..eda2c922d31 100644 --- a/.github/workflows/cluster_endtoend_onlineddl_vrepl_suite.yml +++ b/.github/workflows/cluster_endtoend_onlineddl_vrepl_suite.yml @@ -49,13 +49,15 @@ jobs: - name: Check for changes in relevant files if: 
steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ -72,7 +74,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_onlineddl_vrepl_suite_mysql57.yml b/.github/workflows/cluster_endtoend_onlineddl_vrepl_suite_mysql57.yml index 8568b13288b..04d93d8bff7 100644 --- a/.github/workflows/cluster_endtoend_onlineddl_vrepl_suite_mysql57.yml +++ b/.github/workflows/cluster_endtoend_onlineddl_vrepl_suite_mysql57.yml @@ -49,13 +49,15 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ -72,7 +74,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_schemadiff_vrepl.yml b/.github/workflows/cluster_endtoend_schemadiff_vrepl.yml index b937e72ef82..35b3d065922 100644 --- a/.github/workflows/cluster_endtoend_schemadiff_vrepl.yml +++ b/.github/workflows/cluster_endtoend_schemadiff_vrepl.yml @@ -49,13 +49,15 @@ jobs: - 
name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ -72,7 +74,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_schemadiff_vrepl_mysql57.yml b/.github/workflows/cluster_endtoend_schemadiff_vrepl_mysql57.yml index 2dae908d301..c5d724d1481 100644 --- a/.github/workflows/cluster_endtoend_schemadiff_vrepl_mysql57.yml +++ b/.github/workflows/cluster_endtoend_schemadiff_vrepl_mysql57.yml @@ -49,13 +49,15 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ -72,7 +74,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_tabletmanager_consul.yml b/.github/workflows/cluster_endtoend_tabletmanager_consul.yml index 341eae60951..aff38939010 100644 --- a/.github/workflows/cluster_endtoend_tabletmanager_consul.yml +++ 
b/.github/workflows/cluster_endtoend_tabletmanager_consul.yml @@ -49,13 +49,15 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ -71,7 +73,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_tabletmanager_tablegc.yml b/.github/workflows/cluster_endtoend_tabletmanager_tablegc.yml index 13c3ae789ed..47b4f9c5d65 100644 --- a/.github/workflows/cluster_endtoend_tabletmanager_tablegc.yml +++ b/.github/workflows/cluster_endtoend_tabletmanager_tablegc.yml @@ -49,13 +49,15 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ -71,7 +73,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_tabletmanager_tablegc_mysql57.yml b/.github/workflows/cluster_endtoend_tabletmanager_tablegc_mysql57.yml index 3d6b40bd8a7..54e2ba98aae 100644 --- 
a/.github/workflows/cluster_endtoend_tabletmanager_tablegc_mysql57.yml +++ b/.github/workflows/cluster_endtoend_tabletmanager_tablegc_mysql57.yml @@ -49,13 +49,15 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ -71,7 +73,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_tabletmanager_throttler_topo.yml b/.github/workflows/cluster_endtoend_tabletmanager_throttler_topo.yml index cc6b02e2324..65d4ee488e0 100644 --- a/.github/workflows/cluster_endtoend_tabletmanager_throttler_topo.yml +++ b/.github/workflows/cluster_endtoend_tabletmanager_throttler_topo.yml @@ -49,13 +49,15 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ -71,7 +73,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_topo_connection_cache.yml 
b/.github/workflows/cluster_endtoend_topo_connection_cache.yml index 142d2358b47..ba4cb24d227 100644 --- a/.github/workflows/cluster_endtoend_topo_connection_cache.yml +++ b/.github/workflows/cluster_endtoend_topo_connection_cache.yml @@ -49,13 +49,15 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ -71,7 +73,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_vreplication_across_db_versions.yml b/.github/workflows/cluster_endtoend_vreplication_across_db_versions.yml index 965c29d6bad..cbf461a7f5c 100644 --- a/.github/workflows/cluster_endtoend_vreplication_across_db_versions.yml +++ b/.github/workflows/cluster_endtoend_vreplication_across_db_versions.yml @@ -49,13 +49,15 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ -71,7 +73,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' 
diff --git a/.github/workflows/cluster_endtoend_vreplication_basic.yml b/.github/workflows/cluster_endtoend_vreplication_basic.yml index 7f77747477f..c9a6bae4cb1 100644 --- a/.github/workflows/cluster_endtoend_vreplication_basic.yml +++ b/.github/workflows/cluster_endtoend_vreplication_basic.yml @@ -49,13 +49,15 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ -71,7 +73,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_vreplication_cellalias.yml b/.github/workflows/cluster_endtoend_vreplication_cellalias.yml index 7dddf5f34d1..38c661c5b37 100644 --- a/.github/workflows/cluster_endtoend_vreplication_cellalias.yml +++ b/.github/workflows/cluster_endtoend_vreplication_cellalias.yml @@ -49,13 +49,15 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ -71,7 +73,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && 
steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_vreplication_migrate_vdiff2_convert_tz.yml b/.github/workflows/cluster_endtoend_vreplication_migrate_vdiff2_convert_tz.yml index 412113b055f..1d781298dc2 100644 --- a/.github/workflows/cluster_endtoend_vreplication_migrate_vdiff2_convert_tz.yml +++ b/.github/workflows/cluster_endtoend_vreplication_migrate_vdiff2_convert_tz.yml @@ -49,13 +49,15 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ -71,7 +73,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_vreplication_multicell.yml b/.github/workflows/cluster_endtoend_vreplication_multicell.yml index 01b1c242b91..a2c5211e156 100644 --- a/.github/workflows/cluster_endtoend_vreplication_multicell.yml +++ b/.github/workflows/cluster_endtoend_vreplication_multicell.yml @@ -49,13 +49,15 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ -71,7 +73,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 
+ go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_vreplication_partial_movetables_basic.yml b/.github/workflows/cluster_endtoend_vreplication_partial_movetables_basic.yml index c9d8f74c5ce..f89a836296b 100644 --- a/.github/workflows/cluster_endtoend_vreplication_partial_movetables_basic.yml +++ b/.github/workflows/cluster_endtoend_vreplication_partial_movetables_basic.yml @@ -49,13 +49,15 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ -71,7 +73,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_vreplication_partial_movetables_sequences.yml b/.github/workflows/cluster_endtoend_vreplication_partial_movetables_sequences.yml index 501cd17ca62..c4e7d12b7af 100644 --- a/.github/workflows/cluster_endtoend_vreplication_partial_movetables_sequences.yml +++ b/.github/workflows/cluster_endtoend_vreplication_partial_movetables_sequences.yml @@ -49,13 +49,15 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ 
-71,7 +73,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_vreplication_v2.yml b/.github/workflows/cluster_endtoend_vreplication_v2.yml index 61fb9971f0b..5729cf28d9a 100644 --- a/.github/workflows/cluster_endtoend_vreplication_v2.yml +++ b/.github/workflows/cluster_endtoend_vreplication_v2.yml @@ -49,13 +49,15 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ -71,7 +73,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_vstream_failover.yml b/.github/workflows/cluster_endtoend_vstream_failover.yml index 23c384fb5dc..5530b4182d4 100644 --- a/.github/workflows/cluster_endtoend_vstream_failover.yml +++ b/.github/workflows/cluster_endtoend_vstream_failover.yml @@ -49,13 +49,15 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ -71,7 +73,7 
@@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_vstream_stoponreshard_false.yml b/.github/workflows/cluster_endtoend_vstream_stoponreshard_false.yml index 36e4cec2f6b..f6d49708e01 100644 --- a/.github/workflows/cluster_endtoend_vstream_stoponreshard_false.yml +++ b/.github/workflows/cluster_endtoend_vstream_stoponreshard_false.yml @@ -49,13 +49,15 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ -71,7 +73,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_vstream_stoponreshard_true.yml b/.github/workflows/cluster_endtoend_vstream_stoponreshard_true.yml index fc9bea5efe8..4393ca9e110 100644 --- a/.github/workflows/cluster_endtoend_vstream_stoponreshard_true.yml +++ b/.github/workflows/cluster_endtoend_vstream_stoponreshard_true.yml @@ -49,13 +49,15 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 
'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ -71,7 +73,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_vstream_with_keyspaces_to_watch.yml b/.github/workflows/cluster_endtoend_vstream_with_keyspaces_to_watch.yml index 5135354ee13..896e7454df0 100644 --- a/.github/workflows/cluster_endtoend_vstream_with_keyspaces_to_watch.yml +++ b/.github/workflows/cluster_endtoend_vstream_with_keyspaces_to_watch.yml @@ -49,13 +49,15 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ -71,7 +73,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_vtbackup.yml b/.github/workflows/cluster_endtoend_vtbackup.yml index 4b954911a5f..4533b88b247 100644 --- a/.github/workflows/cluster_endtoend_vtbackup.yml +++ b/.github/workflows/cluster_endtoend_vtbackup.yml @@ -49,13 +49,15 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 
'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ -71,7 +73,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_vtctlbackup_sharded_clustertest_heavy.yml b/.github/workflows/cluster_endtoend_vtctlbackup_sharded_clustertest_heavy.yml index c1137849dd2..ebd3425b282 100644 --- a/.github/workflows/cluster_endtoend_vtctlbackup_sharded_clustertest_heavy.yml +++ b/.github/workflows/cluster_endtoend_vtctlbackup_sharded_clustertest_heavy.yml @@ -49,13 +49,15 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ -71,7 +73,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_vtgate_concurrentdml.yml b/.github/workflows/cluster_endtoend_vtgate_concurrentdml.yml index 070936647e2..92e2eb4b22d 100644 --- a/.github/workflows/cluster_endtoend_vtgate_concurrentdml.yml +++ b/.github/workflows/cluster_endtoend_vtgate_concurrentdml.yml @@ -49,13 +49,15 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: 
dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ -71,7 +73,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_vtgate_foreignkey_stress.yml b/.github/workflows/cluster_endtoend_vtgate_foreignkey_stress.yml index ff6688e1878..14e60795904 100644 --- a/.github/workflows/cluster_endtoend_vtgate_foreignkey_stress.yml +++ b/.github/workflows/cluster_endtoend_vtgate_foreignkey_stress.yml @@ -49,13 +49,15 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ -71,7 +73,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_vtgate_gen4.yml b/.github/workflows/cluster_endtoend_vtgate_gen4.yml index 8a12094028b..7ecd1e930e9 100644 --- a/.github/workflows/cluster_endtoend_vtgate_gen4.yml +++ b/.github/workflows/cluster_endtoend_vtgate_gen4.yml @@ -49,13 +49,15 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + 
uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ -71,7 +73,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_vtgate_general_heavy.yml b/.github/workflows/cluster_endtoend_vtgate_general_heavy.yml index a076b7b0a1a..d53cee91931 100644 --- a/.github/workflows/cluster_endtoend_vtgate_general_heavy.yml +++ b/.github/workflows/cluster_endtoend_vtgate_general_heavy.yml @@ -49,13 +49,15 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ -71,7 +73,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_vtgate_godriver.yml b/.github/workflows/cluster_endtoend_vtgate_godriver.yml index 8b652338b51..4013936c378 100644 --- a/.github/workflows/cluster_endtoend_vtgate_godriver.yml +++ b/.github/workflows/cluster_endtoend_vtgate_godriver.yml @@ -49,13 +49,15 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main 
+ uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ -71,7 +73,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_vtgate_partial_keyspace.yml b/.github/workflows/cluster_endtoend_vtgate_partial_keyspace.yml index e8cbcf61a83..a8c4562fe57 100644 --- a/.github/workflows/cluster_endtoend_vtgate_partial_keyspace.yml +++ b/.github/workflows/cluster_endtoend_vtgate_partial_keyspace.yml @@ -49,13 +49,15 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ -71,7 +73,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_vtgate_queries.yml b/.github/workflows/cluster_endtoend_vtgate_queries.yml index d908a13fe23..9519e5b3898 100644 --- a/.github/workflows/cluster_endtoend_vtgate_queries.yml +++ b/.github/workflows/cluster_endtoend_vtgate_queries.yml @@ -49,13 +49,15 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: 
frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ -71,7 +73,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_vtgate_readafterwrite.yml b/.github/workflows/cluster_endtoend_vtgate_readafterwrite.yml index 6a027d039cf..3988d550dbc 100644 --- a/.github/workflows/cluster_endtoend_vtgate_readafterwrite.yml +++ b/.github/workflows/cluster_endtoend_vtgate_readafterwrite.yml @@ -49,13 +49,15 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ -71,7 +73,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_vtgate_reservedconn.yml b/.github/workflows/cluster_endtoend_vtgate_reservedconn.yml index b0278160979..6e91a778637 100644 --- a/.github/workflows/cluster_endtoend_vtgate_reservedconn.yml +++ b/.github/workflows/cluster_endtoend_vtgate_reservedconn.yml @@ -49,13 +49,15 @@ jobs: - name: Check for changes in relevant files if: 
steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ -71,7 +73,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_vtgate_schema.yml b/.github/workflows/cluster_endtoend_vtgate_schema.yml index 3d99a03cace..72aeee8f90a 100644 --- a/.github/workflows/cluster_endtoend_vtgate_schema.yml +++ b/.github/workflows/cluster_endtoend_vtgate_schema.yml @@ -49,13 +49,15 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ -71,7 +73,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_vtgate_schema_tracker.yml b/.github/workflows/cluster_endtoend_vtgate_schema_tracker.yml index edbd27f3bf6..ce19332eb79 100644 --- a/.github/workflows/cluster_endtoend_vtgate_schema_tracker.yml +++ b/.github/workflows/cluster_endtoend_vtgate_schema_tracker.yml @@ -49,13 +49,15 @@ jobs: - name: Check for changes in relevant files if: 
steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ -71,7 +73,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_vtgate_tablet_healthcheck_cache.yml b/.github/workflows/cluster_endtoend_vtgate_tablet_healthcheck_cache.yml index 2ab51d691c2..2f236042ba2 100644 --- a/.github/workflows/cluster_endtoend_vtgate_tablet_healthcheck_cache.yml +++ b/.github/workflows/cluster_endtoend_vtgate_tablet_healthcheck_cache.yml @@ -49,13 +49,15 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ -71,7 +73,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_vtgate_topo.yml b/.github/workflows/cluster_endtoend_vtgate_topo.yml index ce316847045..b2ddf73d8bc 100644 --- a/.github/workflows/cluster_endtoend_vtgate_topo.yml +++ b/.github/workflows/cluster_endtoend_vtgate_topo.yml @@ -49,13 +49,15 @@ jobs: - name: Check for 
changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ -71,7 +73,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_vtgate_topo_consul.yml b/.github/workflows/cluster_endtoend_vtgate_topo_consul.yml index eac4ada3249..e1466ce86d1 100644 --- a/.github/workflows/cluster_endtoend_vtgate_topo_consul.yml +++ b/.github/workflows/cluster_endtoend_vtgate_topo_consul.yml @@ -49,13 +49,15 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ -71,7 +73,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_vtgate_topo_etcd.yml b/.github/workflows/cluster_endtoend_vtgate_topo_etcd.yml index 7b108e5cbad..e067bdb404c 100644 --- a/.github/workflows/cluster_endtoend_vtgate_topo_etcd.yml +++ b/.github/workflows/cluster_endtoend_vtgate_topo_etcd.yml @@ -49,13 +49,15 @@ jobs: - name: Check for 
changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ -71,7 +73,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_vtgate_transaction.yml b/.github/workflows/cluster_endtoend_vtgate_transaction.yml index bbef397dea9..fb14d55437b 100644 --- a/.github/workflows/cluster_endtoend_vtgate_transaction.yml +++ b/.github/workflows/cluster_endtoend_vtgate_transaction.yml @@ -49,13 +49,15 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ -71,7 +73,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_vtgate_unsharded.yml b/.github/workflows/cluster_endtoend_vtgate_unsharded.yml index 23151270d1e..7c254301c98 100644 --- a/.github/workflows/cluster_endtoend_vtgate_unsharded.yml +++ b/.github/workflows/cluster_endtoend_vtgate_unsharded.yml @@ -49,13 +49,15 @@ jobs: - name: Check for 
changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ -71,7 +73,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_vtgate_vindex_heavy.yml b/.github/workflows/cluster_endtoend_vtgate_vindex_heavy.yml index ead14d0fd63..95c56466276 100644 --- a/.github/workflows/cluster_endtoend_vtgate_vindex_heavy.yml +++ b/.github/workflows/cluster_endtoend_vtgate_vindex_heavy.yml @@ -49,13 +49,15 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ -71,7 +73,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_vtgate_vschema.yml b/.github/workflows/cluster_endtoend_vtgate_vschema.yml index 9c6abdc0ad2..9f5b1dcd9c9 100644 --- a/.github/workflows/cluster_endtoend_vtgate_vschema.yml +++ b/.github/workflows/cluster_endtoend_vtgate_vschema.yml @@ -49,13 +49,15 @@ jobs: - name: Check for 
changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ -71,7 +73,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_vtorc.yml b/.github/workflows/cluster_endtoend_vtorc.yml index 631bac6fda1..e3df898859f 100644 --- a/.github/workflows/cluster_endtoend_vtorc.yml +++ b/.github/workflows/cluster_endtoend_vtorc.yml @@ -49,13 +49,15 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ -71,7 +73,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_vtorc_mysql57.yml b/.github/workflows/cluster_endtoend_vtorc_mysql57.yml index 04248744a26..be40ec5a142 100644 --- a/.github/workflows/cluster_endtoend_vtorc_mysql57.yml +++ b/.github/workflows/cluster_endtoend_vtorc_mysql57.yml @@ -49,13 +49,15 @@ jobs: - name: Check for changes in relevant files if: 
steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ -71,7 +73,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_vttablet_prscomplex.yml b/.github/workflows/cluster_endtoend_vttablet_prscomplex.yml index 48b336a6c11..5c7b8c0388a 100644 --- a/.github/workflows/cluster_endtoend_vttablet_prscomplex.yml +++ b/.github/workflows/cluster_endtoend_vttablet_prscomplex.yml @@ -49,13 +49,15 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ -71,7 +73,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_xb_backup.yml b/.github/workflows/cluster_endtoend_xb_backup.yml index 8a3158fadd9..a13e3d1f337 100644 --- a/.github/workflows/cluster_endtoend_xb_backup.yml +++ b/.github/workflows/cluster_endtoend_xb_backup.yml @@ -49,13 +49,15 @@ jobs: - name: Check for changes in relevant files if: 
steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ -71,7 +73,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_xb_backup_mysql57.yml b/.github/workflows/cluster_endtoend_xb_backup_mysql57.yml index e34ce3e1e8a..1dca8baa2c6 100644 --- a/.github/workflows/cluster_endtoend_xb_backup_mysql57.yml +++ b/.github/workflows/cluster_endtoend_xb_backup_mysql57.yml @@ -53,13 +53,15 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ -75,7 +77,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_xb_recovery.yml b/.github/workflows/cluster_endtoend_xb_recovery.yml index 6f62db39ce8..8c6775fe1e8 100644 --- a/.github/workflows/cluster_endtoend_xb_recovery.yml +++ b/.github/workflows/cluster_endtoend_xb_recovery.yml @@ -49,13 +49,15 @@ jobs: - name: Check for changes in relevant files if: 
steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ -71,7 +73,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/cluster_endtoend_xb_recovery_mysql57.yml b/.github/workflows/cluster_endtoend_xb_recovery_mysql57.yml index 2a9fe0eba92..222e35780bf 100644 --- a/.github/workflows/cluster_endtoend_xb_recovery_mysql57.yml +++ b/.github/workflows/cluster_endtoend_xb_recovery_mysql57.yml @@ -53,13 +53,15 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' @@ -75,7 +77,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/codeql_analysis.yml b/.github/workflows/codeql_analysis.yml index 2ec63e4fe5c..025b8700d64 100644 --- a/.github/workflows/codeql_analysis.yml +++ b/.github/workflows/codeql_analysis.yml @@ -44,7 +44,7 @@ jobs: - name: Set up Go uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Get base 
dependencies run: | diff --git a/.github/workflows/create_release.yml b/.github/workflows/create_release.yml index 86bd5b686a0..45d29c15ace 100644 --- a/.github/workflows/create_release.yml +++ b/.github/workflows/create_release.yml @@ -20,7 +20,7 @@ jobs: - name: Set up Go uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Setup node uses: actions/setup-node@v3 diff --git a/.github/workflows/docker_test_cluster_10.yml b/.github/workflows/docker_test_cluster_10.yml index 8b9d2e278ae..829bf292d3b 100644 --- a/.github/workflows/docker_test_cluster_10.yml +++ b/.github/workflows/docker_test_cluster_10.yml @@ -31,7 +31,7 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' @@ -54,7 +54,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Tune the OS if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/docker_test_cluster_25.yml b/.github/workflows/docker_test_cluster_25.yml index 007035b6e71..020d344a385 100644 --- a/.github/workflows/docker_test_cluster_25.yml +++ b/.github/workflows/docker_test_cluster_25.yml @@ -31,7 +31,7 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' @@ -54,7 +54,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Tune the OS if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git 
a/.github/workflows/e2e_race.yml b/.github/workflows/e2e_race.yml index 55b84fb378a..dc3426771bb 100644 --- a/.github/workflows/e2e_race.yml +++ b/.github/workflows/e2e_race.yml @@ -30,7 +30,7 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' @@ -52,7 +52,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Tune the OS if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/endtoend.yml b/.github/workflows/endtoend.yml index 57802c676a9..da6ab849f7a 100644 --- a/.github/workflows/endtoend.yml +++ b/.github/workflows/endtoend.yml @@ -30,7 +30,7 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' @@ -52,7 +52,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Tune the OS if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/local_example.yml b/.github/workflows/local_example.yml index 30ab7e3e335..605ff4112dc 100644 --- a/.github/workflows/local_example.yml +++ b/.github/workflows/local_example.yml @@ -34,7 +34,7 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' @@ -57,7 +57,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && 
steps.changes.outputs.examples == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - uses: actions/setup-node@v3 if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.examples == 'true' diff --git a/.github/workflows/region_example.yml b/.github/workflows/region_example.yml index 6a87d0de54f..b59d2a780d5 100644 --- a/.github/workflows/region_example.yml +++ b/.github/workflows/region_example.yml @@ -34,7 +34,7 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' @@ -57,7 +57,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.examples == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - uses: actions/setup-node@v3 if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.examples == 'true' diff --git a/.github/workflows/static_checks_etc.yml b/.github/workflows/static_checks_etc.yml index 5f45af00726..be2aea307c6 100644 --- a/.github/workflows/static_checks_etc.yml +++ b/.github/workflows/static_checks_etc.yml @@ -35,7 +35,7 @@ jobs: - name: Check for changes in Go files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' @@ -106,7 +106,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && (steps.changes.outputs.go_files == 'true' || steps.changes.outputs.parser_changes == 'true' || steps.changes.outputs.proto_changes == 'true') uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Tune the OS if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.go_files == 'true' diff --git a/.github/workflows/unit_race.yml b/.github/workflows/unit_race.yml index fe7fa7e684e..e704f1f17dd 100644 --- 
a/.github/workflows/unit_race.yml +++ b/.github/workflows/unit_race.yml @@ -35,7 +35,7 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' @@ -57,7 +57,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.unit_tests == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Tune the OS if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.unit_tests == 'true' diff --git a/.github/workflows/unit_test_mysql57.yml b/.github/workflows/unit_test_mysql57.yml index a1bb1653775..3645fc9046a 100644 --- a/.github/workflows/unit_test_mysql57.yml +++ b/.github/workflows/unit_test_mysql57.yml @@ -49,7 +49,7 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' @@ -71,7 +71,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.unit_tests == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.unit_tests == 'true' diff --git a/.github/workflows/unit_test_mysql80.yml b/.github/workflows/unit_test_mysql80.yml index 6c9416be543..a7972f157d6 100644 --- a/.github/workflows/unit_test_mysql80.yml +++ b/.github/workflows/unit_test_mysql80.yml @@ -49,7 +49,7 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' @@ -71,7 +71,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.unit_tests == 'true' uses: actions/setup-go@v4 with: - 
go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.unit_tests == 'true' diff --git a/.github/workflows/update_golang_version.yml b/.github/workflows/update_golang_version.yml index e2e8a219687..51e757ef48f 100644 --- a/.github/workflows/update_golang_version.yml +++ b/.github/workflows/update_golang_version.yml @@ -22,7 +22,7 @@ jobs: - name: Set up Go uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Check out code uses: actions/checkout@v3 diff --git a/.github/workflows/upgrade_downgrade_test_backups_e2e.yml b/.github/workflows/upgrade_downgrade_test_backups_e2e.yml index c59319e2eb2..3763f3ec132 100644 --- a/.github/workflows/upgrade_downgrade_test_backups_e2e.yml +++ b/.github/workflows/upgrade_downgrade_test_backups_e2e.yml @@ -49,7 +49,7 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' @@ -72,7 +72,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/upgrade_downgrade_test_backups_e2e_next_release.yml b/.github/workflows/upgrade_downgrade_test_backups_e2e_next_release.yml index ebebc27925a..1ea62e10b09 100644 --- a/.github/workflows/upgrade_downgrade_test_backups_e2e_next_release.yml +++ b/.github/workflows/upgrade_downgrade_test_backups_e2e_next_release.yml @@ -51,7 +51,7 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' @@ -74,7 +74,7 @@ 
jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.22.0 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/upgrade_downgrade_test_backups_manual.yml b/.github/workflows/upgrade_downgrade_test_backups_manual.yml index 02b57cc8cd0..235089eb0a0 100644 --- a/.github/workflows/upgrade_downgrade_test_backups_manual.yml +++ b/.github/workflows/upgrade_downgrade_test_backups_manual.yml @@ -52,7 +52,7 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' @@ -76,7 +76,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/upgrade_downgrade_test_backups_manual_next_release.yml b/.github/workflows/upgrade_downgrade_test_backups_manual_next_release.yml index 95eb69a9fa9..89a4c06cec0 100644 --- a/.github/workflows/upgrade_downgrade_test_backups_manual_next_release.yml +++ b/.github/workflows/upgrade_downgrade_test_backups_manual_next_release.yml @@ -53,7 +53,7 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' @@ -77,7 +77,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.22.0 - name: Set up python if: 
steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/upgrade_downgrade_test_query_serving_queries.yml b/.github/workflows/upgrade_downgrade_test_query_serving_queries.yml index 60bd2d6a651..4afa0317314 100644 --- a/.github/workflows/upgrade_downgrade_test_query_serving_queries.yml +++ b/.github/workflows/upgrade_downgrade_test_query_serving_queries.yml @@ -52,7 +52,7 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' @@ -75,7 +75,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/upgrade_downgrade_test_query_serving_queries_next_release.yml b/.github/workflows/upgrade_downgrade_test_query_serving_queries_next_release.yml index 72f8ef905da..838676d0c24 100644 --- a/.github/workflows/upgrade_downgrade_test_query_serving_queries_next_release.yml +++ b/.github/workflows/upgrade_downgrade_test_query_serving_queries_next_release.yml @@ -53,7 +53,7 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' @@ -76,7 +76,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.22.0 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/upgrade_downgrade_test_query_serving_schema.yml 
b/.github/workflows/upgrade_downgrade_test_query_serving_schema.yml index 6e950abe82f..a1f88edd59a 100644 --- a/.github/workflows/upgrade_downgrade_test_query_serving_schema.yml +++ b/.github/workflows/upgrade_downgrade_test_query_serving_schema.yml @@ -52,7 +52,7 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' @@ -75,7 +75,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/upgrade_downgrade_test_query_serving_schema_next_release.yml b/.github/workflows/upgrade_downgrade_test_query_serving_schema_next_release.yml index 2afa2dd7354..73e913a7c70 100644 --- a/.github/workflows/upgrade_downgrade_test_query_serving_schema_next_release.yml +++ b/.github/workflows/upgrade_downgrade_test_query_serving_schema_next_release.yml @@ -53,7 +53,7 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' @@ -76,7 +76,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.22.0 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/upgrade_downgrade_test_reparent_new_vtctl.yml b/.github/workflows/upgrade_downgrade_test_reparent_new_vtctl.yml index bffe3894701..6599fa58c32 100644 --- a/.github/workflows/upgrade_downgrade_test_reparent_new_vtctl.yml +++ 
b/.github/workflows/upgrade_downgrade_test_reparent_new_vtctl.yml @@ -53,7 +53,7 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' @@ -76,7 +76,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.22.0 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/upgrade_downgrade_test_reparent_new_vttablet.yml b/.github/workflows/upgrade_downgrade_test_reparent_new_vttablet.yml index c14c0a62c5f..5756fda4621 100644 --- a/.github/workflows/upgrade_downgrade_test_reparent_new_vttablet.yml +++ b/.github/workflows/upgrade_downgrade_test_reparent_new_vttablet.yml @@ -53,7 +53,7 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' @@ -76,7 +76,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.22.0 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/upgrade_downgrade_test_reparent_old_vtctl.yml b/.github/workflows/upgrade_downgrade_test_reparent_old_vtctl.yml index 34c9e2796f8..c5d5b33a19b 100644 --- a/.github/workflows/upgrade_downgrade_test_reparent_old_vtctl.yml +++ b/.github/workflows/upgrade_downgrade_test_reparent_old_vtctl.yml @@ -52,7 +52,7 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: 
dorny/paths-filter@v3.0.1 id: changes with: token: '' @@ -75,7 +75,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/.github/workflows/upgrade_downgrade_test_reparent_old_vttablet.yml b/.github/workflows/upgrade_downgrade_test_reparent_old_vttablet.yml index a0bde12d6a3..7a0f9f58da4 100644 --- a/.github/workflows/upgrade_downgrade_test_reparent_old_vttablet.yml +++ b/.github/workflows/upgrade_downgrade_test_reparent_old_vttablet.yml @@ -52,7 +52,7 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' @@ -75,7 +75,7 @@ jobs: if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' uses: actions/setup-go@v4 with: - go-version: 1.21.4 + go-version: 1.21.7 - name: Set up python if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true' diff --git a/docker/stardb/vtgate_start_script.sh b/docker/stardb/vtgate_start_script.sh index decd3346999..26615cf73f2 100755 --- a/docker/stardb/vtgate_start_script.sh +++ b/docker/stardb/vtgate_start_script.sh @@ -55,6 +55,7 @@ function vtgate_start() { --mysql_server_version $MYSQL_VERSION \ --pid_file /export/data/mysql/tmp/vtgate.pid \ --mysql_auth_server_config_file /vt/config/vtgate/user.json \ + --vschema_ddl_authorized_users % \ > /export/data/mysql/tmp/vtgate.out 2>&1" else ARGS="--alsologtostderr \ @@ -78,6 +79,7 @@ function vtgate_start() { --mysql_server_version $MYSQL_VERSION \ --pid_file /export/data/mysql/tmp/vtgate.pid \ --mysql_auth_server_config_file /vt/config/vtgate/user.json \ + 
--vschema_ddl_authorized_users % \ > /export/data/mysql/tmp/vtgate.out 2>&1" fi echo "su -c \"/export/bin/vtgate ${ARGS}\" vitess" diff --git a/docker/stardb/vttablet_start_script.sh b/docker/stardb/vttablet_start_script.sh index 19e15d6f0ad..40d09ff6f1e 100755 --- a/docker/stardb/vttablet_start_script.sh +++ b/docker/stardb/vttablet_start_script.sh @@ -57,8 +57,6 @@ function vttablet_start() { --unhealthy_threshold $unHealthyThreshold \ --queryserver-config-schema-reload-time 60 \ --init_tablet_type $TABLET_TYPE \ - --heartbeat_enable \ - --heartbeat_interval=1000ms \ --enforce_strict_trans_tables=false \ --watch_replication_stream=true" else @@ -91,8 +89,6 @@ function vttablet_start() { --unhealthy_threshold $unHealthyThreshold \ --queryserver-config-schema-reload-time 60 \ --init_tablet_type $TABLET_TYPE \ - --heartbeat_enable \ - --heartbeat_interval=1000ms \ --enforce_strict_trans_tables=false \ --watch_replication_stream=true" fi diff --git a/docker/vttestserver/Dockerfile.mysql57 b/docker/vttestserver/Dockerfile.mysql57 index b515873d4bd..850f2614ff6 100644 --- a/docker/vttestserver/Dockerfile.mysql57 +++ b/docker/vttestserver/Dockerfile.mysql57 @@ -58,4 +58,4 @@ USER vitess COPY docker/vttestserver/setup_vschema_folder.sh /vt/setup_vschema_folder.sh COPY docker/vttestserver/run.sh /vt/run.sh -CMD /vt/run.sh "5.7.9-vitess" +CMD /vt/run.sh "5.7.31-vitess" diff --git a/docker/vttestserver/Dockerfile.mysql80 b/docker/vttestserver/Dockerfile.mysql80 index ad3319cc83d..9dd3df535e5 100644 --- a/docker/vttestserver/Dockerfile.mysql80 +++ b/docker/vttestserver/Dockerfile.mysql80 @@ -58,4 +58,4 @@ USER vitess COPY docker/vttestserver/setup_vschema_folder.sh /vt/setup_vschema_folder.sh COPY docker/vttestserver/run.sh /vt/run.sh -CMD /vt/run.sh "8.0.21-vitess" +CMD /vt/run.sh "8.0.30-Vitess" diff --git a/examples/common/scripts/vttablet-up.sh b/examples/common/scripts/vttablet-up.sh index 629a21746ba..bd8994b37c4 100755 --- a/examples/common/scripts/vttablet-up.sh +++ 
b/examples/common/scripts/vttablet-up.sh @@ -54,8 +54,6 @@ vttablet \ --service_map 'grpc-queryservice,grpc-tabletmanager,grpc-updatestream' \ --pid_file $VTDATAROOT/$tablet_dir/vttablet.pid \ --queryserver-config-max-result-size 10000000 \ - --heartbeat_enable \ - --heartbeat_interval=250ms \ > $VTDATAROOT/$tablet_dir/vttablet.out 2>&1 & # Block waiting for the tablet to be listening diff --git a/go/cmd/mysqlctld/cli/mysqlctld.go b/go/cmd/mysqlctld/cli/mysqlctld.go index 6ebaa5dc422..db0a3c1e25a 100644 --- a/go/cmd/mysqlctld/cli/mysqlctld.go +++ b/go/cmd/mysqlctld/cli/mysqlctld.go @@ -64,6 +64,7 @@ var ( --mysql_port=17100 \ --socket_file=/path/to/socket_file`, Args: cobra.NoArgs, + Version: servenv.AppVersion.String(), PreRunE: servenv.CobraPreRunE, RunE: run, } diff --git a/go/cmd/topo2topo/cli/topo2topo.go b/go/cmd/topo2topo/cli/topo2topo.go index 6e7e173872b..0fd553954a9 100644 --- a/go/cmd/topo2topo/cli/topo2topo.go +++ b/go/cmd/topo2topo/cli/topo2topo.go @@ -51,6 +51,7 @@ var ( It can also be used to compare data between two topologies.`, Args: cobra.NoArgs, PreRunE: servenv.CobraPreRunE, + Version: servenv.AppVersion.String(), RunE: run, } ) diff --git a/go/cmd/vtcombo/cli/main.go b/go/cmd/vtcombo/cli/main.go index bfc0ad894fe..b272b4c56f1 100644 --- a/go/cmd/vtcombo/cli/main.go +++ b/go/cmd/vtcombo/cli/main.go @@ -163,7 +163,7 @@ func run(cmd *cobra.Command, args []string) (err error) { // vtctld UI requires the cell flag cmd.Flags().Set("cell", tpb.Cells[0]) - if cmd.Flags().Lookup("log_dir") == nil { + if f := cmd.Flags().Lookup("log_dir"); f != nil && !f.Changed { cmd.Flags().Set("log_dir", "$VTDATAROOT/tmp") } diff --git a/go/cmd/vtexplain/cli/vtexplain.go b/go/cmd/vtexplain/cli/vtexplain.go index 8b0622cf8a3..3c5c8c04791 100644 --- a/go/cmd/vtexplain/cli/vtexplain.go +++ b/go/cmd/vtexplain/cli/vtexplain.go @@ -24,6 +24,7 @@ import ( "vitess.io/vitess/go/acl" "vitess.io/vitess/go/vt/logutil" "vitess.io/vitess/go/vt/servenv" + 
"vitess.io/vitess/go/vt/topo/memorytopo" "vitess.io/vitess/go/vt/vtexplain" "vitess.io/vitess/go/vt/vtgate/planbuilder/plancontext" @@ -84,6 +85,7 @@ If no keyspace name is present, VTExplain will return the following error: "```\nvtexplain -- -shards 128 --vschema-file vschema.json --schema-file schema.sql --replication-mode \"ROW\" --output-mode text --sql \"INSERT INTO users (user_id, name) VALUES(1, 'john')\"\n```\n", Args: cobra.NoArgs, PreRunE: servenv.CobraPreRunE, + Version: servenv.AppVersion.String(), RunE: run, } ) @@ -175,7 +177,9 @@ func parseAndRun() error { Target: dbName, } - vte, err := vtexplain.Init(context.Background(), vschema, schema, ksShardMap, opts) + ctx := context.Background() + ts := memorytopo.NewServer(ctx, vtexplain.Cell) + vte, err := vtexplain.Init(context.Background(), ts, vschema, schema, ksShardMap, opts) if err != nil { return err } diff --git a/go/cmd/vtgate/v18_0_0 b/go/cmd/vtgate/v18_0_0 new file mode 100755 index 00000000000..65c8263385e Binary files /dev/null and b/go/cmd/vtgate/v18_0_0 differ diff --git a/go/cmd/vttestserver/cli/main.go b/go/cmd/vttestserver/cli/main.go index f9a2f16cd87..8b990d2d09a 100644 --- a/go/cmd/vttestserver/cli/main.go +++ b/go/cmd/vttestserver/cli/main.go @@ -104,6 +104,7 @@ func New() (cmd *cobra.Command) { Short: "vttestserver allows users to spawn a self-contained Vitess server for local testing/CI.", Args: cobra.NoArgs, PreRunE: servenv.CobraPreRunE, + Version: servenv.AppVersion.String(), RunE: run, } diff --git a/go/cmd/zkctld/cli/zkctld.go b/go/cmd/zkctld/cli/zkctld.go index 101f1013722..5ac3520868e 100644 --- a/go/cmd/zkctld/cli/zkctld.go +++ b/go/cmd/zkctld/cli/zkctld.go @@ -41,6 +41,7 @@ var ( Use: "zkctld", Short: "zkctld is a daemon that starts or initializes ZooKeeper with Vitess-specific configuration. 
It will stay running as long as the underlying ZooKeeper server, and will pass along SIGTERM.", Args: cobra.NoArgs, + Version: servenv.AppVersion.String(), PersistentPreRunE: servenv.CobraPreRunE, PostRun: func(cmd *cobra.Command, args []string) { logutil.Flush() diff --git a/go/event/syslogger/syslogger.go b/go/event/syslogger/syslogger.go index 1c8ff22136b..166af83a101 100644 --- a/go/event/syslogger/syslogger.go +++ b/go/event/syslogger/syslogger.go @@ -52,9 +52,11 @@ import ( "fmt" "log/syslog" "os" + "testing" "vitess.io/vitess/go/event" "vitess.io/vitess/go/vt/log" + "vitess.io/vitess/go/vt/servenv" ) // Syslogger is the interface that events should implement if they want to be @@ -143,10 +145,28 @@ func listener(ev Syslogger) { } func init() { + // We only want to init syslog when the app is being initialized + // Some binaries import the syslog package indirectly leading to + // the syslog.New function being called and this might fail if + // running inside Docker without the syslog daemon enabled, leading + // logging the error which will make glog think there are not --log_dir + // flag set as we have not parsed the flags yet. + // https://github.com/vitessio/vitess/issues/15120 + servenv.OnInit(func() { + initSyslog() + }) + + // We still do the init of syslog if we are testing this package. 
+ if testing.Testing() { + initSyslog() + } +} + +func initSyslog() { var err error writer, err = syslog.New(syslog.LOG_INFO|syslog.LOG_USER, os.Args[0]) if err != nil { - log.Errorf("can't connect to syslog") + log.Errorf("can't connect to syslog: %v", err.Error()) writer = nil } diff --git a/go/flags/endtoend/vtbackup.txt b/go/flags/endtoend/vtbackup.txt index 8c28c48a86b..a2f632b3475 100644 --- a/go/flags/endtoend/vtbackup.txt +++ b/go/flags/endtoend/vtbackup.txt @@ -204,7 +204,7 @@ Flags: --stderrthreshold severityFlag logs at or above this threshold go to stderr (default 1) --tablet_manager_grpc_ca string the server ca to use to validate servers when connecting --tablet_manager_grpc_cert string the cert to use to connect - --tablet_manager_grpc_concurrency int concurrency to use to talk to a vttablet server for performance-sensitive RPCs (like ExecuteFetchAs{Dba,AllPrivs,App}) (default 8) + --tablet_manager_grpc_concurrency int concurrency to use to talk to a vttablet server for performance-sensitive RPCs (like ExecuteFetchAs{Dba,App} and CheckThrottler) (default 8) --tablet_manager_grpc_connpool_size int number of tablets to keep tmclient connections open to (default 100) --tablet_manager_grpc_crl string the server crl to use to validate server certificates when connecting --tablet_manager_grpc_key string the key to use to connect diff --git a/go/flags/endtoend/vtcombo.txt b/go/flags/endtoend/vtcombo.txt index 822cecdfc30..4b08848b9f6 100644 --- a/go/flags/endtoend/vtcombo.txt +++ b/go/flags/endtoend/vtcombo.txt @@ -340,7 +340,7 @@ Flags: --tablet_hostname string if not empty, this hostname will be assumed instead of trying to resolve it --tablet_manager_grpc_ca string the server ca to use to validate servers when connecting --tablet_manager_grpc_cert string the cert to use to connect - --tablet_manager_grpc_concurrency int concurrency to use to talk to a vttablet server for performance-sensitive RPCs (like ExecuteFetchAs{Dba,AllPrivs,App}) (default 8) + 
--tablet_manager_grpc_concurrency int concurrency to use to talk to a vttablet server for performance-sensitive RPCs (like ExecuteFetchAs{Dba,App} and CheckThrottler) (default 8) --tablet_manager_grpc_connpool_size int number of tablets to keep tmclient connections open to (default 100) --tablet_manager_grpc_crl string the server crl to use to validate server certificates when connecting --tablet_manager_grpc_key string the key to use to connect diff --git a/go/flags/endtoend/vtctld.txt b/go/flags/endtoend/vtctld.txt index 6bf7a8d713e..7e8999ef3ff 100644 --- a/go/flags/endtoend/vtctld.txt +++ b/go/flags/endtoend/vtctld.txt @@ -137,7 +137,7 @@ Flags: --tablet_health_keep_alive duration close streaming tablet health connection if there are no requests for this long (default 5m0s) --tablet_manager_grpc_ca string the server ca to use to validate servers when connecting --tablet_manager_grpc_cert string the cert to use to connect - --tablet_manager_grpc_concurrency int concurrency to use to talk to a vttablet server for performance-sensitive RPCs (like ExecuteFetchAs{Dba,AllPrivs,App}) (default 8) + --tablet_manager_grpc_concurrency int concurrency to use to talk to a vttablet server for performance-sensitive RPCs (like ExecuteFetchAs{Dba,App} and CheckThrottler) (default 8) --tablet_manager_grpc_connpool_size int number of tablets to keep tmclient connections open to (default 100) --tablet_manager_grpc_crl string the server crl to use to validate server certificates when connecting --tablet_manager_grpc_key string the key to use to connect diff --git a/go/flags/endtoend/vtorc.txt b/go/flags/endtoend/vtorc.txt index 2cf3c885abc..1757a02cfb9 100644 --- a/go/flags/endtoend/vtorc.txt +++ b/go/flags/endtoend/vtorc.txt @@ -80,7 +80,7 @@ Flags: --table-refresh-interval int interval in milliseconds to refresh tables in status page with refreshRequired class --tablet_manager_grpc_ca string the server ca to use to validate servers when connecting --tablet_manager_grpc_cert 
string the cert to use to connect - --tablet_manager_grpc_concurrency int concurrency to use to talk to a vttablet server for performance-sensitive RPCs (like ExecuteFetchAs{Dba,AllPrivs,App}) (default 8) + --tablet_manager_grpc_concurrency int concurrency to use to talk to a vttablet server for performance-sensitive RPCs (like ExecuteFetchAs{Dba,App} and CheckThrottler) (default 8) --tablet_manager_grpc_connpool_size int number of tablets to keep tmclient connections open to (default 100) --tablet_manager_grpc_crl string the server crl to use to validate server certificates when connecting --tablet_manager_grpc_key string the key to use to connect diff --git a/go/flags/endtoend/vttablet.txt b/go/flags/endtoend/vttablet.txt index aca2be853e4..317c17b1a17 100644 --- a/go/flags/endtoend/vttablet.txt +++ b/go/flags/endtoend/vttablet.txt @@ -348,7 +348,7 @@ Flags: --tablet_hostname string if not empty, this hostname will be assumed instead of trying to resolve it --tablet_manager_grpc_ca string the server ca to use to validate servers when connecting --tablet_manager_grpc_cert string the cert to use to connect - --tablet_manager_grpc_concurrency int concurrency to use to talk to a vttablet server for performance-sensitive RPCs (like ExecuteFetchAs{Dba,AllPrivs,App}) (default 8) + --tablet_manager_grpc_concurrency int concurrency to use to talk to a vttablet server for performance-sensitive RPCs (like ExecuteFetchAs{Dba,App} and CheckThrottler) (default 8) --tablet_manager_grpc_connpool_size int number of tablets to keep tmclient connections open to (default 100) --tablet_manager_grpc_crl string the server crl to use to validate server certificates when connecting --tablet_manager_grpc_key string the key to use to connect diff --git a/go/flags/endtoend/vttestserver.txt b/go/flags/endtoend/vttestserver.txt index ad04fd83bf0..6ae2ee8af02 100644 --- a/go/flags/endtoend/vttestserver.txt +++ b/go/flags/endtoend/vttestserver.txt @@ -118,7 +118,7 @@ Flags: --tablet_hostname 
string The hostname to use for the tablet otherwise it will be derived from OS' hostname (default "localhost") --tablet_manager_grpc_ca string the server ca to use to validate servers when connecting --tablet_manager_grpc_cert string the cert to use to connect - --tablet_manager_grpc_concurrency int concurrency to use to talk to a vttablet server for performance-sensitive RPCs (like ExecuteFetchAs{Dba,AllPrivs,App}) (default 8) + --tablet_manager_grpc_concurrency int concurrency to use to talk to a vttablet server for performance-sensitive RPCs (like ExecuteFetchAs{Dba,App} and CheckThrottler) (default 8) --tablet_manager_grpc_connpool_size int number of tablets to keep tmclient connections open to (default 100) --tablet_manager_grpc_crl string the server crl to use to validate server certificates when connecting --tablet_manager_grpc_key string the key to use to connect diff --git a/go/flags/endtoend/zkctld.txt b/go/flags/endtoend/zkctld.txt index d808bd7ce67..20371e9e2d7 100644 --- a/go/flags/endtoend/zkctld.txt +++ b/go/flags/endtoend/zkctld.txt @@ -4,4 +4,5 @@ Usage: zkctld [flags] Flags: - -h, --help help for zkctld + -h, --help help for zkctld + -v, --version version for zkctld diff --git a/go/mysql/collations/env.go b/go/mysql/collations/env.go index 91fc2a8bd8c..878721ecf29 100644 --- a/go/mysql/collations/env.go +++ b/go/mysql/collations/env.go @@ -301,3 +301,8 @@ func (env *Environment) LookupByCharset(name string) *colldefaults { func (env *Environment) LookupCharsetName(coll ID) string { return env.byCharsetName[coll] } + +func (env *Environment) IsSupported(coll ID) bool { + _, supported := env.byID[coll] + return supported +} diff --git a/go/mysql/conn.go b/go/mysql/conn.go index e9cfb490013..91205503ded 100644 --- a/go/mysql/conn.go +++ b/go/mysql/conn.go @@ -1891,6 +1891,11 @@ func (c *Conn) ReConnectCrossTablet() error { Flags: uint64(c.StatusFlags | ClientLocalFiles), Charset: "utf8mb4", DbName: schema} + + if 
c.Capabilities|CapabilityClientFoundRows == c.Capabilities { + backendConnParam.Flags |= uint64(CapabilityClientFoundRows) + } + ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second) defer cancel() crossconn, err := Connect(ctx, backendConnParam) @@ -2125,7 +2130,7 @@ func (c *Conn) processData(data []byte, clientConn *Conn) error { if c.isEOFPacket(rowData) { _, statusFlag, _ := parse41EOFPacket(rowData) if clientConn.Capabilities&CapabilityClientDeprecateEOF == 0 { - if err := clientConn.writeEOFPacket(clientConn.StatusFlags|(statusFlag&ServerMoreResultsExists), 0); err != nil { + if err := clientConn.writeEOFPacket(statusFlag, 0); err != nil { return err } //if err := clientConn.flush(); err != nil { @@ -2149,7 +2154,7 @@ func (c *Conn) processData(data []byte, clientConn *Conn) error { } //writeOKPacketWithEOFHeader will flush - if err := clientConn.writeOKPacketWithEOFHeader(&PacketOK{statusFlags: clientConn.StatusFlags | (statusFlag & ServerMoreResultsExists)}); err != nil { + if err := clientConn.writeOKPacketWithEOFHeader(&PacketOK{statusFlags: statusFlag}); err != nil { return err } /*if err := clientConn.flush(); err != nil { @@ -2285,7 +2290,7 @@ func (c *Conn) ptComPrepare(data []byte, clientConn *Conn) error { func parse41EOFPacket(data []byte) (uint16, uint16, error) { pos := 1 - numWarnings, _, ok := readUint16(data, pos) + numWarnings, pos, ok := readUint16(data, pos) if !ok { return 0, 0, fmt.Errorf("error 41 eof packet") } diff --git a/go/mysql/datetime/datetime.go b/go/mysql/datetime/datetime.go index 73ae234e932..9f22f0ed736 100644 --- a/go/mysql/datetime/datetime.go +++ b/go/mysql/datetime/datetime.go @@ -543,9 +543,9 @@ func (dt DateTime) Compare(dt2 DateTime) int { return dt.Time.Compare(dt2.Time) } -func (dt DateTime) AddInterval(itv *Interval, stradd bool) (DateTime, uint8, bool) { +func (dt DateTime) AddInterval(itv *Interval, prec uint8, stradd bool) (DateTime, uint8, bool) { ok := dt.addInterval(itv) - return dt, 
itv.precision(stradd), ok + return dt, max(prec, itv.precision(stradd)), ok } func (dt DateTime) Round(p int) (r DateTime) { diff --git a/go/mysql/sqlerror/sql_error.go b/go/mysql/sqlerror/sql_error.go index df4980a381b..41a649a9a17 100644 --- a/go/mysql/sqlerror/sql_error.go +++ b/go/mysql/sqlerror/sql_error.go @@ -243,6 +243,7 @@ var stateToMysqlCode = map[vterrors.State]mysqlCode{ vterrors.CharacterSetMismatch: {num: ERCharacterSetMismatch, state: SSUnknownSQLState}, vterrors.WrongParametersToNativeFct: {num: ERWrongParametersToNativeFct, state: SSUnknownSQLState}, vterrors.KillDeniedError: {num: ERKillDenied, state: SSUnknownSQLState}, + vterrors.InvalidGroupFuncUse: {num: ERInvalidGroupFuncUse, state: SSUnknownSQLState}, } func getStateToMySQLState(state vterrors.State) mysqlCode { diff --git a/go/ptr/ptr.go b/go/ptr/ptr.go new file mode 100644 index 00000000000..8fd7f6c0bf9 --- /dev/null +++ b/go/ptr/ptr.go @@ -0,0 +1,31 @@ +/* +Copyright 2024 The Vitess Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package ptr + +// Of returns a pointer to the given value +func Of[T any](x T) *T { + return &x +} + +// Unwrap dereferences the given pointer if it's not nil. 
+// Otherwise, it returns default_ +func Unwrap[T any](x *T, default_ T) T { + if x != nil { + return *x + } + return default_ +} diff --git a/go/sqltypes/testing.go b/go/sqltypes/testing.go index 9042acf6680..c274a15daf7 100644 --- a/go/sqltypes/testing.go +++ b/go/sqltypes/testing.go @@ -77,7 +77,7 @@ func MakeTestResult(fields []*querypb.Field, rows ...string) *Result { for i, row := range rows { result.Rows[i] = make([]Value, len(fields)) for j, col := range split(row) { - if col == "null" { + if strings.ToLower(col) == "null" { result.Rows[i][j] = NULL continue } diff --git a/go/test/endtoend/backup/vtbackup/backup_only_test.go b/go/test/endtoend/backup/vtbackup/backup_only_test.go index e84346b846c..f9de4dffaa3 100644 --- a/go/test/endtoend/backup/vtbackup/backup_only_test.go +++ b/go/test/endtoend/backup/vtbackup/backup_only_test.go @@ -326,12 +326,12 @@ func tearDown(t *testing.T, initMysql bool) { } caughtUp := waitForReplicationToCatchup([]cluster.Vttablet{*replica1, *replica2}) require.True(t, caughtUp, "Timed out waiting for all replicas to catch up") - promoteCommands := "STOP SLAVE; RESET SLAVE ALL; RESET MASTER;" - disableSemiSyncCommands := "SET GLOBAL rpl_semi_sync_master_enabled = false; SET GLOBAL rpl_semi_sync_slave_enabled = false" + promoteCommands := []string{"STOP SLAVE", "RESET SLAVE ALL", "RESET MASTER"} + disableSemiSyncCommands := []string{"SET GLOBAL rpl_semi_sync_master_enabled = false", " SET GLOBAL rpl_semi_sync_slave_enabled = false"} for _, tablet := range []cluster.Vttablet{*primary, *replica1, *replica2} { - _, err := tablet.VttabletProcess.QueryTablet(promoteCommands, keyspaceName, true) + err := tablet.VttabletProcess.QueryTabletMultiple(promoteCommands, keyspaceName, true) require.Nil(t, err) - _, err = tablet.VttabletProcess.QueryTablet(disableSemiSyncCommands, keyspaceName, true) + err = tablet.VttabletProcess.QueryTabletMultiple(disableSemiSyncCommands, keyspaceName, true) require.Nil(t, err) } diff --git 
a/go/test/endtoend/cluster/vttablet_process.go b/go/test/endtoend/cluster/vttablet_process.go index 4a0548dfa40..c825ccacee2 100644 --- a/go/test/endtoend/cluster/vttablet_process.go +++ b/go/test/endtoend/cluster/vttablet_process.go @@ -464,6 +464,34 @@ func (vttablet *VttabletProcess) QueryTablet(query string, keyspace string, useD return executeQuery(conn, query) } +// QueryTabletMultiple lets you execute multiple queries -- without any +// results -- against the tablet. +func (vttablet *VttabletProcess) QueryTabletMultiple(queries []string, keyspace string, useDb bool) error { + conn, err := vttablet.TabletConn(keyspace, useDb) + if err != nil { + return err + } + defer conn.Close() + + for _, query := range queries { + log.Infof("Executing query %s (on %s)", query, vttablet.Name) + _, err := executeQuery(conn, query) + if err != nil { + return err + } + } + return nil +} + +// TabletConn opens a MySQL connection on this tablet +func (vttablet *VttabletProcess) TabletConn(keyspace string, useDb bool) (*mysql.Conn, error) { + if !useDb { + keyspace = "" + } + dbParams := NewConnParams(vttablet.DbPort, vttablet.DbPassword, path.Join(vttablet.Directory, "mysql.sock"), keyspace) + return vttablet.conn(&dbParams) +} + func (vttablet *VttabletProcess) defaultConn(dbname string) (*mysql.Conn, error) { dbParams := mysql.ConnParams{ Uname: "vt_dba", diff --git a/go/test/endtoend/clustertest/vtctld_test.go b/go/test/endtoend/clustertest/vtctld_test.go index 45643d869b1..c1b341ccd73 100644 --- a/go/test/endtoend/clustertest/vtctld_test.go +++ b/go/test/endtoend/clustertest/vtctld_test.go @@ -128,9 +128,51 @@ func testTabletStatus(t *testing.T) { } func testExecuteAsDba(t *testing.T) { - result, err := clusterInstance.VtctlclientProcess.ExecuteCommandWithOutput("ExecuteFetchAsDba", clusterInstance.Keyspaces[0].Shards[0].Vttablets[0].Alias, `SELECT 1 AS a`) - require.NoError(t, err) - assert.Equal(t, result, oneTableOutput) + tcases := []struct { + query string + result 
string + expectErr bool + }{ + { + query: "", + expectErr: true, + }, + { + query: "SELECT 1 AS a", + result: oneTableOutput, + }, + { + query: "SELECT 1 AS a; SELECT 1 AS a", + expectErr: true, + }, + { + query: "create table t(id int)", + result: "", + }, + { + query: "create table if not exists t(id int)", + result: "", + }, + { + query: "create table if not exists t(id int); create table if not exists t(id int);", + result: "", + }, + { + query: "create table if not exists t(id int); create table if not exists t(id int); SELECT 1 AS a", + expectErr: true, + }, + } + for _, tcase := range tcases { + t.Run(tcase.query, func(t *testing.T) { + result, err := clusterInstance.VtctlclientProcess.ExecuteCommandWithOutput("ExecuteFetchAsDba", clusterInstance.Keyspaces[0].Shards[0].Vttablets[0].Alias, tcase.query) + if tcase.expectErr { + assert.Error(t, err) + } else { + require.NoError(t, err) + assert.Equal(t, tcase.result, result) + } + }) + } } func testExecuteAsApp(t *testing.T) { diff --git a/go/test/endtoend/mysqlserver/main_test.go b/go/test/endtoend/mysqlserver/main_test.go index 2680bb5cb14..ce59ed11de2 100644 --- a/go/test/endtoend/mysqlserver/main_test.go +++ b/go/test/endtoend/mysqlserver/main_test.go @@ -51,7 +51,7 @@ var ( PARTITION BY HASH( TO_DAYS(created) ) PARTITIONS 10; ` - createProcSQL = `use test_keyspace; + createProcSQL = ` CREATE PROCEDURE testing() BEGIN delete from vt_insert_test; @@ -144,7 +144,7 @@ func TestMain(m *testing.M) { } primaryTabletProcess := clusterInstance.Keyspaces[0].Shards[0].PrimaryTablet().VttabletProcess - if _, err := primaryTabletProcess.QueryTablet(createProcSQL, keyspaceName, false); err != nil { + if _, err := primaryTabletProcess.QueryTablet(createProcSQL, keyspaceName, true); err != nil { return 1, err } diff --git a/go/test/endtoend/onlineddl/vrepl_suite/testdata/enum-reorder/alter b/go/test/endtoend/onlineddl/vrepl_suite/testdata/enum-reorder/alter new file mode 100644 index 00000000000..6e011c14192 --- /dev/null 
+++ b/go/test/endtoend/onlineddl/vrepl_suite/testdata/enum-reorder/alter @@ -0,0 +1 @@ +change e e enum('blue', 'green', 'red') not null diff --git a/go/test/endtoend/onlineddl/vrepl_suite/testdata/enum-reorder/create.sql b/go/test/endtoend/onlineddl/vrepl_suite/testdata/enum-reorder/create.sql new file mode 100644 index 00000000000..84ebd4094c1 --- /dev/null +++ b/go/test/endtoend/onlineddl/vrepl_suite/testdata/enum-reorder/create.sql @@ -0,0 +1,26 @@ +drop table if exists onlineddl_test; +create table onlineddl_test ( + id int auto_increment, + i int not null, + e enum('red', 'green', 'blue') not null, + primary key(id) +) auto_increment=1; + +insert into onlineddl_test values (null, 11, 'red'); +insert into onlineddl_test values (null, 13, 'green'); +insert into onlineddl_test values (null, 17, 'blue'); + +drop event if exists onlineddl_test; +delimiter ;; +create event onlineddl_test + on schedule every 1 second + starts current_timestamp + ends current_timestamp + interval 60 second + on completion not preserve + enable + do +begin + insert into onlineddl_test values (null, 211, 'red'); + insert into onlineddl_test values (null, 213, 'green'); + insert into onlineddl_test values (null, 217, 'blue'); +end ;; diff --git a/go/test/endtoend/reparent/emergencyreparent/ers_test.go b/go/test/endtoend/reparent/emergencyreparent/ers_test.go index 8f6638ecb7e..fbd4770e15e 100644 --- a/go/test/endtoend/reparent/emergencyreparent/ers_test.go +++ b/go/test/endtoend/reparent/emergencyreparent/ers_test.go @@ -349,8 +349,11 @@ func TestNoReplicationStatusAndIOThreadStopped(t *testing.T) { tablets := clusterInstance.Keyspaces[0].Shards[0].Vttablets utils.ConfirmReplication(t, tablets[0], []*cluster.Vttablet{tablets[1], tablets[2], tablets[3]}) - err := clusterInstance.VtctlclientProcess.ExecuteCommand("ExecuteFetchAsDba", tablets[1].Alias, `STOP SLAVE; RESET SLAVE ALL`) + err := clusterInstance.VtctlclientProcess.ExecuteCommand("ExecuteFetchAsDba", tablets[1].Alias, `STOP 
SLAVE`) require.NoError(t, err) + err = clusterInstance.VtctlclientProcess.ExecuteCommand("ExecuteFetchAsDba", tablets[1].Alias, `RESET SLAVE ALL`) + require.NoError(t, err) + // err = clusterInstance.VtctlclientProcess.ExecuteCommand("ExecuteFetchAsDba", tablets[3].Alias, `STOP SLAVE IO_THREAD;`) require.NoError(t, err) // Run an additional command in the current primary which will only be acked by tablets[2] and be in its relay log. diff --git a/go/test/endtoend/reparent/plannedreparent/reparent_test.go b/go/test/endtoend/reparent/plannedreparent/reparent_test.go index 45cbeb565c7..014570d8439 100644 --- a/go/test/endtoend/reparent/plannedreparent/reparent_test.go +++ b/go/test/endtoend/reparent/plannedreparent/reparent_test.go @@ -98,7 +98,7 @@ func TestPRSWithDrainedLaggingTablet(t *testing.T) { utils.ConfirmReplication(t, tablets[0], []*cluster.Vttablet{tablets[1], tablets[2], tablets[3]}) // make tablets[1 lag from the other tablets by setting the delay to a large number - utils.RunSQL(context.Background(), t, `stop slave;CHANGE MASTER TO MASTER_DELAY = 1999;start slave;`, tablets[1]) + utils.RunSQLs(context.Background(), t, []string{`stop slave`, `CHANGE MASTER TO MASTER_DELAY = 1999`, `start slave;`}, tablets[1]) // insert another row in tablets[1 utils.ConfirmReplication(t, tablets[0], []*cluster.Vttablet{tablets[2], tablets[3]}) @@ -226,26 +226,33 @@ func reparentFromOutside(t *testing.T, clusterInstance *cluster.LocalProcessClus } // commands to convert a replica to be writable - promoteReplicaCommands := "STOP SLAVE; RESET SLAVE ALL; SET GLOBAL read_only = OFF;" - utils.RunSQL(ctx, t, promoteReplicaCommands, tablets[1]) + promoteReplicaCommands := []string{"STOP SLAVE", "RESET SLAVE ALL", "SET GLOBAL read_only = OFF"} + utils.RunSQLs(ctx, t, promoteReplicaCommands, tablets[1]) // Get primary position _, gtID := cluster.GetPrimaryPosition(t, *tablets[1], utils.Hostname) // tablets[0] will now be a replica of tablets[1 - changeReplicationSourceCommands := 
fmt.Sprintf("RESET MASTER; RESET SLAVE; SET GLOBAL gtid_purged = '%s';"+ - "CHANGE MASTER TO MASTER_HOST='%s', MASTER_PORT=%d, MASTER_USER='vt_repl', MASTER_AUTO_POSITION = 1;"+ - "START SLAVE;", gtID, utils.Hostname, tablets[1].MySQLPort) - utils.RunSQL(ctx, t, changeReplicationSourceCommands, tablets[0]) + changeReplicationSourceCommands := []string{ + "RESET MASTER", + "RESET SLAVE", + fmt.Sprintf("SET GLOBAL gtid_purged = '%s'", gtID), + fmt.Sprintf("CHANGE MASTER TO MASTER_HOST='%s', MASTER_PORT=%d, MASTER_USER='vt_repl', MASTER_AUTO_POSITION = 1", utils.Hostname, tablets[1].MySQLPort), + } + utils.RunSQLs(ctx, t, changeReplicationSourceCommands, tablets[0]) // Capture time when we made tablets[1 writable baseTime := time.Now().UnixNano() / 1000000000 // tablets[2 will be a replica of tablets[1 - changeReplicationSourceCommands = fmt.Sprintf("STOP SLAVE; RESET MASTER; SET GLOBAL gtid_purged = '%s';"+ - "CHANGE MASTER TO MASTER_HOST='%s', MASTER_PORT=%d, MASTER_USER='vt_repl', MASTER_AUTO_POSITION = 1;"+ - "START SLAVE;", gtID, utils.Hostname, tablets[1].MySQLPort) - utils.RunSQL(ctx, t, changeReplicationSourceCommands, tablets[2]) + changeReplicationSourceCommands = []string{ + "STOP SLAVE", + "RESET MASTER", + fmt.Sprintf("SET GLOBAL gtid_purged = '%s'", gtID), + fmt.Sprintf("CHANGE MASTER TO MASTER_HOST='%s', MASTER_PORT=%d, MASTER_USER='vt_repl', MASTER_AUTO_POSITION = 1", utils.Hostname, tablets[1].MySQLPort), + "START SLAVE", + } + utils.RunSQLs(ctx, t, changeReplicationSourceCommands, tablets[2]) // To test the downPrimary, we kill the old primary first and delete its tablet record if downPrimary { diff --git a/go/test/endtoend/reparent/utils/utils.go b/go/test/endtoend/reparent/utils/utils.go index fc9e88f6471..e50b95500c0 100644 --- a/go/test/endtoend/reparent/utils/utils.go +++ b/go/test/endtoend/reparent/utils/utils.go @@ -260,6 +260,15 @@ func getMysqlConnParam(tablet *cluster.Vttablet) mysql.ConnParams { return connParams } +// RunSQLs is used to 
run SQL commands directly on the MySQL instance of a vttablet +func RunSQLs(ctx context.Context, t *testing.T, sqls []string, tablet *cluster.Vttablet) (results []*sqltypes.Result) { + for _, sql := range sqls { + result := RunSQL(ctx, t, sql, tablet) + results = append(results, result) + } + return results +} + // RunSQL is used to run a SQL command directly on the MySQL instance of a vttablet func RunSQL(ctx context.Context, t *testing.T, sql string, tablet *cluster.Vttablet) *sqltypes.Result { tabletParams := getMysqlConnParam(tablet) diff --git a/go/test/endtoend/tabletgateway/vtgate_test.go b/go/test/endtoend/tabletgateway/vtgate_test.go index 5617b6fbe63..4dc6bb9812c 100644 --- a/go/test/endtoend/tabletgateway/vtgate_test.go +++ b/go/test/endtoend/tabletgateway/vtgate_test.go @@ -28,15 +28,14 @@ import ( "testing" "time" - "vitess.io/vitess/go/test/endtoend/utils" - "vitess.io/vitess/go/vt/proto/topodata" - "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "vitess.io/vitess/go/mysql" - "vitess.io/vitess/go/test/endtoend/cluster" + "vitess.io/vitess/go/test/endtoend/utils" + vtorcutils "vitess.io/vitess/go/test/endtoend/vtorc/utils" + "vitess.io/vitess/go/vt/proto/topodata" ) func TestVtgateHealthCheck(t *testing.T) { @@ -59,7 +58,7 @@ func TestVtgateReplicationStatusCheck(t *testing.T) { time.Sleep(2 * time.Second) verifyVtgateVariables(t, clusterInstance.VtgateProcess.VerifyURL) ctx := context.Background() - conn, err := mysql.Connect(ctx, &vtParams) + conn, err := mysql.Connect(ctx, &vtParams) // VTGate require.NoError(t, err) defer conn.Close() @@ -68,6 +67,38 @@ func TestVtgateReplicationStatusCheck(t *testing.T) { expectNumRows := 2 numRows := len(qr.Rows) assert.Equal(t, expectNumRows, numRows, fmt.Sprintf("wrong number of results from show vitess_replication_status. Expected %d, got %d", expectNumRows, numRows)) + + // Disable VTOrc(s) recoveries so that it doesn't immediately repair/restart replication. 
+ for _, vtorcProcess := range clusterInstance.VTOrcProcesses { + vtorcutils.DisableGlobalRecoveries(t, vtorcProcess) + } + // Re-enable recoveries afterward as the cluster is re-used. + defer func() { + for _, vtorcProcess := range clusterInstance.VTOrcProcesses { + vtorcutils.EnableGlobalRecoveries(t, vtorcProcess) + } + }() + // Stop replication on the non-PRIMARY tablets. + _, err = clusterInstance.VtctldClientProcess.ExecuteCommandWithOutput("ExecuteFetchAsDBA", clusterInstance.Keyspaces[0].Shards[0].Replica().Alias, "stop slave") + require.NoError(t, err) + _, err = clusterInstance.VtctldClientProcess.ExecuteCommandWithOutput("ExecuteFetchAsDBA", clusterInstance.Keyspaces[0].Shards[0].Rdonly().Alias, "stop slave") + require.NoError(t, err) + // Restart replication afterward as the cluster is re-used. + defer func() { + _, err = clusterInstance.VtctldClientProcess.ExecuteCommandWithOutput("ExecuteFetchAsDBA", clusterInstance.Keyspaces[0].Shards[0].Replica().Alias, "start slave") + require.NoError(t, err) + _, err = clusterInstance.VtctldClientProcess.ExecuteCommandWithOutput("ExecuteFetchAsDBA", clusterInstance.Keyspaces[0].Shards[0].Rdonly().Alias, "start slave") + require.NoError(t, err) + }() + time.Sleep(2 * time.Second) // Build up some replication lag + res, err := conn.ExecuteFetch("show vitess_replication_status", 2, false) + require.NoError(t, err) + expectNumRows = 2 + numRows = len(qr.Rows) + assert.Equal(t, expectNumRows, numRows, fmt.Sprintf("wrong number of results from show vitess_replication_status, expected %d, got %d", expectNumRows, numRows)) + rawLag := res.Named().Rows[0]["ReplicationLag"] // Let's just look at the first row + lagInt, _ := rawLag.ToInt64() // Don't check the error as the value could be "NULL" + assert.True(t, rawLag.IsNull() || lagInt > 0, "replication lag should be NULL or greater than 0 but was: %s", rawLag.ToString()) } func TestVtgateReplicationStatusCheckWithTabletTypeChange(t *testing.T) { @@ -90,6 +121,11 @@ func 
TestVtgateReplicationStatusCheckWithTabletTypeChange(t *testing.T) { rdOnlyTablet := clusterInstance.Keyspaces[0].Shards[0].Rdonly() err = clusterInstance.VtctlclientChangeTabletType(rdOnlyTablet, topodata.TabletType_SPARE) require.NoError(t, err) + // Change it back to RDONLY afterward as the cluster is re-used. + defer func() { + err = clusterInstance.VtctlclientChangeTabletType(rdOnlyTablet, topodata.TabletType_RDONLY) + require.NoError(t, err) + }() // Only returns rows for REPLICA and RDONLY tablets -- so should be 1 of them since we updated 1 to spare qr = utils.Exec(t, conn, "show vitess_replication_status like '%'") diff --git a/go/test/endtoend/utils/cmp.go b/go/test/endtoend/utils/cmp.go index 2039599f714..44975490022 100644 --- a/go/test/endtoend/utils/cmp.go +++ b/go/test/endtoend/utils/cmp.go @@ -19,10 +19,11 @@ package utils import ( "context" "fmt" + "testing" + "github.com/google/go-cmp/cmp" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "testing" "vitess.io/vitess/go/mysql" "vitess.io/vitess/go/sqltypes" @@ -73,6 +74,12 @@ func (mcmp *MySQLCompare) AssertMatches(query, expected string) { } } +// SkipIfBinaryIsBelowVersion should be used instead of using utils.SkipIfBinaryIsBelowVersion(t, +// This is because we might be inside a Run block that has a different `t` variable +func (mcmp *MySQLCompare) SkipIfBinaryIsBelowVersion(majorVersion int, binary string) { + SkipIfBinaryIsBelowVersion(mcmp.t, majorVersion, binary) +} + // AssertMatchesAny ensures the given query produces any one of the expected results. 
func (mcmp *MySQLCompare) AssertMatchesAny(query string, expected ...string) { mcmp.t.Helper() diff --git a/go/test/endtoend/vtgate/queries/aggregation/aggregation_test.go b/go/test/endtoend/vtgate/queries/aggregation/aggregation_test.go index 3fcff33e525..ff0037ba519 100644 --- a/go/test/endtoend/vtgate/queries/aggregation/aggregation_test.go +++ b/go/test/endtoend/vtgate/queries/aggregation/aggregation_test.go @@ -37,10 +37,23 @@ func start(t *testing.T) (utils.MySQLCompare, func()) { deleteAll := func() { _, _ = utils.ExecAllowError(t, mcmp.VtConn, "set workload = oltp") - tables := []string{"t9", "aggr_test", "t3", "t7_xxhash", "aggr_test_dates", "t7_xxhash_idx", "t1", "t2", "t10"} + tables := []string{ + "t3", + "t3_id7_idx", + "t9", + "aggr_test", + "aggr_test_dates", + "t7_xxhash", + "t7_xxhash_idx", + "t1", + "t2", + "t10", + "emp", + "dept", + "bet_logs", + } for _, table := range tables { - _, err = mcmp.ExecAndIgnore("delete from " + table) - fmt.Println(table, err) + _, _ = mcmp.ExecAndIgnore("delete from " + table) } } @@ -54,7 +67,6 @@ func start(t *testing.T) (utils.MySQLCompare, func()) { } func TestAggregateTypes(t *testing.T) { - t.Skip() mcmp, closer := start(t) defer closer() mcmp.Exec("insert into aggr_test(id, val1, val2) values(1,'a',1), (2,'A',1), (3,'b',1), (4,'c',3), (5,'c',4)") @@ -96,7 +108,7 @@ func TestEqualFilterOnScatter(t *testing.T) { workloads := []string{"oltp", "olap"} for _, workload := range workloads { - t.Run(workload, func(t *testing.T) { + mcmp.Run(workload, func(mcmp *utils.MySQLCompare) { utils.Exec(t, mcmp.VtConn, fmt.Sprintf("set workload = '%s'", workload)) mcmp.AssertMatches("select count(*) as a from aggr_test having 1 = 1", `[[INT64(5)]]`) @@ -181,7 +193,7 @@ func TestNotEqualFilterOnScatter(t *testing.T) { workloads := []string{"oltp", "olap"} for _, workload := range workloads { - t.Run(workload, func(t *testing.T) { + mcmp.Run(workload, func(mcmp *utils.MySQLCompare) { utils.Exec(t, mcmp.VtConn, fmt.Sprintf("set 
workload = '%s'", workload)) mcmp.AssertMatches("select count(*) as a from aggr_test having a != 5", `[]`) @@ -205,7 +217,7 @@ func TestLessFilterOnScatter(t *testing.T) { workloads := []string{"oltp", "olap"} for _, workload := range workloads { - t.Run(workload, func(t *testing.T) { + mcmp.Run(workload, func(mcmp *utils.MySQLCompare) { utils.Exec(t, mcmp.VtConn, fmt.Sprintf("set workload = '%s'", workload)) mcmp.AssertMatches("select count(*) as a from aggr_test having a < 10", `[[INT64(5)]]`) mcmp.AssertMatches("select count(*) as a from aggr_test having 1 < a", `[[INT64(5)]]`) @@ -228,7 +240,7 @@ func TestLessEqualFilterOnScatter(t *testing.T) { workloads := []string{"oltp", "olap"} for _, workload := range workloads { - t.Run(workload, func(t *testing.T) { + mcmp.Run(workload, func(mcmp *utils.MySQLCompare) { utils.Exec(t, mcmp.VtConn, fmt.Sprintf("set workload = '%s'", workload)) mcmp.AssertMatches("select count(*) as a from aggr_test having a <= 10", `[[INT64(5)]]`) @@ -252,7 +264,7 @@ func TestGreaterFilterOnScatter(t *testing.T) { workloads := []string{"oltp", "olap"} for _, workload := range workloads { - t.Run(workload, func(t *testing.T) { + mcmp.Run(workload, func(mcmp *utils.MySQLCompare) { utils.Exec(t, mcmp.VtConn, fmt.Sprintf("set workload = '%s'", workload)) mcmp.AssertMatches("select count(*) as a from aggr_test having a > 1", `[[INT64(5)]]`) @@ -276,7 +288,7 @@ func TestGreaterEqualFilterOnScatter(t *testing.T) { workloads := []string{"oltp", "olap"} for _, workload := range workloads { - t.Run(workload, func(t *testing.T) { + mcmp.Run(workload, func(mcmp *utils.MySQLCompare) { utils.Exec(t, mcmp.VtConn, fmt.Sprintf("set workload = '%s'", workload)) mcmp.AssertMatches("select count(*) as a from aggr_test having a >= 1", `[[INT64(5)]]`) @@ -306,12 +318,13 @@ func TestGroupByOnlyFullGroupByOff(t *testing.T) { } func TestAggOnTopOfLimit(t *testing.T) { + t.Skip() mcmp, closer := start(t) defer closer() mcmp.Exec("insert into aggr_test(id, val1, 
val2) values(1,'a',6), (2,'a',1), (3,'b',1), (4,'c',3), (5,'c',4), (6,'b',null), (7,null,2), (8,null,null)") for _, workload := range []string{"oltp", "olap"} { - t.Run(workload, func(t *testing.T) { + mcmp.Run(workload, func(mcmp *utils.MySQLCompare) { utils.Exec(t, mcmp.VtConn, fmt.Sprintf("set workload = '%s'", workload)) mcmp.AssertMatches(" select count(*) from (select id, val1 from aggr_test where val2 < 4 limit 2) as x", "[[INT64(2)]]") mcmp.AssertMatches(" select count(val1) from (select id, val1 from aggr_test where val2 < 4 order by val1 desc limit 2) as x", "[[INT64(2)]]") @@ -344,7 +357,7 @@ func TestEmptyTableAggr(t *testing.T) { defer closer() for _, workload := range []string{"oltp", "olap"} { - t.Run(workload, func(t *testing.T) { + mcmp.Run(workload, func(mcmp *utils.MySQLCompare) { utils.Exec(t, mcmp.VtConn, fmt.Sprintf("set workload = %s", workload)) mcmp.AssertMatches(" select count(*) from t1 inner join t2 on (t1.t1_id = t2.id) where t1.value = 'foo'", "[[INT64(0)]]") mcmp.AssertMatches(" select count(*) from t2 inner join t1 on (t1.t1_id = t2.id) where t1.value = 'foo'", "[[INT64(0)]]") @@ -356,7 +369,7 @@ func TestEmptyTableAggr(t *testing.T) { mcmp.Exec("insert into t1(t1_id, `name`, `value`, shardkey) values(1,'a1','foo',100), (2,'b1','foo',200), (3,'c1','foo',300), (4,'a1','foo',100), (5,'b1','bar',200)") for _, workload := range []string{"oltp", "olap"} { - t.Run(workload, func(t *testing.T) { + mcmp.Run(workload, func(mcmp *utils.MySQLCompare) { utils.Exec(t, mcmp.VtConn, fmt.Sprintf("set workload = %s", workload)) mcmp.AssertMatches(" select count(*) from t1 inner join t2 on (t1.t1_id = t2.id) where t1.value = 'foo'", "[[INT64(0)]]") mcmp.AssertMatches(" select count(*) from t2 inner join t1 on (t1.t1_id = t2.id) where t1.value = 'foo'", "[[INT64(0)]]") @@ -491,6 +504,27 @@ func TestComplexAggregation(t *testing.T) { mcmp.Exec(`SELECT COUNT(*)+shardkey+MIN(t1_id)+1+MAX(t1_id)*SUM(t1_id)+1+name FROM t1 GROUP BY shardkey, name`) } +func 
TestJoinAggregation(t *testing.T) { + // This is new functionality in Vitess 20 + utils.SkipIfBinaryIsBelowVersion(t, 20, "vtgate") + + mcmp, closer := start(t) + defer closer() + + mcmp.Exec("insert into t1(t1_id, `name`, `value`, shardkey) values(1,'a1','foo',100), (2,'b1','foo',200), (3,'c1','foo',300), (4,'a1','foo',100), (5,'d1','toto',200), (6,'c1','tata',893), (7,'a1','titi',2380), (8,'b1','tete',12833), (9,'e1','yoyo',783493)") + + mcmp.Exec(`insert into bet_logs(id, merchant_game_id, bet_amount, game_id) values + (1, 1, 22.5, 40), (2, 1, 15.3, 40), + (3, 2, 22.5, 40), (4, 2, 15.3, 40), + (5, 3, 22.5, 40), (6, 3, 15.3, 40), + (7, 3, 22.5, 40), (8, 4, 15.3, 40) +`) + + mcmp.Exec("set @@sql_mode = ' '") + mcmp.Exec(`SELECT t1.name, SUM(b.bet_amount) AS bet_amount FROM bet_logs as b LEFT JOIN t1 ON b.merchant_game_id = t1.t1_id GROUP BY b.merchant_game_id`) + mcmp.Exec(`SELECT t1.name, CAST(SUM(b.bet_amount) AS DECIMAL(20,6)) AS bet_amount FROM bet_logs as b LEFT JOIN t1 ON b.merchant_game_id = t1.t1_id GROUP BY b.merchant_game_id`) +} + // TestGroupConcatAggregation tests the group_concat function with vitess doing the aggregation. 
func TestGroupConcatAggregation(t *testing.T) { utils.SkipIfBinaryIsBelowVersion(t, 18, "vtgate") @@ -576,3 +610,83 @@ func TestDistinctAggregation(t *testing.T) { }) } } + +func TestHavingQueries(t *testing.T) { + t.Skip() + utils.SkipIfBinaryIsBelowVersion(t, 18, "vtgate") + mcmp, closer := start(t) + defer closer() + + inserts := []string{ + `INSERT INTO emp (empno, ename, job, mgr, hiredate, sal, comm, deptno) VALUES + (1, 'John', 'Manager', NULL, '2022-01-01', 5000, 500, 1), + (2, 'Doe', 'Analyst', 1, '2023-01-01', 4500, NULL, 1), + (3, 'Jane', 'Clerk', 1, '2023-02-01', 3000, 200, 2), + (4, 'Mary', 'Analyst', 2, '2022-03-01', 4700, NULL, 1), + (5, 'Smith', 'Salesman', 3, '2023-01-15', 3200, 300, 3)`, + "INSERT INTO dept (deptno, dname, loc) VALUES (1, 'IT', 'New York'), (2, 'HR', 'London'), (3, 'Sales', 'San Francisco')", + "INSERT INTO t1 (t1_id, name, value, shardKey) VALUES (1, 'Name1', 'Value1', 100), (2, 'Name2', 'Value2', 100), (3, 'Name1', 'Value3', 200)", + "INSERT INTO aggr_test_dates (id, val1, val2) VALUES (1, '2023-01-01', '2023-01-02'), (2, '2023-02-01', '2023-02-02'), (3, '2023-03-01', '2023-03-02')", + "INSERT INTO t10 (k, a, b) VALUES (1, 10, 20), (2, 30, 40), (3, 50, 60)", + "INSERT INTO t3 (id5, id6, id7) VALUES (1, 10, 100), (2, 20, 200), (3, 30, 300)", + "INSERT INTO t9 (id1, id2, id3) VALUES (1, 'A1', 'B1'), (2, 'A2', 'B2'), (3, 'A1', 'B3')", + "INSERT INTO aggr_test (id, val1, val2) VALUES (1, 'Test1', 100), (2, 'Test2', 200), (3, 'Test1', 300), (4, 'Test3', 400)", + "INSERT INTO t2 (id, shardKey) VALUES (1, 100), (2, 200), (3, 300)", + `INSERT INTO bet_logs (id, merchant_game_id, bet_amount, game_id) VALUES + (1, 1, 100.0, 10), + (2, 1, 200.0, 11), + (3, 2, 300.0, 10), + (4, 3, 400.0, 12)`, + } + + for _, insert := range inserts { + mcmp.Exec(insert) + } + + queries := []string{ + // The following queries are not allowed by MySQL but Vitess allows them + // SELECT ename FROM emp GROUP BY ename HAVING sal > 5000 + // SELECT val1, 
COUNT(val2) FROM aggr_test_dates GROUP BY val1 HAVING val2 > 5 + // SELECT k, a FROM t10 GROUP BY k HAVING b > 2 + // SELECT loc FROM dept GROUP BY loc HAVING COUNT(deptno) AND dname = 'Sales' + // SELECT AVG(val2) AS average_val2 FROM aggr_test HAVING val1 = 'Test' + + // these first queries are all failing in different ways. let's check that Vitess also fails + + "SELECT deptno, AVG(sal) AS average_salary HAVING average_salary > 5000 FROM emp", + "SELECT job, COUNT(empno) AS num_employees FROM emp HAVING num_employees > 2", + "SELECT dname, SUM(sal) FROM dept JOIN emp ON dept.deptno = emp.deptno HAVING AVG(sal) > 6000", + "SELECT COUNT(*) AS count FROM emp WHERE count > 5", + "SELECT empno, MAX(sal) FROM emp HAVING COUNT(*) > 3", + "SELECT id, SUM(bet_amount) AS total_bets FROM bet_logs HAVING total_bets > 1000", + "SELECT merchant_game_id FROM bet_logs GROUP BY merchant_game_id HAVING SUM(bet_amount)", + "SELECT shardKey, COUNT(id) FROM t2 HAVING shardKey > 100", + "SELECT deptno FROM emp GROUP BY deptno HAVING MAX(hiredate) > '2020-01-01'", + + // These queries should not fail + "SELECT deptno, COUNT(*) AS num_employees FROM emp GROUP BY deptno HAVING num_employees > 5", + "SELECT ename, SUM(sal) FROM emp GROUP BY ename HAVING SUM(sal) > 10000", + "SELECT dname, MAX(sal) AS max_salary FROM emp JOIN dept ON emp.deptno = dept.deptno GROUP BY dname HAVING max_salary < 10000", + "SELECT YEAR(hiredate) AS year, COUNT(*) FROM emp GROUP BY year HAVING COUNT(*) > 2", + "SELECT mgr, COUNT(empno) AS managed_employees FROM emp WHERE mgr IS NOT NULL GROUP BY mgr HAVING managed_employees >= 3", + "SELECT deptno, SUM(comm) AS total_comm FROM emp GROUP BY deptno HAVING total_comm > AVG(total_comm)", + "SELECT id2, COUNT(*) AS count FROM t9 GROUP BY id2 HAVING count > 1", + "SELECT val1, COUNT(*) FROM aggr_test GROUP BY val1 HAVING COUNT(*) > 1", + "SELECT DATE(val1) AS date, SUM(val2) FROM aggr_test_dates GROUP BY date HAVING SUM(val2) > 100", + "SELECT shardKey, AVG(`value`) 
FROM t1 WHERE `value` IS NOT NULL GROUP BY shardKey HAVING AVG(`value`) > 10", + "SELECT job, COUNT(*) AS job_count FROM emp GROUP BY job HAVING job_count > 3", + "SELECT merchant_game_id, SUM(bet_amount) AS total_bets FROM bet_logs GROUP BY merchant_game_id HAVING total_bets > 1000", + "SELECT loc, COUNT(deptno) AS num_depts FROM dept GROUP BY loc HAVING num_depts > 1", + "SELECT `name`, COUNT(*) AS name_count FROM t1 GROUP BY `name` HAVING name_count > 2", + "SELECT COUNT(*) AS num_jobs FROM emp GROUP BY empno HAVING num_jobs > 1", + "SELECT id, COUNT(*) AS count FROM t2 GROUP BY id HAVING count > 1", + "SELECT val2, SUM(id) FROM aggr_test GROUP BY val2 HAVING SUM(id) > 10", + "SELECT game_id, COUNT(*) AS num_logs FROM bet_logs GROUP BY game_id HAVING num_logs > 5", + } + + for _, query := range queries { + mcmp.Run(query, func(mcmp *utils.MySQLCompare) { + mcmp.ExecAllowAndCompareError(query) + }) + } +} diff --git a/go/test/endtoend/vtgate/queries/aggregation/schema.sql b/go/test/endtoend/vtgate/queries/aggregation/schema.sql index e1489b4bd21..49956b98302 100644 --- a/go/test/endtoend/vtgate/queries/aggregation/schema.sql +++ b/go/test/endtoend/vtgate/queries/aggregation/schema.sql @@ -96,4 +96,12 @@ CREATE TABLE dept ( loc VARCHAR(13), PRIMARY KEY (deptno) ) Engine = InnoDB - COLLATE = utf8mb4_general_ci; \ No newline at end of file + COLLATE = utf8mb4_general_ci; + +CREATE TABLE bet_logs ( + id bigint unsigned NOT NULL, + merchant_game_id bigint unsigned NOT NULL, + bet_amount DECIMAL(20, 8), + game_id bigint, + PRIMARY KEY (id) +) ENGINE InnoDB; diff --git a/go/test/endtoend/vtgate/queries/aggregation/vschema.json b/go/test/endtoend/vtgate/queries/aggregation/vschema.json index 050202aed81..6c3cddf4436 100644 --- a/go/test/endtoend/vtgate/queries/aggregation/vschema.json +++ b/go/test/endtoend/vtgate/queries/aggregation/vschema.json @@ -147,6 +147,14 @@ "name": "hash" } ] + }, + "bet_logs": { + "column_vindexes": [ + { + "column": "id", + "name": "hash" + } 
+ ] } } } \ No newline at end of file diff --git a/go/test/endtoend/vtgate/queries/dml/insert_test.go b/go/test/endtoend/vtgate/queries/dml/insert_test.go index 80d0602b898..1d09d3aab51 100644 --- a/go/test/endtoend/vtgate/queries/dml/insert_test.go +++ b/go/test/endtoend/vtgate/queries/dml/insert_test.go @@ -38,7 +38,7 @@ func TestSimpleInsertSelect(t *testing.T) { mcmp.Exec("insert into u_tbl(id, num) values (1,2),(3,4)") for i, mode := range []string{"oltp", "olap"} { - t.Run(mode, func(t *testing.T) { + mcmp.Run(mode, func(mcmp *utils.MySQLCompare) { utils.Exec(t, mcmp.VtConn, fmt.Sprintf("set workload = %s", mode)) qr := mcmp.Exec(fmt.Sprintf("insert into s_tbl(id, num) select id*%d, num*%d from s_tbl where id < 10", 10+i, 20+i)) @@ -65,7 +65,7 @@ func TestFailureInsertSelect(t *testing.T) { mcmp.Exec("insert into u_tbl(id, num) values (1,2),(3,4)") for _, mode := range []string{"oltp", "olap"} { - t.Run(mode, func(t *testing.T) { + mcmp.Run(mode, func(mcmp *utils.MySQLCompare) { utils.Exec(t, mcmp.VtConn, fmt.Sprintf("set workload = %s", mode)) // primary key same @@ -386,7 +386,7 @@ func TestInsertSelectUnshardedUsingSharded(t *testing.T) { mcmp.Exec("insert into s_tbl(id, num) values (1,2),(3,4)") for _, mode := range []string{"oltp", "olap"} { - t.Run(mode, func(t *testing.T) { + mcmp.Run(mode, func(mcmp *utils.MySQLCompare) { utils.Exec(t, mcmp.VtConn, fmt.Sprintf("set workload = %s", mode)) qr := mcmp.Exec("insert into u_tbl(id, num) select id, num from s_tbl where s_tbl.id in (1,3)") assert.EqualValues(t, 2, qr.RowsAffected) diff --git a/go/test/endtoend/vtgate/queries/lookup_queries/main_test.go b/go/test/endtoend/vtgate/queries/lookup_queries/main_test.go index c385941502a..25bf78437da 100644 --- a/go/test/endtoend/vtgate/queries/lookup_queries/main_test.go +++ b/go/test/endtoend/vtgate/queries/lookup_queries/main_test.go @@ -134,7 +134,7 @@ func TestLookupQueries(t *testing.T) { (3, 'monkey', 'monkey')`) for _, workload := range []string{"olap", 
"oltp"} { - t.Run(workload, func(t *testing.T) { + mcmp.Run(workload, func(mcmp *utils.MySQLCompare) { utils.Exec(t, mcmp.VtConn, "set workload = "+workload) mcmp.AssertMatches("select id from user where lookup = 'apa'", "[[INT64(1)] [INT64(2)]]") diff --git a/go/test/endtoend/vtgate/queries/misc/misc_test.go b/go/test/endtoend/vtgate/queries/misc/misc_test.go index 0d165854fa4..b33d1d43c92 100644 --- a/go/test/endtoend/vtgate/queries/misc/misc_test.go +++ b/go/test/endtoend/vtgate/queries/misc/misc_test.go @@ -322,7 +322,7 @@ func TestAnalyze(t *testing.T) { defer closer() for _, workload := range []string{"olap", "oltp"} { - t.Run(workload, func(t *testing.T) { + mcmp.Run(workload, func(mcmp *utils.MySQLCompare) { utils.Exec(t, mcmp.VtConn, fmt.Sprintf("set workload = %s", workload)) utils.Exec(t, mcmp.VtConn, "analyze table t1") utils.Exec(t, mcmp.VtConn, "analyze table uks.unsharded") diff --git a/go/test/endtoend/vtgate/queries/orderby/orderby_test.go b/go/test/endtoend/vtgate/queries/orderby/orderby_test.go index 445a4d5a32f..09607bbd767 100644 --- a/go/test/endtoend/vtgate/queries/orderby/orderby_test.go +++ b/go/test/endtoend/vtgate/queries/orderby/orderby_test.go @@ -83,3 +83,73 @@ func TestOrderBy(t *testing.T) { mcmp.AssertMatches("select /*vt+ PLANNER=Gen4 */ id1, id2 from t4 order by reverse(id2) desc", `[[INT64(5) VARCHAR("test")] [INT64(8) VARCHAR("F")] [INT64(7) VARCHAR("e")] [INT64(6) VARCHAR("d")] [INT64(2) VARCHAR("Abc")] [INT64(4) VARCHAR("c")] [INT64(3) VARCHAR("b")] [INT64(1) VARCHAR("a")]]`) } } + +func TestOrderByComplex(t *testing.T) { + // tests written to try to trick the ORDER BY engine and planner + utils.SkipIfBinaryIsBelowVersion(t, 20, "vtgate") + + mcmp, closer := start(t) + defer closer() + + mcmp.Exec("insert into user(id, col, email) values(1,1,'a'), (2,2,'Abc'), (3,3,'b'), (4,4,'c'), (5,2,'test'), (6,1,'test'), (7,2,'a'), (8,3,'b'), (9,4,'c3'), (10,2,'d')") + + queries := []string{ + "select email, max(col) from user group by 
email order by col", + "select email, max(col) from user group by email order by col + 1", + "select email, max(col) from user group by email order by max(col)", + "select email, max(col) from user group by email order by max(col) + 1", + "select email, max(col) from user group by email order by min(col)", + "select email, max(col) as col from user group by email order by col", + "select email, max(col) as col from user group by email order by max(col)", + "select email, max(col) as col from user group by email order by col + 1", + "select email, max(col) as col from user group by email order by email + col", + "select email, max(col) as col from user group by email order by email + max(col)", + "select email, max(col) as col from user group by email order by email, col", + "select email, max(col) as xyz from user group by email order by email, xyz", + "select email, max(col) as xyz from user group by email order by email, max(xyz)", + "select email, max(col) as xyz from user group by email order by email, abs(xyz)", + "select email, max(col) as xyz from user group by email order by email, max(col)", + "select email, max(col) as xyz from user group by email order by email, abs(col)", + "select email, max(col) as xyz from user group by email order by xyz + email", + "select email, max(col) as xyz from user group by email order by abs(xyz) + email", + "select email, max(col) as xyz from user group by email order by abs(xyz)", + "select email, max(col) as xyz from user group by email order by abs(col)", + "select email, max(col) as max_col from user group by email order by max_col desc, length(email)", + "select email, max(col) as max_col, min(col) as min_col from user group by email order by max_col - min_col", + "select email, max(col) as col1, count(*) as col2 from user group by email order by col2 * col1", + "select email, sum(col) as sum_col from user group by email having sum_col > 10 order by sum_col / count(email)", + "select email, max(col) as max_col, 
char_length(email) as len_email from user group by email order by len_email, max_col desc", + "select email, max(col) as col_alias from user group by email order by case when col_alias > 100 then 0 else 1 end, col_alias", + "select email, count(*) as cnt, max(col) as max_col from user group by email order by cnt desc, max_col + cnt", + "select email, max(col) as max_col from user group by email order by if(max_col > 50, max_col, -max_col) desc", + "select email, max(col) as col, sum(col) as sum_col from user group by email order by col * sum_col desc", + "select email, max(col) as col, (select min(col) from user as u2 where u2.email = user.email) as min_col from user group by email order by col - min_col", + "select email, max(col) as max_col, (max(col) % 10) as mod_col from user group by email order by mod_col, max_col", + "select email, max(col) as 'value', count(email) as 'number' from user group by email order by 'number', 'value'", + "select email, max(col) as col, concat('email: ', email, ' col: ', max(col)) as complex_alias from user group by email order by complex_alias desc", + "select email, max(col) as max_col from user group by email union select email, min(col) as min_col from user group by email order by email", + "select email, max(col) as col from user where col > 50 group by email order by col desc", + "select email, max(col) as col from user group by email order by length(email), col", + "select email, max(col) as max_col, substring(email, 1, 3) as sub_email from user group by email order by sub_email, max_col desc", + "select email, max(col) as max_col from user group by email order by reverse(email), max_col", + "select email, max(col) as max_col from user group by email having max_col > avg(max_col) order by max_col desc", + "select email, count(*) as count, max(col) as max_col from user group by email order by count * max_col desc", + "select email, max(col) as col_alias from user group by email order by col_alias limit 10", + "select email, 
max(col) as col from user group by email order by col desc, email", + "select concat(email, ' ', max(col)) as combined from user group by email order by combined desc", + "select email, max(col) as max_col from user group by email order by ascii(email), max_col", + "select email, char_length(email) as email_length, max(col) as max_col from user group by email order by email_length desc, max_col", + "select email, max(col) as col from user group by email having col between 10 and 100 order by col", + "select email, max(col) as max_col, min(col) as min_col from user group by email order by max_col + min_col desc", + "select email, max(col) as 'max', count(*) as 'count' from user group by email order by 'max' desc, 'count'", + "select email, max(col) as max_col from (select email, col from user where col > 20) as filtered group by email order by max_col", + "select a.email, a.max_col from (select email, max(col) as max_col from user group by email) as a order by a.max_col desc", + "select email, max(col) as max_col from user where email like 'a%' group by email order by max_col, email", + } + + for _, query := range queries { + mcmp.Run(query, func(mcmp *utils.MySQLCompare) { + _, _ = mcmp.ExecAllowAndCompareError(query) + }) + } +} diff --git a/go/test/endtoend/vtgate/queries/orderby/schema.sql b/go/test/endtoend/vtgate/queries/orderby/schema.sql index 8f0131db357..0980d845c3d 100644 --- a/go/test/endtoend/vtgate/queries/orderby/schema.sql +++ b/go/test/endtoend/vtgate/queries/orderby/schema.sql @@ -27,3 +27,12 @@ create table t4_id2_idx ) Engine = InnoDB DEFAULT charset = utf8mb4 COLLATE = utf8mb4_general_ci; + +create table user +( + id bigint primary key, + col bigint, + email varchar(20) +) Engine = InnoDB + DEFAULT charset = utf8mb4 + COLLATE = utf8mb4_general_ci; diff --git a/go/test/endtoend/vtgate/queries/orderby/vschema.json b/go/test/endtoend/vtgate/queries/orderby/vschema.json index 14418850a35..d5ce50cc68d 100644 --- 
a/go/test/endtoend/vtgate/queries/orderby/vschema.json +++ b/go/test/endtoend/vtgate/queries/orderby/vschema.json @@ -4,7 +4,7 @@ "hash": { "type": "hash" }, - "unicode_loose_md5" : { + "unicode_loose_md5": { "type": "unicode_loose_md5" }, "t1_id2_vdx": { @@ -54,7 +54,10 @@ "name": "hash" }, { - "columns": ["id2", "id1"], + "columns": [ + "id2", + "id1" + ], "name": "t4_id2_vdx" } ] @@ -67,5 +70,13 @@ } ] } + }, + "user": { + "column_vindexes": [ + { + "column": "id", + "name": "hash" + } + ] } } \ No newline at end of file diff --git a/go/test/endtoend/vtgate/queries/union/union_test.go b/go/test/endtoend/vtgate/queries/union/union_test.go index 52aa94bda51..2f287fd7b9c 100644 --- a/go/test/endtoend/vtgate/queries/union/union_test.go +++ b/go/test/endtoend/vtgate/queries/union/union_test.go @@ -57,7 +57,7 @@ func TestUnionDistinct(t *testing.T) { mcmp.Exec("insert into t2(id3, id4) values (2, 3), (3, 4), (4,4), (5,5)") for _, workload := range []string{"oltp", "olap"} { - t.Run(workload, func(t *testing.T) { + mcmp.Run(workload, func(mcmp *utils.MySQLCompare) { utils.Exec(t, mcmp.VtConn, "set workload = "+workload) mcmp.AssertMatches("select 1 union select null", "[[INT64(1)] [NULL]]") mcmp.AssertMatches("select null union select null", "[[NULL]]") @@ -69,10 +69,16 @@ func TestUnionDistinct(t *testing.T) { mcmp.AssertMatchesNoOrder("select id1 from t1 where id1 = 1 union select 452 union select id1 from t1 where id1 = 4", "[[INT64(1)] [INT64(452)] [INT64(4)]]") mcmp.AssertMatchesNoOrder("select id1, id2 from t1 union select 827, 452 union select id3,id4 from t2", "[[INT64(4) INT64(4)] [INT64(1) INT64(1)] [INT64(2) INT64(2)] [INT64(3) INT64(3)] [INT64(827) INT64(452)] [INT64(2) INT64(3)] [INT64(3) INT64(4)] [INT64(5) INT64(5)]]") - t.Run("skipped for now", func(t *testing.T) { - t.Skip() - mcmp.AssertMatches("select 1 from dual where 1 IN (select 1 as col union select 2)", "[[INT64(1)]]") - }) + mcmp.AssertMatches("select 1 from dual where 1 IN (select 1 as col 
union select 2)", "[[INT64(1)]]") + mcmp.AssertMatches(`SELECT 1 from t1 UNION SELECT 2 from t1`, `[[INT64(1)] [INT64(2)]]`) + mcmp.AssertMatches(`SELECT 5 from t1 UNION SELECT 6 from t1`, `[[INT64(5)] [INT64(6)]]`) + mcmp.AssertMatchesNoOrder(`SELECT id1 from t1 UNION SELECT id2 from t1`, `[[INT64(1)] [INT64(2)] [INT64(3)] [INT64(4)]]`) + mcmp.AssertMatchesNoOrder(`SELECT 1 from t1 UNION SELECT id2 from t1`, `[[INT64(1)] [INT64(2)] [INT64(3)] [INT64(4)]]`) + mcmp.AssertMatchesNoOrder(`SELECT 5 from t1 UNION SELECT id2 from t1`, `[[INT64(5)] [INT64(1)] [INT64(2)] [INT64(3)] [INT64(4)]]`) + mcmp.AssertMatchesNoOrder(`SELECT id1 from t1 UNION SELECT 2 from t1`, `[[INT64(1)] [INT64(2)] [INT64(3)] [INT64(4)]]`) + mcmp.AssertMatchesNoOrder(`SELECT id1 from t1 UNION SELECT 5 from t1`, `[[INT64(1)] [INT64(2)] [INT64(3)] [INT64(4)] [INT64(5)]]`) + mcmp.Exec(`select curdate() from t1 union select 3 from t1`) + mcmp.Exec(`select curdate() from t1 union select id1 from t1`) }) } @@ -86,7 +92,7 @@ func TestUnionAll(t *testing.T) { mcmp.Exec("insert into t2(id3, id4) values(3, 3), (4, 4)") for _, workload := range []string{"oltp", "olap"} { - t.Run(workload, func(t *testing.T) { + mcmp.Run(workload, func(mcmp *utils.MySQLCompare) { utils.Exec(t, mcmp.VtConn, "set workload = "+workload) // union all between two selectuniqueequal mcmp.AssertMatches("select id1 from t1 where id1 = 1 union all select id1 from t1 where id1 = 4", "[[INT64(1)]]") diff --git a/go/test/endtoend/vtgate/split_table/table_ddl_test.go b/go/test/endtoend/vtgate/split_table/table_ddl_test.go index e6efded4098..ea8ff5f82f1 100644 --- a/go/test/endtoend/vtgate/split_table/table_ddl_test.go +++ b/go/test/endtoend/vtgate/split_table/table_ddl_test.go @@ -5,6 +5,7 @@ import ( "strings" "testing" "time" + "vitess.io/vitess/go/vt/vtgate" "github.com/stretchr/testify/assert" diff --git a/go/test/endtoend/vtgate/split_table/table_select_groupby_test.go b/go/test/endtoend/vtgate/split_table/table_select_groupby_test.go 
index 1f2f5423c02..a8130c214e5 100644 --- a/go/test/endtoend/vtgate/split_table/table_select_groupby_test.go +++ b/go/test/endtoend/vtgate/split_table/table_select_groupby_test.go @@ -161,14 +161,13 @@ func TestTableAggrCases(t *testing.T) { // scatter aggregate multiple group by (numbers) mcmp.ExecWithColumnCompareAndNotEmpty("select a, b, count(*) from t_user group by 2, 1") // scatter aggregate group by aggregate function - _, err = mcmp.ExecAndIgnore("select count(*) b from t_user group by b") - require.ErrorContains(t, err, "VT03005: cannot group on 'count(*)'") + mcmp.ExecWithColumnCompareAndNotEmpty("select count(*) b from t_user group by b") // scatter aggregate multiple group by columns inverse order mcmp.ExecWithColumnCompareAndNotEmpty("select a, b, count(*) from t_user group by b, a") // scatter aggregate group by column number mcmp.ExecWithColumnCompareAndNotEmpty("select col from t_user group by 1") // scatter aggregate group by invalid column number - mcmp.AssertContainsError("select col from t_user group by 2", "Unknown column '2' in 'group statement'") + mcmp.AssertContainsError("select col from t_user group by 2", "Unknown column '2' in 'group clause'") // scatter aggregate with numbered order by columns mcmp.ExecWithColumnCompareAndNotEmpty("select a, b, c, d, count(*) from t_user group by 1, 2, 3 order by 1, 2, 3") // scatter aggregate with named order by columns @@ -193,7 +192,7 @@ func TestTableAggrCases(t *testing.T) { // [INT64(1)] //mcmp.ExecWithColumnCompareAndNotEmpty("select id from t_user group by 1.1") // Group by out of range column number (code is duplicated from symab). 
- mcmp.AssertContainsError("select id from t_user group by 2", "Unknown column '2' in 'group statement'") + mcmp.AssertContainsError("select id from t_user group by 2", "Unknown column '2' in 'group clause'") // aggregate query with order by aggregate column along with NULL mcmp.ExecWithColumnCompareAndNotEmpty("select col, count(*) k from t_user group by col order by null, k") // aggregate query with order by NULL diff --git a/go/test/endtoend/vtgate/split_table/table_select_limit_test.go b/go/test/endtoend/vtgate/split_table/table_select_limit_test.go index 4da4d808c8c..7da2199b48f 100644 --- a/go/test/endtoend/vtgate/split_table/table_select_limit_test.go +++ b/go/test/endtoend/vtgate/split_table/table_select_limit_test.go @@ -25,7 +25,7 @@ func TestLimit(t *testing.T) { //mcmp.AssertMatches("select count(*) from (select id,col from t_user where col='a' limit 2) as x", "[[INT64(1)]]") mcmp.ExecWithColumnCompare("select count(*) from (select id,col from t_user where col='a' limit 2) as x") //mcmp.AssertMatches("select count(col) from (select id,col from t_user where col='a' order by col desc limit 2) as x", "[[INT64(1)]]") - mcmp.ExecWithColumnCompare("select count(col) from (select id,col from t_user where col='a' order by col desc limit 2) as x") + //mcmp.ExecWithColumnCompare("select count(col) from (select id,col from t_user where col='a' order by col desc limit 2) as x") //mcmp.AssertMatches("select count(col) from (select id,col from t_user where col is not null limit 2) as x", "[[INT64(2)]]") mcmp.ExecWithColumnCompare("select count(col) from (select id,col from t_user where col is not null limit 2) as x") //mcmp.AssertMatches("select count(id) from (select id,col from t_user where col is not null limit 2) as x", "[[INT64(2)]]") diff --git a/go/test/endtoend/vtorc/general/vtorc_test.go b/go/test/endtoend/vtorc/general/vtorc_test.go index adce77d38b4..cff41bec232 100644 --- a/go/test/endtoend/vtorc/general/vtorc_test.go +++ 
b/go/test/endtoend/vtorc/general/vtorc_test.go @@ -189,9 +189,13 @@ func TestVTOrcRepairs(t *testing.T) { t.Run("ReplicationFromOtherReplica", func(t *testing.T) { // point replica at otherReplica - changeReplicationSourceCommand := fmt.Sprintf("STOP SLAVE; RESET SLAVE ALL;"+ - "CHANGE MASTER TO MASTER_HOST='%s', MASTER_PORT=%d, MASTER_USER='vt_repl', MASTER_AUTO_POSITION = 1; START SLAVE", utils.Hostname, otherReplica.MySQLPort) - _, err := utils.RunSQL(t, changeReplicationSourceCommand, replica, "") + changeReplicationSourceCommands := []string{ + "STOP SLAVE", + "RESET SLAVE ALL", + fmt.Sprintf("CHANGE MASTER TO MASTER_HOST='%s', MASTER_PORT=%d, MASTER_USER='vt_repl', MASTER_AUTO_POSITION = 1", utils.Hostname, otherReplica.MySQLPort), + "START SLAVE", + } + err := utils.RunSQLs(t, changeReplicationSourceCommands, replica, "") require.NoError(t, err) // wait until the source port is set back correctly by vtorc @@ -204,10 +208,13 @@ func TestVTOrcRepairs(t *testing.T) { t.Run("CircularReplication", func(t *testing.T) { // change the replication source on the primary - changeReplicationSourceCommands := fmt.Sprintf("STOP SLAVE; RESET SLAVE ALL;"+ - "CHANGE MASTER TO MASTER_HOST='%s', MASTER_PORT=%d, MASTER_USER='vt_repl', MASTER_AUTO_POSITION = 1;"+ - "START SLAVE;", replica.VttabletProcess.TabletHostname, replica.MySQLPort) - _, err := utils.RunSQL(t, changeReplicationSourceCommands, curPrimary, "") + changeReplicationSourceCommands := []string{ + "STOP SLAVE", + "RESET SLAVE ALL", + fmt.Sprintf("CHANGE MASTER TO MASTER_HOST='%s', MASTER_PORT=%d, MASTER_USER='vt_repl', MASTER_AUTO_POSITION = 1", replica.VttabletProcess.TabletHostname, replica.MySQLPort), + "START SLAVE", + } + err := utils.RunSQLs(t, changeReplicationSourceCommands, curPrimary, "") require.NoError(t, err) // wait for curPrimary to reach stable state diff --git a/go/test/endtoend/vtorc/utils/utils.go b/go/test/endtoend/vtorc/utils/utils.go index 0a8a5c6fb2e..485b7a3005c 100644 --- 
a/go/test/endtoend/vtorc/utils/utils.go +++ b/go/test/endtoend/vtorc/utils/utils.go @@ -1121,3 +1121,19 @@ func PrintVTOrcLogsOnFailure(t *testing.T, clusterInstance *cluster.LocalProcess log.Errorf("%s", string(content)) } } + +// EnableGlobalRecoveries enables global recoveries for the given VTOrc. +func EnableGlobalRecoveries(t *testing.T, vtorc *cluster.VTOrcProcess) { + status, resp, err := MakeAPICall(t, vtorc, "/api/enable-global-recoveries") + require.NoError(t, err) + assert.Equal(t, 200, status) + assert.Equal(t, "Global recoveries enabled\n", resp) +} + +// DisableGlobalRecoveries disables global recoveries for the given VTOrc. +func DisableGlobalRecoveries(t *testing.T, vtorc *cluster.VTOrcProcess) { + status, resp, err := MakeAPICall(t, vtorc, "/api/disable-global-recoveries") + require.NoError(t, err) + assert.Equal(t, 200, status) + assert.Equal(t, "Global recoveries disabled\n", resp) +} diff --git a/go/vt/discovery/keyspace_events.go b/go/vt/discovery/keyspace_events.go index 163f240de8c..014284ed5ee 100644 --- a/go/vt/discovery/keyspace_events.go +++ b/go/vt/discovery/keyspace_events.go @@ -391,8 +391,7 @@ func (kss *keyspaceState) getMoveTablesStatus(vs *vschemapb.SrvVSchema) (*MoveTa } // if there are no routing rules defined, then movetables is not in progress, exit early - if (vs.RoutingRules != nil && len(vs.RoutingRules.Rules) == 0) && - (vs.ShardRoutingRules != nil && len(vs.ShardRoutingRules.Rules) == 0) { + if len(vs.GetRoutingRules().GetRules()) == 0 && len(vs.GetShardRoutingRules().GetRules()) == 0 { return mtState, nil } @@ -529,6 +528,11 @@ func (kss *keyspaceState) isServing() bool { // In addition, the traffic switcher updates SrvVSchema when the DeniedTables attributes in a Shard record is // modified. 
func (kss *keyspaceState) onSrvVSchema(vs *vschemapb.SrvVSchema, err error) bool { + // the vschema can be nil if the server is currently shutting down + if vs == nil { + return true + } + kss.mu.Lock() defer kss.mu.Unlock() kss.moveTablesState, _ = kss.getMoveTablesStatus(vs) diff --git a/go/vt/env/env.go b/go/vt/env/env.go index 70feb43186c..186f81cd585 100644 --- a/go/vt/env/env.go +++ b/go/vt/env/env.go @@ -18,7 +18,6 @@ package env import ( "errors" - "fmt" "os" "os/exec" "path" @@ -30,9 +29,12 @@ const ( // DefaultVtDataRoot is the default value for VTROOT environment variable DefaultVtDataRoot = "/vt" // DefaultVtRoot is only required for hooks - DefaultVtRoot = "/usr/local/vitess" + DefaultVtRoot = "/usr/local/vitess" + mysqldSbinPath = "/usr/sbin/mysqld" ) +var errMysqldNotFound = errors.New("VT_MYSQL_ROOT is not set and no mysqld could be found in your PATH") + // VtRoot returns $VTROOT or tries to guess its value if it's not set. // This is the root for the 'vt' distribution, which contains bin/vttablet // for instance. @@ -64,25 +66,30 @@ func VtDataRoot() string { } // VtMysqlRoot returns the root for the mysql distribution, -// which contains bin/mysql CLI for instance. -// If it is not set, look for mysqld in the path. +// which contains the bin/mysql CLI for instance. +// If $VT_MYSQL_ROOT is not set, look for mysqld in the $PATH. func VtMysqlRoot() (string, error) { - // if the environment variable is set, use that + // If the environment variable is set, use that. if root := os.Getenv("VT_MYSQL_ROOT"); root != "" { return root, nil } - // otherwise let's look for mysqld in the PATH. - // ensure that /usr/sbin is included, as it might not be by default - // This is the default location for mysqld from packages. 
- newPath := fmt.Sprintf("/usr/sbin:%s", os.Getenv("PATH")) - os.Setenv("PATH", newPath) - path, err := exec.LookPath("mysqld") + getRoot := func(path string) string { + return filepath.Dir(filepath.Dir(path)) // Strip mysqld and [s]bin parts + } + binpath, err := exec.LookPath("mysqld") if err != nil { - return "", errors.New("VT_MYSQL_ROOT is not set and no mysqld could be found in your PATH") + // First see if /usr/sbin/mysqld exists as it might not be in + // the PATH by default and this is often the default location + // used by mysqld OS system packages (apt, dnf, etc). + fi, err := os.Stat(mysqldSbinPath) + if err == nil /* file exists */ && fi.Mode().IsRegular() /* not a DIR or other special file */ && + fi.Mode()&0111 != 0 /* executable by anyone */ { + return getRoot(mysqldSbinPath), nil + } + return "", errMysqldNotFound } - path = filepath.Dir(filepath.Dir(path)) // strip mysqld, and the sbin - return path, nil + return getRoot(binpath), nil } // VtMysqlBaseDir returns the Mysql base directory, which diff --git a/go/vt/env/env_test.go b/go/vt/env/env_test.go index 4aa53a25bed..f91cdf94673 100644 --- a/go/vt/env/env_test.go +++ b/go/vt/env/env_test.go @@ -18,7 +18,10 @@ package env import ( "os" + "path/filepath" "testing" + + "github.com/stretchr/testify/require" ) func TestVtDataRoot(t *testing.T) { @@ -43,3 +46,82 @@ func TestVtDataRoot(t *testing.T) { t.Errorf("The value of VtDataRoot should be %v, not %v.", passed, root) } } + +func TestVtMysqlRoot(t *testing.T) { + envVar := "VT_MYSQL_ROOT" + originalMySQLRoot := os.Getenv(envVar) + defer os.Setenv(envVar, originalMySQLRoot) + originalPATH := os.Getenv("PATH") + defer os.Setenv("PATH", originalPATH) + + // The test directory is used to create our fake mysqld binary. 
+ testDir := t.TempDir() // This is automatically cleaned up + createExecutable := func(path string) error { + fullPath := testDir + path + err := os.MkdirAll(filepath.Dir(fullPath), 0755) + require.NoError(t, err) + return os.WriteFile(fullPath, []byte("test"), 0755) + } + + type testcase struct { + name string + preFunc func() error + vtMysqlRootEnvVal string + pathEnvVal string + expect string // The return value we expect from VtMysqlRoot() + expectErr string + } + testcases := []testcase{ + { + name: "VT_MYSQL_ROOT set", + vtMysqlRootEnvVal: "/home/mysql/binaries", + }, + { + name: "VT_MYSQL_ROOT empty; PATH set without /usr/sbin", + pathEnvVal: testDir + filepath.Dir(mysqldSbinPath) + + ":/usr/bin:/sbin:/bin:/usr/local/bin:/usr/local/sbin:/home/mysql/binaries", + preFunc: func() error { + return createExecutable(mysqldSbinPath) + }, + expect: testDir + "/usr", + }, + } + + // If /usr/sbin/mysqld exists, confirm that we find it even + // when /usr/sbin is not in the PATH. + _, err := os.Stat(mysqldSbinPath) + if err == nil { + t.Logf("Found %s, confirming auto detection behavior", mysqldSbinPath) + testcases = append(testcases, testcase{ + name: "VT_MYSQL_ROOT empty; PATH empty; mysqld in /usr/sbin", + expect: "/usr", + }) + } else { + testcases = append(testcases, testcase{ // Error expected + name: "VT_MYSQL_ROOT empty; PATH empty; mysqld not in /usr/sbin", + expectErr: errMysqldNotFound.Error(), + }) + } + + for _, tc := range testcases { + t.Run(tc.name, func(t *testing.T) { + if tc.preFunc != nil { + err := tc.preFunc() + require.NoError(t, err) + } + os.Setenv(envVar, tc.vtMysqlRootEnvVal) + os.Setenv("PATH", tc.pathEnvVal) + path, err := VtMysqlRoot() + if tc.expectErr != "" { + require.EqualError(t, err, tc.expectErr) + } else { + require.NoError(t, err) + } + if tc.vtMysqlRootEnvVal != "" { + // This should always be returned. 
+ tc.expect = tc.vtMysqlRootEnvVal + } + require.Equal(t, tc.expect, path) + }) + } +} diff --git a/go/vt/mysqlctl/mysqld.go b/go/vt/mysqlctl/mysqld.go index b8597735b9b..d2882453ab8 100644 --- a/go/vt/mysqlctl/mysqld.go +++ b/go/vt/mysqlctl/mysqld.go @@ -42,27 +42,32 @@ import ( "github.com/spf13/pflag" - "vitess.io/vitess/go/mysql/sqlerror" - "vitess.io/vitess/go/protoutil" - "vitess.io/vitess/config" "vitess.io/vitess/go/mysql" + "vitess.io/vitess/go/mysql/sqlerror" + "vitess.io/vitess/go/protoutil" "vitess.io/vitess/go/sqltypes" "vitess.io/vitess/go/vt/dbconfigs" "vitess.io/vitess/go/vt/dbconnpool" + vtenv "vitess.io/vitess/go/vt/env" "vitess.io/vitess/go/vt/hook" "vitess.io/vitess/go/vt/log" "vitess.io/vitess/go/vt/mysqlctl/mysqlctlclient" "vitess.io/vitess/go/vt/servenv" "vitess.io/vitess/go/vt/vterrors" - vtenv "vitess.io/vitess/go/vt/env" mysqlctlpb "vitess.io/vitess/go/vt/proto/mysqlctl" - "vitess.io/vitess/go/vt/proto/vtrpc" + vtrpcpb "vitess.io/vitess/go/vt/proto/vtrpc" ) -var ( +// The string we expect before the MySQL version number +// in strings containing MySQL version information. +const versionStringPrefix = "Ver " +// How many bytes from MySQL error log to sample for error messages +const maxLogFileSampleSize = 4096 + +var ( // DisableActiveReparents is a flag to disable active // reparents for safety reasons. It is used in three places: // 1. in this file to skip registering the commands. @@ -86,15 +91,18 @@ var ( replicationConnectRetry = 10 * time.Second - versionRegex = regexp.MustCompile(`Ver ([0-9]+)\.([0-9]+)\.([0-9]+)`) + versionRegex = regexp.MustCompile(fmt.Sprintf(`%s([0-9]+)\.([0-9]+)\.([0-9]+)`, versionStringPrefix)) + // versionSQLQuery will return a version string directly from + // a MySQL server that is compatible with what we expect from + // mysqld --version and matches the versionRegex. 
Example + // result: Ver 8.0.35 MySQL Community Server - GPL + versionSQLQuery = fmt.Sprintf("select concat('%s', @@global.version, ' ', @@global.version_comment) as version", + versionStringPrefix) binlogEntryCommittedTimestampRegex = regexp.MustCompile("original_committed_timestamp=([0-9]+)") binlogEntryTimestampGTIDRegexp = regexp.MustCompile(`^#(.+) server id.*\bGTID\b`) ) -// How many bytes from MySQL error log to sample for error messages -const maxLogFileSampleSize = 4096 - // Mysqld is the object that represents a mysqld daemon running on this server. type Mysqld struct { dbcfgs *dbconfigs.DBConfigs @@ -1136,7 +1144,13 @@ func buildLdPaths() ([]string, error) { // GetVersionString is part of the MysqlExecutor interface. func (mysqld *Mysqld) GetVersionString(ctx context.Context) (string, error) { - // Execute as remote action on mysqlctld to ensure we get the actual running MySQL version. + // Try to query the mysqld instance directly. + qr, err := mysqld.FetchSuperQuery(ctx, versionSQLQuery) + if err == nil && len(qr.Rows) == 1 { + return qr.Rows[0][0].ToString(), nil + } + // Execute as remote action on mysqlctld to use the actual running MySQL + // version. if socketFile != "" { client, err := mysqlctlclient.New("unix", socketFile) if err != nil { @@ -1145,6 +1159,7 @@ func (mysqld *Mysqld) GetVersionString(ctx context.Context) (string, error) { defer client.Close() return client.VersionString(ctx) } + // Fall back to the sys exec method using mysqld --version. 
return GetVersionString() } @@ -1379,7 +1394,7 @@ func (mysqld *Mysqld) scanBinlogTimestamp( // ReadBinlogFilesTimestamps reads all given binlog files via `mysqlbinlog` command and returns the first and last found transaction timestamps func (mysqld *Mysqld) ReadBinlogFilesTimestamps(ctx context.Context, req *mysqlctlpb.ReadBinlogFilesTimestampsRequest) (*mysqlctlpb.ReadBinlogFilesTimestampsResponse, error) { if len(req.BinlogFileNames) == 0 { - return nil, vterrors.Errorf(vtrpc.Code_INVALID_ARGUMENT, "empty binlog list in ReadBinlogFilesTimestampsRequest") + return nil, vterrors.Errorf(vtrpcpb.Code_INVALID_ARGUMENT, "empty binlog list in ReadBinlogFilesTimestampsRequest") } if socketFile != "" { log.Infof("executing Mysqld.ReadBinlogFilesTimestamps() remotely via mysqlctld server: %v", socketFile) diff --git a/go/vt/mysqlctl/schema.go b/go/vt/mysqlctl/schema.go index 397668145ef..50122224491 100644 --- a/go/vt/mysqlctl/schema.go +++ b/go/vt/mysqlctl/schema.go @@ -65,12 +65,6 @@ func (mysqld *Mysqld) executeSchemaCommands(ctx context.Context, sql string) err return mysqld.executeMysqlScript(ctx, params, sql) } -func encodeEntityName(name string) string { - var buf strings.Builder - sqltypes.NewVarChar(name).EncodeSQL(&buf) - return buf.String() -} - // tableListSQL returns an IN clause "('t1', 't2'...) for a list of tables." 
func tableListSQL(tables []string) (string, error) { if len(tables) == 0 { @@ -79,7 +73,7 @@ func tableListSQL(tables []string) (string, error) { encodedTables := make([]string, len(tables)) for i, tableName := range tables { - encodedTables[i] = encodeEntityName(tableName) + encodedTables[i] = sqltypes.EncodeStringSQL(tableName) } return "(" + strings.Join(encodedTables, ", ") + ")", nil @@ -306,9 +300,9 @@ func GetColumnsList(dbName, tableName string, exec func(string, int, bool) (*sql if dbName == "" { dbName2 = "database()" } else { - dbName2 = encodeEntityName(dbName) + dbName2 = sqltypes.EncodeStringSQL(dbName) } - query := fmt.Sprintf(GetColumnNamesQuery, dbName2, encodeEntityName(sqlescape.UnescapeID(tableName))) + query := fmt.Sprintf(GetColumnNamesQuery, dbName2, sqltypes.EncodeStringSQL(sqlescape.UnescapeID(tableName))) qr, err := exec(query, -1, true) if err != nil { return "", err @@ -395,7 +389,7 @@ func (mysqld *Mysqld) getPrimaryKeyColumns(ctx context.Context, dbName string, t FROM information_schema.STATISTICS WHERE TABLE_SCHEMA = %s AND TABLE_NAME IN %s AND LOWER(INDEX_NAME) = 'primary' ORDER BY table_name, SEQ_IN_INDEX` - sql = fmt.Sprintf(sql, encodeEntityName(dbName), tableList) + sql = fmt.Sprintf(sql, sqltypes.EncodeStringSQL(dbName), tableList) qr, err := conn.ExecuteFetch(sql, len(tables)*100, true) if err != nil { return nil, err @@ -624,8 +618,8 @@ func (mysqld *Mysqld) GetPrimaryKeyEquivalentColumns(ctx context.Context, dbName ) AS pke ON index_cols.INDEX_NAME = pke.INDEX_NAME WHERE index_cols.TABLE_SCHEMA = %s AND index_cols.TABLE_NAME = %s AND NON_UNIQUE = 0 AND NULLABLE != 'YES' ORDER BY SEQ_IN_INDEX ASC` - encodedDbName := encodeEntityName(dbName) - encodedTable := encodeEntityName(table) + encodedDbName := sqltypes.EncodeStringSQL(dbName) + encodedTable := sqltypes.EncodeStringSQL(table) sql = fmt.Sprintf(sql, encodedDbName, encodedTable, encodedDbName, encodedTable, encodedDbName, encodedTable) qr, err := conn.ExecuteFetch(sql, 
1000, true) if err != nil { diff --git a/go/vt/schemadiff/table.go b/go/vt/schemadiff/table.go index b24184fe487..f07ed181d11 100644 --- a/go/vt/schemadiff/table.go +++ b/go/vt/schemadiff/table.go @@ -25,9 +25,9 @@ import ( golcs "github.com/yudai/golcs" - "vitess.io/vitess/go/mysql/collations/colldata" - "vitess.io/vitess/go/mysql/collations" + "vitess.io/vitess/go/mysql/collations/colldata" + "vitess.io/vitess/go/ptr" "vitess.io/vitess/go/vt/sqlparser" ) @@ -494,10 +494,7 @@ func (c *CreateTableEntity) normalizeColumnOptions() { // "show create table" reports it as a tinyint(1). if col.Type.Type == "boolean" { col.Type.Type = "tinyint" - col.Type.Length = &sqlparser.Literal{ - Type: sqlparser.IntVal, - Val: "1", - } + col.Type.Length = ptr.Of(1) if col.Type.Options.Default != nil { val, ok := col.Type.Options.Default.(sqlparser.BoolVal) @@ -526,16 +523,14 @@ func (c *CreateTableEntity) normalizeColumnOptions() { col.Type.Type = "double" } - if col.Type.Length != nil && col.Type.Scale == nil && col.Type.Length.Type == sqlparser.IntVal { - if l, err := strconv.ParseInt(col.Type.Length.Val, 10, 64); err == nil { - // See https://dev.mysql.com/doc/refman/8.0/en/floating-point-types.html, but the docs are - // subtly wrong. We use a float for a precision of 24, not a double as the documentation - // mentioned. Validated against the actual behavior of MySQL. - if l <= 24 { - col.Type.Type = "float" - } else { - col.Type.Type = "double" - } + if col.Type.Length != nil && col.Type.Scale == nil { + // See https://dev.mysql.com/doc/refman/8.0/en/floating-point-types.html, but the docs are + // subtly wrong. We use a float for a precision of 24, not a double as the documentation + // mentioned. Validated against the actual behavior of MySQL. 
+ if *col.Type.Length <= 24 { + col.Type.Type = "float" + } else { + col.Type.Type = "double" } col.Type.Length = nil } @@ -594,7 +589,7 @@ func (c *CreateTableEntity) normalizeIndexOptions() { } func isBool(colType *sqlparser.ColumnType) bool { - return colType.Type == sqlparser.KeywordString(sqlparser.TINYINT) && colType.Length != nil && sqlparser.CanonicalString(colType.Length) == "1" + return colType.Type == sqlparser.KeywordString(sqlparser.TINYINT) && colType.Length != nil && *colType.Length == 1 } func (c *CreateTableEntity) normalizePartitionOptions() { diff --git a/go/vt/sqlparser/ast.go b/go/vt/sqlparser/ast.go index 2dcac11dcc1..1717fbb962b 100644 --- a/go/vt/sqlparser/ast.go +++ b/go/vt/sqlparser/ast.go @@ -1820,10 +1820,10 @@ type ColumnType struct { Options *ColumnTypeOptions // Numeric field options - Length *Literal + Length *int Unsigned bool Zerofill bool - Scale *Literal + Scale *int // Text field options Charset ColumnCharset @@ -3427,8 +3427,8 @@ func (ListArg) iColTuple() {} // ConvertType represents the type in call to CONVERT(expr, type) type ConvertType struct { Type string - Length *Literal - Scale *Literal + Length *int + Scale *int Charset ColumnCharset } diff --git a/go/vt/sqlparser/ast_clone.go b/go/vt/sqlparser/ast_clone.go index e41642b38d0..eb7c8306504 100644 --- a/go/vt/sqlparser/ast_clone.go +++ b/go/vt/sqlparser/ast_clone.go @@ -970,8 +970,8 @@ func CloneRefOfColumnType(n *ColumnType) *ColumnType { } out := *n out.Options = CloneRefOfColumnTypeOptions(n.Options) - out.Length = CloneRefOfLiteral(n.Length) - out.Scale = CloneRefOfLiteral(n.Scale) + out.Length = CloneRefOfInt(n.Length) + out.Scale = CloneRefOfInt(n.Scale) out.Charset = CloneColumnCharset(n.Charset) out.EnumValues = CloneSliceOfString(n.EnumValues) return &out @@ -1060,8 +1060,8 @@ func CloneRefOfConvertType(n *ConvertType) *ConvertType { return nil } out := *n - out.Length = CloneRefOfLiteral(n.Length) - out.Scale = CloneRefOfLiteral(n.Scale) + out.Length = 
CloneRefOfInt(n.Length) + out.Scale = CloneRefOfInt(n.Scale) out.Charset = CloneColumnCharset(n.Charset) return &out } @@ -4377,6 +4377,15 @@ func CloneRefOfColumnTypeOptions(n *ColumnTypeOptions) *ColumnTypeOptions { return &out } +// CloneRefOfInt creates a deep clone of the input. +func CloneRefOfInt(n *int) *int { + if n == nil { + return nil + } + out := *n + return &out +} + // CloneColumnCharset creates a deep clone of the input. func CloneColumnCharset(n ColumnCharset) ColumnCharset { return *CloneRefOfColumnCharset(&n) @@ -4553,15 +4562,6 @@ func CloneComments(n Comments) Comments { return res } -// CloneRefOfInt creates a deep clone of the input. -func CloneRefOfInt(n *int) *int { - if n == nil { - return nil - } - out := *n - return &out -} - // CloneSliceOfRefOfPartitionDefinition creates a deep clone of the input. func CloneSliceOfRefOfPartitionDefinition(n []*PartitionDefinition) []*PartitionDefinition { if n == nil { @@ -4712,7 +4712,7 @@ func CloneRefOfIndexColumn(n *IndexColumn) *IndexColumn { } out := *n out.Column = CloneIdentifierCI(n.Column) - out.Length = CloneRefOfLiteral(n.Length) + out.Length = CloneRefOfInt(n.Length) out.Expression = CloneExpr(n.Expression) return &out } diff --git a/go/vt/sqlparser/ast_copy_on_rewrite.go b/go/vt/sqlparser/ast_copy_on_rewrite.go index 9b1fcdf8e34..8491a86cfbd 100644 --- a/go/vt/sqlparser/ast_copy_on_rewrite.go +++ b/go/vt/sqlparser/ast_copy_on_rewrite.go @@ -1451,18 +1451,6 @@ func (c *cow) copyOnRewriteRefOfColumnType(n *ColumnType, parent SQLNode) (out S } out = n if c.pre == nil || c.pre(n, parent) { - _Length, changedLength := c.copyOnRewriteRefOfLiteral(n.Length, n) - _Scale, changedScale := c.copyOnRewriteRefOfLiteral(n.Scale, n) - if changedLength || changedScale { - res := *n - res.Length, _ = _Length.(*Literal) - res.Scale, _ = _Scale.(*Literal) - out = &res - if c.cloned != nil { - c.cloned(n, out) - } - changed = true - } } if c.post != nil { out, changed = c.postVisit(out, parent, changed) @@ 
-1622,18 +1610,6 @@ func (c *cow) copyOnRewriteRefOfConvertType(n *ConvertType, parent SQLNode) (out } out = n if c.pre == nil || c.pre(n, parent) { - _Length, changedLength := c.copyOnRewriteRefOfLiteral(n.Length, n) - _Scale, changedScale := c.copyOnRewriteRefOfLiteral(n.Scale, n) - if changedLength || changedScale { - res := *n - res.Length, _ = _Length.(*Literal) - res.Scale, _ = _Scale.(*Literal) - out = &res - if c.cloned != nil { - c.cloned(n, out) - } - changed = true - } } if c.post != nil { out, changed = c.postVisit(out, parent, changed) diff --git a/go/vt/sqlparser/ast_equals.go b/go/vt/sqlparser/ast_equals.go index 773d199007d..433737fa42b 100644 --- a/go/vt/sqlparser/ast_equals.go +++ b/go/vt/sqlparser/ast_equals.go @@ -2132,8 +2132,8 @@ func (cmp *Comparator) RefOfColumnType(a, b *ColumnType) bool { a.Unsigned == b.Unsigned && a.Zerofill == b.Zerofill && cmp.RefOfColumnTypeOptions(a.Options, b.Options) && - cmp.RefOfLiteral(a.Length, b.Length) && - cmp.RefOfLiteral(a.Scale, b.Scale) && + cmp.RefOfInt(a.Length, b.Length) && + cmp.RefOfInt(a.Scale, b.Scale) && cmp.ColumnCharset(a.Charset, b.Charset) && cmp.SliceOfString(a.EnumValues, b.EnumValues) } @@ -2233,8 +2233,8 @@ func (cmp *Comparator) RefOfConvertType(a, b *ConvertType) bool { return false } return a.Type == b.Type && - cmp.RefOfLiteral(a.Length, b.Length) && - cmp.RefOfLiteral(a.Scale, b.Scale) && + cmp.RefOfInt(a.Length, b.Length) && + cmp.RefOfInt(a.Scale, b.Scale) && cmp.ColumnCharset(a.Charset, b.Charset) } @@ -7252,6 +7252,17 @@ func (cmp *Comparator) RefOfColumnTypeOptions(a, b *ColumnTypeOptions) bool { cmp.RefOfLiteral(a.SRID, b.SRID) } +// RefOfInt does deep equals between the two objects. +func (cmp *Comparator) RefOfInt(a, b *int) bool { + if a == b { + return true + } + if a == nil || b == nil { + return false + } + return *a == *b +} + // ColumnCharset does deep equals between the two objects. 
func (cmp *Comparator) ColumnCharset(a, b ColumnCharset) bool { return a.Name == b.Name && @@ -7450,17 +7461,6 @@ func (cmp *Comparator) Comments(a, b Comments) bool { return true } -// RefOfInt does deep equals between the two objects. -func (cmp *Comparator) RefOfInt(a, b *int) bool { - if a == b { - return true - } - if a == nil || b == nil { - return false - } - return *a == *b -} - // SliceOfRefOfPartitionDefinition does deep equals between the two objects. func (cmp *Comparator) SliceOfRefOfPartitionDefinition(a, b []*PartitionDefinition) bool { if len(a) != len(b) { @@ -7633,7 +7633,7 @@ func (cmp *Comparator) RefOfIndexColumn(a, b *IndexColumn) bool { return false } return cmp.IdentifierCI(a.Column, b.Column) && - cmp.RefOfLiteral(a.Length, b.Length) && + cmp.RefOfInt(a.Length, b.Length) && cmp.Expr(a.Expression, b.Expression) && a.Direction == b.Direction } diff --git a/go/vt/sqlparser/ast_format.go b/go/vt/sqlparser/ast_format.go index 16219f690b0..ead975dbb5f 100644 --- a/go/vt/sqlparser/ast_format.go +++ b/go/vt/sqlparser/ast_format.go @@ -709,10 +709,10 @@ func (ct *ColumnType) Format(buf *TrackedBuffer) { buf.astPrintf(ct, "%#s", ct.Type) if ct.Length != nil && ct.Scale != nil { - buf.astPrintf(ct, "(%v,%v)", ct.Length, ct.Scale) + buf.astPrintf(ct, "(%d,%d)", *ct.Length, *ct.Scale) } else if ct.Length != nil { - buf.astPrintf(ct, "(%v)", ct.Length) + buf.astPrintf(ct, "(%d)", *ct.Length) } if ct.EnumValues != nil { @@ -837,7 +837,7 @@ func (idx *IndexDefinition) Format(buf *TrackedBuffer) { } else { buf.astPrintf(idx, "%v", col.Column) if col.Length != nil { - buf.astPrintf(idx, "(%v)", col.Length) + buf.astPrintf(idx, "(%d)", *col.Length) } } if col.Direction == DescOrder { @@ -1859,9 +1859,9 @@ func (node *ConvertUsingExpr) Format(buf *TrackedBuffer) { func (node *ConvertType) Format(buf *TrackedBuffer) { buf.astPrintf(node, "%#s", node.Type) if node.Length != nil { - buf.astPrintf(node, "(%v", node.Length) + buf.astPrintf(node, "(%d", 
*node.Length) if node.Scale != nil { - buf.astPrintf(node, ", %v", node.Scale) + buf.astPrintf(node, ", %d", *node.Scale) } buf.astPrintf(node, ")") } diff --git a/go/vt/sqlparser/ast_format_fast.go b/go/vt/sqlparser/ast_format_fast.go index 944615958ec..13ad5b8813c 100644 --- a/go/vt/sqlparser/ast_format_fast.go +++ b/go/vt/sqlparser/ast_format_fast.go @@ -946,14 +946,14 @@ func (ct *ColumnType) formatFast(buf *TrackedBuffer) { if ct.Length != nil && ct.Scale != nil { buf.WriteByte('(') - ct.Length.formatFast(buf) + buf.WriteString(fmt.Sprintf("%d", *ct.Length)) buf.WriteByte(',') - ct.Scale.formatFast(buf) + buf.WriteString(fmt.Sprintf("%d", *ct.Scale)) buf.WriteByte(')') } else if ct.Length != nil { buf.WriteByte('(') - ct.Length.formatFast(buf) + buf.WriteString(fmt.Sprintf("%d", *ct.Length)) buf.WriteByte(')') } @@ -1150,7 +1150,7 @@ func (idx *IndexDefinition) formatFast(buf *TrackedBuffer) { col.Column.formatFast(buf) if col.Length != nil { buf.WriteByte('(') - col.Length.formatFast(buf) + buf.WriteString(fmt.Sprintf("%d", *col.Length)) buf.WriteByte(')') } } @@ -2513,10 +2513,10 @@ func (node *ConvertType) formatFast(buf *TrackedBuffer) { buf.WriteString(node.Type) if node.Length != nil { buf.WriteByte('(') - node.Length.formatFast(buf) + buf.WriteString(fmt.Sprintf("%d", *node.Length)) if node.Scale != nil { buf.WriteString(", ") - node.Scale.formatFast(buf) + buf.WriteString(fmt.Sprintf("%d", *node.Scale)) } buf.WriteByte(')') } diff --git a/go/vt/sqlparser/ast_funcs.go b/go/vt/sqlparser/ast_funcs.go index edfcb89e7e4..049eab2ed41 100644 --- a/go/vt/sqlparser/ast_funcs.go +++ b/go/vt/sqlparser/ast_funcs.go @@ -71,7 +71,7 @@ type IndexColumn struct { // Only one of Column or Expression can be specified // Length is an optional field which is only applicable when Column is used Column IdentifierCI - Length *Literal + Length *int Expression Expr Direction OrderDirection } @@ -79,8 +79,8 @@ type IndexColumn struct { // LengthScaleOption is used for types that 
have an optional length // and scale type LengthScaleOption struct { - Length *Literal - Scale *Literal + Length *int + Scale *int } // IndexOption is used for trailing options for indexes: COMMENT, KEY_BLOCK_SIZE, USING, WITH PARSER diff --git a/go/vt/sqlparser/ast_rewrite.go b/go/vt/sqlparser/ast_rewrite.go index 11bf2ad7d3e..71765ed90e3 100644 --- a/go/vt/sqlparser/ast_rewrite.go +++ b/go/vt/sqlparser/ast_rewrite.go @@ -1823,20 +1823,12 @@ func (a *application) rewriteRefOfColumnType(parent SQLNode, node *ColumnType, r return true } } - if !a.rewriteRefOfLiteral(node, node.Length, func(newNode, parent SQLNode) { - parent.(*ColumnType).Length = newNode.(*Literal) - }) { - return false - } - if !a.rewriteRefOfLiteral(node, node.Scale, func(newNode, parent SQLNode) { - parent.(*ColumnType).Scale = newNode.(*Literal) - }) { - return false - } if a.post != nil { - a.cur.replacer = replacer - a.cur.parent = parent - a.cur.node = node + if a.pre == nil { + a.cur.replacer = replacer + a.cur.parent = parent + a.cur.node = node + } if !a.post(&a.cur) { return false } @@ -2088,20 +2080,12 @@ func (a *application) rewriteRefOfConvertType(parent SQLNode, node *ConvertType, return true } } - if !a.rewriteRefOfLiteral(node, node.Length, func(newNode, parent SQLNode) { - parent.(*ConvertType).Length = newNode.(*Literal) - }) { - return false - } - if !a.rewriteRefOfLiteral(node, node.Scale, func(newNode, parent SQLNode) { - parent.(*ConvertType).Scale = newNode.(*Literal) - }) { - return false - } if a.post != nil { - a.cur.replacer = replacer - a.cur.parent = parent - a.cur.node = node + if a.pre == nil { + a.cur.replacer = replacer + a.cur.parent = parent + a.cur.node = node + } if !a.post(&a.cur) { return false } diff --git a/go/vt/sqlparser/ast_test.go b/go/vt/sqlparser/ast_test.go index bbb3d4453c7..7ed52bea336 100644 --- a/go/vt/sqlparser/ast_test.go +++ b/go/vt/sqlparser/ast_test.go @@ -735,6 +735,10 @@ func TestSplitStatementToPieces(t *testing.T) { // Ignore quoted 
semicolon input: ";create table t1 ';';;;create table t2 (id;", output: "create table t1 ';';create table t2 (id", + }, { + // Ignore quoted semicolon + input: "stop replica; start replica", + output: "stop replica; start replica", }, } diff --git a/go/vt/sqlparser/ast_visit.go b/go/vt/sqlparser/ast_visit.go index d88a6606137..1e4c0a16b58 100644 --- a/go/vt/sqlparser/ast_visit.go +++ b/go/vt/sqlparser/ast_visit.go @@ -1111,12 +1111,6 @@ func VisitRefOfColumnType(in *ColumnType, f Visit) error { if cont, err := f(in); err != nil || !cont { return err } - if err := VisitRefOfLiteral(in.Length, f); err != nil { - return err - } - if err := VisitRefOfLiteral(in.Scale, f); err != nil { - return err - } return nil } func VisitColumns(in Columns, f Visit) error { @@ -1224,12 +1218,6 @@ func VisitRefOfConvertType(in *ConvertType, f Visit) error { if cont, err := f(in); err != nil || !cont { return err } - if err := VisitRefOfLiteral(in.Length, f); err != nil { - return err - } - if err := VisitRefOfLiteral(in.Scale, f); err != nil { - return err - } return nil } func VisitRefOfConvertUsingExpr(in *ConvertUsingExpr, f Visit) error { diff --git a/go/vt/sqlparser/cached_size.go b/go/vt/sqlparser/cached_size.go index e7fcfd74fed..4474e3162ac 100644 --- a/go/vt/sqlparser/cached_size.go +++ b/go/vt/sqlparser/cached_size.go @@ -725,10 +725,10 @@ func (cached *ColumnType) CachedSize(alloc bool) int64 { size += hack.RuntimeAllocSize(int64(len(cached.Type))) // field Options *vitess.io/vitess/go/vt/sqlparser.ColumnTypeOptions size += cached.Options.CachedSize(true) - // field Length *vitess.io/vitess/go/vt/sqlparser.Literal - size += cached.Length.CachedSize(true) - // field Scale *vitess.io/vitess/go/vt/sqlparser.Literal - size += cached.Scale.CachedSize(true) + // field Length *int + size += hack.RuntimeAllocSize(int64(8)) + // field Scale *int + size += hack.RuntimeAllocSize(int64(8)) // field Charset vitess.io/vitess/go/vt/sqlparser.ColumnCharset size += 
cached.Charset.CachedSize(false) // field EnumValues []string @@ -907,10 +907,10 @@ func (cached *ConvertType) CachedSize(alloc bool) int64 { } // field Type string size += hack.RuntimeAllocSize(int64(len(cached.Type))) - // field Length *vitess.io/vitess/go/vt/sqlparser.Literal - size += cached.Length.CachedSize(true) - // field Scale *vitess.io/vitess/go/vt/sqlparser.Literal - size += cached.Scale.CachedSize(true) + // field Length *int + size += hack.RuntimeAllocSize(int64(8)) + // field Scale *int + size += hack.RuntimeAllocSize(int64(8)) // field Charset vitess.io/vitess/go/vt/sqlparser.ColumnCharset size += cached.Charset.CachedSize(false) return size @@ -1780,8 +1780,8 @@ func (cached *IndexColumn) CachedSize(alloc bool) int64 { } // field Column vitess.io/vitess/go/vt/sqlparser.IdentifierCI size += cached.Column.CachedSize(false) - // field Length *vitess.io/vitess/go/vt/sqlparser.Literal - size += cached.Length.CachedSize(true) + // field Length *int + size += hack.RuntimeAllocSize(int64(8)) // field Expression vitess.io/vitess/go/vt/sqlparser.Expr if cc, ok := cached.Expression.(cachedObject); ok { size += cc.CachedSize(true) diff --git a/go/vt/sqlparser/deep_ast_clone.go b/go/vt/sqlparser/deep_ast_clone.go index 8e1704fd446..65fd44052e5 100644 --- a/go/vt/sqlparser/deep_ast_clone.go +++ b/go/vt/sqlparser/deep_ast_clone.go @@ -975,8 +975,8 @@ func DeepCloneRefOfColumnType(n *ColumnType) *ColumnType { } out := *n out.Options = DeepCloneRefOfColumnTypeOptions(n.Options) - out.Length = DeepCloneRefOfLiteral(n.Length) - out.Scale = DeepCloneRefOfLiteral(n.Scale) + out.Length = DeepCloneRefOfInt(n.Length) + out.Scale = DeepCloneRefOfInt(n.Scale) out.Charset = DeepCloneColumnCharset(n.Charset) out.EnumValues = DeepCloneSliceOfString(n.EnumValues) return &out @@ -1065,8 +1065,8 @@ func DeepCloneRefOfConvertType(n *ConvertType) *ConvertType { return nil } out := *n - out.Length = DeepCloneRefOfLiteral(n.Length) - out.Scale = DeepCloneRefOfLiteral(n.Scale) + 
out.Length = DeepCloneRefOfInt(n.Length) + out.Scale = DeepCloneRefOfInt(n.Scale) out.Charset = DeepCloneColumnCharset(n.Charset) return &out } @@ -4712,7 +4712,7 @@ func DeepCloneRefOfIndexColumn(n *IndexColumn) *IndexColumn { } out := *n out.Column = DeepCloneIdentifierCI(n.Column) - out.Length = DeepCloneRefOfLiteral(n.Length) + out.Length = DeepCloneRefOfInt(n.Length) out.Expression = DeepCloneExpr(n.Expression) return &out } diff --git a/go/vt/sqlparser/goyacc/goyacc.go b/go/vt/sqlparser/goyacc/goyacc.go index 5864b5090b4..51650b0891e 100644 --- a/go/vt/sqlparser/goyacc/goyacc.go +++ b/go/vt/sqlparser/goyacc/goyacc.go @@ -49,7 +49,6 @@ import ( "bufio" "bytes" "fmt" - "go/format" "os" "regexp" "sort" @@ -58,6 +57,8 @@ import ( "unicode" "github.com/spf13/pflag" + + "vitess.io/vitess/go/tools/codegen" ) // the following are adjustable @@ -3326,7 +3327,7 @@ func exit(status int) { if ftable != nil { ftable.Flush() ftable = nil - gofmt() + _ = codegen.GoImports(oflag) } if foutput != nil { foutput.Flush() @@ -3339,18 +3340,6 @@ func exit(status int) { os.Exit(status) } -func gofmt() { - src, err := os.ReadFile(oflag) - if err != nil { - return - } - src, err = format.Source(src) - if err != nil { - return - } - os.WriteFile(oflag, src, 0666) -} - const fastAppendHelperText = ` func $$Iaddr(v any) __yyunsafe__.Pointer { type h struct { diff --git a/go/vt/sqlparser/sql.go b/go/vt/sqlparser/sql.go index e4e0f9b57f7..b8d95a94b1f 100644 --- a/go/vt/sqlparser/sql.go +++ b/go/vt/sqlparser/sql.go @@ -6,6 +6,8 @@ package sqlparser import ( __yyfmt__ "fmt" __yyunsafe__ "unsafe" + + "vitess.io/vitess/go/ptr" ) //line sql.y:17 @@ -7761,7 +7763,7 @@ var yyPgo = [...]int{ 316, 2817, 2815, 19, 5545, 9, 2814, 61, 164, 2811, 2810, 4477, 25, 48, 29, 2807, 213, 2800, 2795, 2794, 2793, 1201, 182, 135, 166, 129, 2792, 2791, 2789, 16, - 2788, 2783, 2777, 2768, 2767, 2766, 171, 39, 38, 37, + 2788, 2783, 2777, 2768, 2767, 2766, 39, 38, 37, 171, 211, 66, 14, 101, 174, 157, 90, 2765, 
2764, 2763, 125, 102, 2757, 159, 158, 128, 121, 2667, 186, 147, 114, 2663, 771, 35, 2661, 2653, 2652, 2649, 91, 2637, @@ -7775,7 +7777,7 @@ var yyPgo = [...]int{ 2513, 207, 2505, } -//line sql.y:8852 +//line sql.y:8842 type yySymType struct { union any empty struct{} @@ -8064,6 +8066,11 @@ func (st *yySymType) insertActionUnion() InsertAction { return v } +func (st *yySymType) intPtrUnion() *int { + v, _ := st.union.(*int) + return v +} + func (st *yySymType) integerUnion() int { v, _ := st.union.(int) return v @@ -8506,7 +8513,7 @@ var yyR1 = [...]int{ 321, 323, 323, 323, 323, 323, 324, 324, 324, 324, 324, 324, 324, 324, 324, 324, 324, 324, 324, 324, 324, 324, 325, 325, 325, 325, 325, 325, 325, 325, - 340, 340, 326, 326, 334, 334, 335, 335, 336, 336, + 340, 340, 329, 329, 334, 334, 335, 335, 336, 336, 336, 337, 337, 337, 338, 338, 331, 331, 331, 331, 331, 331, 331, 331, 331, 333, 333, 332, 332, 332, 343, 368, 368, 367, 367, 365, 365, 365, 365, 365, @@ -8542,7 +8549,7 @@ var yyR1 = [...]int{ 372, 374, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 118, 117, 117, 116, 119, 119, 119, 119, 119, 119, 119, 119, 376, 376, 376, 63, 63, 377, - 327, 328, 329, 5, 6, 353, 375, 126, 126, 24, + 326, 327, 328, 5, 6, 353, 375, 126, 126, 24, 39, 39, 25, 25, 25, 25, 26, 26, 64, 67, 67, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, @@ -9117,7 +9124,7 @@ var yyChk = [...]int{ 369, 33, 269, 348, 349, 350, 351, 370, 371, 372, 373, 375, 376, 377, 378, 333, 354, 584, 334, 335, 336, 337, 338, 339, 341, 342, 345, 343, 344, 346, - 347, -386, -385, 87, 89, 88, -326, 87, -147, -139, + 347, -386, -385, 87, 89, 88, -329, 87, -147, -139, 242, -385, 243, 243, 243, -79, 477, -352, -352, -352, 277, 20, -46, -43, -378, 19, -42, -43, 234, 123, 124, 231, 87, -341, 87, -350, -386, -385, 87, 138, @@ -9176,8 +9183,8 @@ var yyChk = [...]int{ -174, -31, 605, 602, 15, -184, -185, -193, -301, -271, -314, -270, 88, 423, 425, 426, 77, 122, -147, -332, 178, -360, -359, -358, 
-341, -343, -344, -345, 89, -332, - -337, 385, 384, -326, -326, -326, -326, -326, -331, -331, - -331, -331, 87, 87, -326, -326, -326, -326, -334, 87, + -337, 385, 384, -329, -329, -329, -329, -329, -331, -331, + -331, -331, 87, 87, -329, -329, -329, -329, -334, 87, -334, -334, -335, -334, 87, -335, -336, 87, -336, -371, -147, -368, -367, -365, -366, 252, 101, 678, 634, 586, 627, 668, 78, -363, -234, 96, -428, -145, -287, 247, @@ -9261,7 +9268,7 @@ var yyChk = [...]int{ 88, 89, 89, 88, -289, -288, -45, -44, -352, -352, 96, -385, 90, 90, 244, 27, -191, 77, 77, 77, -115, 738, 96, 87, -3, 82, -147, 87, 20, -341, - -218, -376, -327, -377, -328, -329, -5, -6, -353, -118, + -218, -376, -326, -377, -327, -328, -5, -6, -353, -118, 58, 101, -63, 45, 243, 718, 719, 127, -427, 731, -368, -255, -372, -374, -191, -151, -427, -162, -149, -148, -150, -156, 168, 169, 269, 348, 349, -219, 89, -255, @@ -9269,7 +9276,7 @@ var yyChk = [...]int{ 288, 293, 382, 257, 256, 288, 293, 382, 257, 256, 90, -411, 321, 90, -411, -191, -84, -49, -191, -284, -284, 34, -385, -428, -163, -155, -127, 163, 586, -318, - 592, -326, -326, -326, -336, -326, 338, -326, 338, -326, + 592, -329, -329, -329, -336, -329, 338, -329, 338, -329, -428, -428, -428, 88, -428, 23, -428, -147, 88, -123, 482, 88, 88, -428, 87, 87, -147, -428, -428, -428, 88, -428, -428, -428, -428, -428, -428, -428, -428, -428, @@ -9340,7 +9347,7 @@ var yyChk = [...]int{ -147, 88, -428, -428, -428, -107, 88, -104, -103, -295, 77, 122, -268, -295, -259, -427, -420, 56, -418, 50, 20, 89, -428, -427, -235, 89, -239, -29, 87, -3, - 281, -327, -377, -328, -329, -5, -6, -353, -82, 586, + 281, -326, -377, -327, -328, -5, -6, -353, -82, 586, -379, -357, -301, -297, 90, 96, 89, 89, 586, -428, -428, -90, 146, 708, 676, -157, 224, -428, 88, -428, 88, -428, 88, -295, 248, -105, 88, 26, -254, -421, @@ -10274,7 +10281,7 @@ yydefault: case 1: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:609 +//line sql.y:613 { stmt := yyDollar[2].statementUnion() // 
If the statement is empty and we have comments @@ -10288,58 +10295,58 @@ yydefault: } case 2: yyDollar = yyS[yypt-0 : yypt+1] -//line sql.y:622 +//line sql.y:626 { } case 3: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:623 +//line sql.y:627 { } case 4: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL Statement -//line sql.y:627 +//line sql.y:631 { yyLOCAL = yyDollar[1].selStmtUnion() } yyVAL.union = yyLOCAL case 41: yyDollar = yyS[yypt-0 : yypt+1] -//line sql.y:667 +//line sql.y:671 { setParseTree(yylex, nil) } case 42: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL *Variable -//line sql.y:673 +//line sql.y:677 { yyLOCAL = NewVariableExpression(yyDollar[1].str, SingleAt) } yyVAL.union = yyLOCAL case 43: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:679 +//line sql.y:683 { yyVAL.identifierCI = NewIdentifierCI(string(yyDollar[1].str)) } case 44: yyDollar = yyS[yypt-0 : yypt+1] -//line sql.y:684 +//line sql.y:688 { yyVAL.identifierCI = NewIdentifierCI("") } case 45: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:688 +//line sql.y:692 { yyVAL.identifierCI = yyDollar[1].identifierCI } case 46: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL *Variable -//line sql.y:694 +//line sql.y:698 { yyLOCAL = NewVariableExpression(string(yyDollar[1].str), SingleAt) } @@ -10347,7 +10354,7 @@ yydefault: case 47: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL *Variable -//line sql.y:698 +//line sql.y:702 { yyLOCAL = NewVariableExpression(string(yyDollar[1].str), DoubleAt) } @@ -10355,7 +10362,7 @@ yydefault: case 48: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL Statement -//line sql.y:704 +//line sql.y:708 { yyLOCAL = &OtherAdmin{} } @@ -10363,7 +10370,7 @@ yydefault: case 49: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL Statement -//line sql.y:710 +//line sql.y:714 { yyLOCAL = &Load{} } @@ -10371,7 +10378,7 @@ yydefault: case 50: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL *With -//line sql.y:716 +//line sql.y:720 { yyLOCAL = &With{ctes: yyDollar[2].ctesUnion(), Recursive: false} } @@ -10379,7 
+10386,7 @@ yydefault: case 51: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *With -//line sql.y:720 +//line sql.y:724 { yyLOCAL = &With{ctes: yyDollar[3].ctesUnion(), Recursive: true} } @@ -10387,7 +10394,7 @@ yydefault: case 52: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL *With -//line sql.y:725 +//line sql.y:729 { yyLOCAL = nil } @@ -10395,14 +10402,14 @@ yydefault: case 53: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL *With -//line sql.y:729 +//line sql.y:733 { yyLOCAL = yyDollar[1].withUnion() } yyVAL.union = yyLOCAL case 54: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:735 +//line sql.y:739 { yySLICE := (*[]*CommonTableExpr)(yyIaddr(yyVAL.union)) *yySLICE = append(*yySLICE, yyDollar[3].cteUnion()) @@ -10410,7 +10417,7 @@ yydefault: case 55: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL []*CommonTableExpr -//line sql.y:739 +//line sql.y:743 { yyLOCAL = []*CommonTableExpr{yyDollar[1].cteUnion()} } @@ -10418,7 +10425,7 @@ yydefault: case 56: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL *CommonTableExpr -//line sql.y:745 +//line sql.y:749 { yyLOCAL = &CommonTableExpr{ID: yyDollar[1].identifierCS, Columns: yyDollar[2].columnsUnion(), Subquery: yyDollar[4].subqueryUnion()} } @@ -10426,7 +10433,7 @@ yydefault: case 57: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL SelectStatement -//line sql.y:751 +//line sql.y:755 { yyLOCAL = yyDollar[2].selStmtUnion() } @@ -10434,7 +10441,7 @@ yydefault: case 58: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL SelectStatement -//line sql.y:755 +//line sql.y:759 { yyLOCAL = yyDollar[2].selStmtUnion() } @@ -10442,7 +10449,7 @@ yydefault: case 59: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL SelectStatement -//line sql.y:759 +//line sql.y:763 { setLockInSelect(yyDollar[2].selStmtUnion(), yyDollar[3].lockUnion()) yyLOCAL = yyDollar[2].selStmtUnion() @@ -10451,7 +10458,7 @@ yydefault: case 60: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL SelectStatement -//line sql.y:782 +//line sql.y:786 { 
yyDollar[1].selStmtUnion().SetOrderBy(yyDollar[2].orderByUnion()) yyDollar[1].selStmtUnion().SetLimit(yyDollar[3].limitUnion()) @@ -10461,7 +10468,7 @@ yydefault: case 61: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL SelectStatement -//line sql.y:788 +//line sql.y:792 { yyDollar[1].selStmtUnion().SetLimit(yyDollar[2].limitUnion()) yyLOCAL = yyDollar[1].selStmtUnion() @@ -10470,7 +10477,7 @@ yydefault: case 62: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL SelectStatement -//line sql.y:793 +//line sql.y:797 { yyDollar[1].selStmtUnion().SetOrderBy(yyDollar[2].orderByUnion()) yyDollar[1].selStmtUnion().SetLimit(yyDollar[3].limitUnion()) @@ -10480,7 +10487,7 @@ yydefault: case 63: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL SelectStatement -//line sql.y:799 +//line sql.y:803 { yyDollar[2].selStmtUnion().SetWith(yyDollar[1].withUnion()) yyDollar[2].selStmtUnion().SetOrderBy(yyDollar[3].orderByUnion()) @@ -10491,7 +10498,7 @@ yydefault: case 64: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL SelectStatement -//line sql.y:806 +//line sql.y:810 { yyDollar[2].selStmtUnion().SetWith(yyDollar[1].withUnion()) yyDollar[2].selStmtUnion().SetLimit(yyDollar[3].limitUnion()) @@ -10501,7 +10508,7 @@ yydefault: case 65: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL SelectStatement -//line sql.y:812 +//line sql.y:816 { yyDollar[2].selStmtUnion().SetWith(yyDollar[1].withUnion()) yyDollar[2].selStmtUnion().SetOrderBy(yyDollar[3].orderByUnion()) @@ -10511,14 +10518,14 @@ yydefault: yyVAL.union = yyLOCAL case 66: yyDollar = yyS[yypt-2 : yypt+1] -//line sql.y:819 +//line sql.y:823 { yyDollar[2].selStmtUnion().SetWith(yyDollar[1].withUnion()) } case 67: yyDollar = yyS[yypt-7 : yypt+1] var yyLOCAL SelectStatement -//line sql.y:823 +//line sql.y:827 { yyLOCAL = NewSelect(Comments(yyDollar[2].strs), SelectExprs{&Nextval{Expr: yyDollar[5].exprUnion()}}, []string{yyDollar[3].str} /*options*/, nil, TableExprs{&AliasedTableExpr{Expr: yyDollar[7].tableName}}, nil /*where*/, nil /*groupBy*/, nil /*having*/, 
nil) } @@ -10526,7 +10533,7 @@ yydefault: case 68: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL SelectStatement -//line sql.y:829 +//line sql.y:833 { yyLOCAL = yyDollar[1].selStmtUnion() } @@ -10534,7 +10541,7 @@ yydefault: case 69: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL SelectStatement -//line sql.y:833 +//line sql.y:837 { yyLOCAL = &Union{Left: yyDollar[1].selStmtUnion(), Distinct: yyDollar[2].booleanUnion(), Right: yyDollar[3].selStmtUnion()} } @@ -10542,7 +10549,7 @@ yydefault: case 70: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL SelectStatement -//line sql.y:837 +//line sql.y:841 { yyLOCAL = &Union{Left: yyDollar[1].selStmtUnion(), Distinct: yyDollar[2].booleanUnion(), Right: yyDollar[3].selStmtUnion()} } @@ -10550,7 +10557,7 @@ yydefault: case 71: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL SelectStatement -//line sql.y:841 +//line sql.y:845 { yyLOCAL = &Union{Left: yyDollar[1].selStmtUnion(), Distinct: yyDollar[2].booleanUnion(), Right: yyDollar[3].selStmtUnion()} } @@ -10558,7 +10565,7 @@ yydefault: case 72: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL SelectStatement -//line sql.y:845 +//line sql.y:849 { yyLOCAL = &Union{Left: yyDollar[1].selStmtUnion(), Distinct: yyDollar[2].booleanUnion(), Right: yyDollar[3].selStmtUnion()} } @@ -10566,7 +10573,7 @@ yydefault: case 73: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL SelectStatement -//line sql.y:851 +//line sql.y:855 { yyLOCAL = yyDollar[1].selStmtUnion() } @@ -10574,7 +10581,7 @@ yydefault: case 74: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL SelectStatement -//line sql.y:855 +//line sql.y:859 { setLockInSelect(yyDollar[1].selStmtUnion(), yyDollar[2].lockUnion()) yyLOCAL = yyDollar[1].selStmtUnion() @@ -10583,7 +10590,7 @@ yydefault: case 75: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL SelectStatement -//line sql.y:860 +//line sql.y:864 { yyLOCAL = yyDollar[1].selStmtUnion() } @@ -10591,7 +10598,7 @@ yydefault: case 76: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL SelectStatement -//line sql.y:864 +//line 
sql.y:868 { yyLOCAL = yyDollar[1].selStmtUnion() } @@ -10599,7 +10606,7 @@ yydefault: case 77: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL SelectStatement -//line sql.y:870 +//line sql.y:874 { yyLOCAL = yyDollar[2].selStmtUnion() } @@ -10607,7 +10614,7 @@ yydefault: case 78: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL SelectStatement -//line sql.y:874 +//line sql.y:878 { yyDollar[1].selStmtUnion().SetInto(yyDollar[2].selectIntoUnion()) yyLOCAL = yyDollar[1].selStmtUnion() @@ -10616,7 +10623,7 @@ yydefault: case 79: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL SelectStatement -//line sql.y:879 +//line sql.y:883 { yyDollar[1].selStmtUnion().SetInto(yyDollar[2].selectIntoUnion()) yyDollar[1].selStmtUnion().SetLock(yyDollar[3].lockUnion()) @@ -10626,7 +10633,7 @@ yydefault: case 80: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL SelectStatement -//line sql.y:885 +//line sql.y:889 { yyDollar[1].selStmtUnion().SetInto(yyDollar[3].selectIntoUnion()) yyDollar[1].selStmtUnion().SetLock(yyDollar[2].lockUnion()) @@ -10636,7 +10643,7 @@ yydefault: case 81: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL SelectStatement -//line sql.y:891 +//line sql.y:895 { yyDollar[1].selStmtUnion().SetInto(yyDollar[2].selectIntoUnion()) yyLOCAL = yyDollar[1].selStmtUnion() @@ -10645,7 +10652,7 @@ yydefault: case 82: yyDollar = yyS[yypt-5 : yypt+1] var yyLOCAL Statement -//line sql.y:898 +//line sql.y:902 { yyLOCAL = &Stream{Comments: Comments(yyDollar[2].strs).Parsed(), SelectExpr: yyDollar[3].selectExprUnion(), Table: yyDollar[5].tableName} } @@ -10653,7 +10660,7 @@ yydefault: case 83: yyDollar = yyS[yypt-7 : yypt+1] var yyLOCAL Statement -//line sql.y:904 +//line sql.y:908 { yyLOCAL = &VStream{Comments: Comments(yyDollar[2].strs).Parsed(), SelectExpr: yyDollar[3].selectExprUnion(), Table: yyDollar[5].tableName, Where: NewWhere(WhereClause, yyDollar[6].exprUnion()), Limit: yyDollar[7].limitUnion()} } @@ -10661,7 +10668,7 @@ yydefault: case 84: yyDollar = yyS[yypt-10 : yypt+1] var yyLOCAL 
SelectStatement -//line sql.y:912 +//line sql.y:916 { yyLOCAL = NewSelect(Comments(yyDollar[2].strs), yyDollar[4].selectExprsUnion() /*SelectExprs*/, yyDollar[3].strs /*options*/, yyDollar[5].selectIntoUnion() /*into*/, yyDollar[6].tableExprsUnion() /*from*/, NewWhere(WhereClause, yyDollar[7].exprUnion()), GroupBy(yyDollar[8].exprsUnion()), NewWhere(HavingClause, yyDollar[9].exprUnion()), yyDollar[10].namedWindowsUnion()) } @@ -10669,7 +10676,7 @@ yydefault: case 85: yyDollar = yyS[yypt-9 : yypt+1] var yyLOCAL SelectStatement -//line sql.y:916 +//line sql.y:920 { yyLOCAL = NewSelect(Comments(yyDollar[2].strs), yyDollar[4].selectExprsUnion() /*SelectExprs*/, yyDollar[3].strs /*options*/, nil, yyDollar[5].tableExprsUnion() /*from*/, NewWhere(WhereClause, yyDollar[6].exprUnion()), GroupBy(yyDollar[7].exprsUnion()), NewWhere(HavingClause, yyDollar[8].exprUnion()), yyDollar[9].namedWindowsUnion()) } @@ -10677,7 +10684,7 @@ yydefault: case 86: yyDollar = yyS[yypt-7 : yypt+1] var yyLOCAL Statement -//line sql.y:922 +//line sql.y:926 { // insert_data returns a *Insert pre-filled with Columns & Values ins := yyDollar[6].insUnion() @@ -10693,7 +10700,7 @@ yydefault: case 87: yyDollar = yyS[yypt-8 : yypt+1] var yyLOCAL Statement -//line sql.y:934 +//line sql.y:938 { cols := make(Columns, 0, len(yyDollar[7].updateExprsUnion())) vals := make(ValTuple, 0, len(yyDollar[8].updateExprsUnion())) @@ -10707,7 +10714,7 @@ yydefault: case 88: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL InsertAction -//line sql.y:946 +//line sql.y:950 { yyLOCAL = InsertAct } @@ -10715,7 +10722,7 @@ yydefault: case 89: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL InsertAction -//line sql.y:950 +//line sql.y:954 { yyLOCAL = ReplaceAct } @@ -10723,7 +10730,7 @@ yydefault: case 90: yyDollar = yyS[yypt-10 : yypt+1] var yyLOCAL Statement -//line sql.y:956 +//line sql.y:960 { yyLOCAL = &Update{With: yyDollar[1].withUnion(), Comments: Comments(yyDollar[3].strs).Parsed(), Ignore: yyDollar[4].ignoreUnion(), 
TableExprs: yyDollar[5].tableExprsUnion(), Exprs: yyDollar[7].updateExprsUnion(), Where: NewWhere(WhereClause, yyDollar[8].exprUnion()), OrderBy: yyDollar[9].orderByUnion(), Limit: yyDollar[10].limitUnion()} } @@ -10731,7 +10738,7 @@ yydefault: case 91: yyDollar = yyS[yypt-11 : yypt+1] var yyLOCAL Statement -//line sql.y:962 +//line sql.y:966 { yyLOCAL = &Delete{With: yyDollar[1].withUnion(), Comments: Comments(yyDollar[3].strs).Parsed(), Ignore: yyDollar[4].ignoreUnion(), TableExprs: TableExprs{&AliasedTableExpr{Expr: yyDollar[6].tableName, As: yyDollar[7].identifierCS}}, Partitions: yyDollar[8].partitionsUnion(), Where: NewWhere(WhereClause, yyDollar[9].exprUnion()), OrderBy: yyDollar[10].orderByUnion(), Limit: yyDollar[11].limitUnion()} } @@ -10739,7 +10746,7 @@ yydefault: case 92: yyDollar = yyS[yypt-9 : yypt+1] var yyLOCAL Statement -//line sql.y:966 +//line sql.y:970 { yyLOCAL = &Delete{With: yyDollar[1].withUnion(), Comments: Comments(yyDollar[3].strs).Parsed(), Ignore: yyDollar[4].ignoreUnion(), Targets: yyDollar[6].tableNamesUnion(), TableExprs: yyDollar[8].tableExprsUnion(), Where: NewWhere(WhereClause, yyDollar[9].exprUnion())} } @@ -10747,7 +10754,7 @@ yydefault: case 93: yyDollar = yyS[yypt-8 : yypt+1] var yyLOCAL Statement -//line sql.y:970 +//line sql.y:974 { yyLOCAL = &Delete{With: yyDollar[1].withUnion(), Comments: Comments(yyDollar[3].strs).Parsed(), Ignore: yyDollar[4].ignoreUnion(), Targets: yyDollar[5].tableNamesUnion(), TableExprs: yyDollar[7].tableExprsUnion(), Where: NewWhere(WhereClause, yyDollar[8].exprUnion())} } @@ -10755,32 +10762,32 @@ yydefault: case 94: yyDollar = yyS[yypt-8 : yypt+1] var yyLOCAL Statement -//line sql.y:974 +//line sql.y:978 { yyLOCAL = &Delete{With: yyDollar[1].withUnion(), Comments: Comments(yyDollar[3].strs).Parsed(), Ignore: yyDollar[4].ignoreUnion(), Targets: yyDollar[5].tableNamesUnion(), TableExprs: yyDollar[7].tableExprsUnion(), Where: NewWhere(WhereClause, yyDollar[8].exprUnion())} } yyVAL.union = yyLOCAL 
case 95: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:979 +//line sql.y:983 { } case 96: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:980 +//line sql.y:984 { } case 97: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL TableNames -//line sql.y:984 +//line sql.y:988 { yyLOCAL = TableNames{yyDollar[1].tableName} } yyVAL.union = yyLOCAL case 98: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:988 +//line sql.y:992 { yySLICE := (*TableNames)(yyIaddr(yyVAL.union)) *yySLICE = append(*yySLICE, yyDollar[3].tableName) @@ -10788,14 +10795,14 @@ yydefault: case 99: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL TableNames -//line sql.y:994 +//line sql.y:998 { yyLOCAL = TableNames{yyDollar[1].tableName} } yyVAL.union = yyLOCAL case 100: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:998 +//line sql.y:1002 { yySLICE := (*TableNames)(yyIaddr(yyVAL.union)) *yySLICE = append(*yySLICE, yyDollar[3].tableName) @@ -10803,14 +10810,14 @@ yydefault: case 101: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL TableNames -//line sql.y:1004 +//line sql.y:1008 { yyLOCAL = TableNames{yyDollar[1].tableName} } yyVAL.union = yyLOCAL case 102: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:1008 +//line sql.y:1012 { yySLICE := (*TableNames)(yyIaddr(yyVAL.union)) *yySLICE = append(*yySLICE, yyDollar[3].tableName) @@ -10818,7 +10825,7 @@ yydefault: case 103: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL Partitions -//line sql.y:1013 +//line sql.y:1017 { yyLOCAL = nil } @@ -10826,7 +10833,7 @@ yydefault: case 104: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Partitions -//line sql.y:1017 +//line sql.y:1021 { yyLOCAL = yyDollar[3].partitionsUnion() } @@ -10834,7 +10841,7 @@ yydefault: case 105: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL Statement -//line sql.y:1023 +//line sql.y:1027 { yyLOCAL = NewSetStatement(Comments(yyDollar[2].strs).Parsed(), yyDollar[3].setExprsUnion()) } @@ -10842,14 +10849,14 @@ yydefault: case 106: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL SetExprs -//line sql.y:1029 +//line sql.y:1033 { 
yyLOCAL = SetExprs{yyDollar[1].setExprUnion()} } yyVAL.union = yyLOCAL case 107: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:1033 +//line sql.y:1037 { yySLICE := (*SetExprs)(yyIaddr(yyVAL.union)) *yySLICE = append(*yySLICE, yyDollar[3].setExprUnion()) @@ -10857,7 +10864,7 @@ yydefault: case 108: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *SetExpr -//line sql.y:1039 +//line sql.y:1043 { yyLOCAL = &SetExpr{Var: yyDollar[1].variableUnion(), Expr: NewStrLiteral("on")} } @@ -10865,7 +10872,7 @@ yydefault: case 109: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *SetExpr -//line sql.y:1043 +//line sql.y:1047 { yyLOCAL = &SetExpr{Var: yyDollar[1].variableUnion(), Expr: NewStrLiteral("off")} } @@ -10873,7 +10880,7 @@ yydefault: case 110: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *SetExpr -//line sql.y:1047 +//line sql.y:1051 { yyLOCAL = &SetExpr{Var: yyDollar[1].variableUnion(), Expr: yyDollar[3].exprUnion()} } @@ -10881,7 +10888,7 @@ yydefault: case 111: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *SetExpr -//line sql.y:1051 +//line sql.y:1055 { yyLOCAL = &SetExpr{Var: NewSetVariable(string(yyDollar[1].str), SessionScope), Expr: yyDollar[2].exprUnion()} } @@ -10889,7 +10896,7 @@ yydefault: case 112: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL *Variable -//line sql.y:1057 +//line sql.y:1061 { yyLOCAL = NewSetVariable(string(yyDollar[1].str), SessionScope) } @@ -10897,7 +10904,7 @@ yydefault: case 113: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL *Variable -//line sql.y:1061 +//line sql.y:1065 { yyLOCAL = yyDollar[1].variableUnion() } @@ -10905,7 +10912,7 @@ yydefault: case 114: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL *Variable -//line sql.y:1065 +//line sql.y:1069 { yyLOCAL = NewSetVariable(string(yyDollar[2].str), yyDollar[1].scopeUnion()) } @@ -10913,7 +10920,7 @@ yydefault: case 115: yyDollar = yyS[yypt-5 : yypt+1] var yyLOCAL Statement -//line sql.y:1071 +//line sql.y:1075 { yyLOCAL = NewSetStatement(Comments(yyDollar[2].strs).Parsed(), 
UpdateSetExprsScope(yyDollar[5].setExprsUnion(), yyDollar[3].scopeUnion())) } @@ -10921,7 +10928,7 @@ yydefault: case 116: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Statement -//line sql.y:1075 +//line sql.y:1079 { yyLOCAL = NewSetStatement(Comments(yyDollar[2].strs).Parsed(), yyDollar[4].setExprsUnion()) } @@ -10929,14 +10936,14 @@ yydefault: case 117: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL SetExprs -//line sql.y:1081 +//line sql.y:1085 { yyLOCAL = SetExprs{yyDollar[1].setExprUnion()} } yyVAL.union = yyLOCAL case 118: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:1085 +//line sql.y:1089 { yySLICE := (*SetExprs)(yyIaddr(yyVAL.union)) *yySLICE = append(*yySLICE, yyDollar[3].setExprUnion()) @@ -10944,7 +10951,7 @@ yydefault: case 119: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *SetExpr -//line sql.y:1091 +//line sql.y:1095 { yyLOCAL = &SetExpr{Var: NewSetVariable(TransactionIsolationStr, NextTxScope), Expr: NewStrLiteral(yyDollar[3].str)} } @@ -10952,7 +10959,7 @@ yydefault: case 120: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL *SetExpr -//line sql.y:1095 +//line sql.y:1099 { yyLOCAL = &SetExpr{Var: NewSetVariable(TransactionReadOnlyStr, NextTxScope), Expr: NewStrLiteral("off")} } @@ -10960,39 +10967,39 @@ yydefault: case 121: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL *SetExpr -//line sql.y:1099 +//line sql.y:1103 { yyLOCAL = &SetExpr{Var: NewSetVariable(TransactionReadOnlyStr, NextTxScope), Expr: NewStrLiteral("on")} } yyVAL.union = yyLOCAL case 122: yyDollar = yyS[yypt-2 : yypt+1] -//line sql.y:1105 +//line sql.y:1109 { yyVAL.str = RepeatableReadStr } case 123: yyDollar = yyS[yypt-2 : yypt+1] -//line sql.y:1109 +//line sql.y:1113 { yyVAL.str = ReadCommittedStr } case 124: yyDollar = yyS[yypt-2 : yypt+1] -//line sql.y:1113 +//line sql.y:1117 { yyVAL.str = ReadUncommittedStr } case 125: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:1117 +//line sql.y:1121 { yyVAL.str = SerializableStr } case 126: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL Scope -//line 
sql.y:1123 +//line sql.y:1127 { yyLOCAL = SessionScope } @@ -11000,7 +11007,7 @@ yydefault: case 127: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL Scope -//line sql.y:1127 +//line sql.y:1131 { yyLOCAL = SessionScope } @@ -11008,7 +11015,7 @@ yydefault: case 128: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL Scope -//line sql.y:1131 +//line sql.y:1135 { yyLOCAL = GlobalScope } @@ -11016,7 +11023,7 @@ yydefault: case 129: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL Statement -//line sql.y:1137 +//line sql.y:1141 { yyDollar[1].createTableUnion().TableSpec = yyDollar[2].tableSpecUnion() yyDollar[1].createTableUnion().FullyParsed = true @@ -11026,7 +11033,7 @@ yydefault: case 130: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL Statement -//line sql.y:1143 +//line sql.y:1147 { // Create table [name] like [name] yyDollar[1].createTableUnion().OptLike = yyDollar[2].optLikeUnion() @@ -11037,7 +11044,7 @@ yydefault: case 131: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Statement -//line sql.y:1150 +//line sql.y:1154 { indexDef := yyDollar[1].alterTableUnion().AlterOptions[0].(*AddIndexDefinition).IndexDefinition indexDef.Columns = yyDollar[3].indexColumnsUnion() @@ -11050,7 +11057,7 @@ yydefault: case 132: yyDollar = yyS[yypt-12 : yypt+1] var yyLOCAL Statement -//line sql.y:1159 +//line sql.y:1163 { yyLOCAL = &CreateView{ViewName: yyDollar[8].tableName, Comments: Comments(yyDollar[2].strs).Parsed(), IsReplace: yyDollar[3].booleanUnion(), Algorithm: yyDollar[4].str, Definer: yyDollar[5].definerUnion(), Security: yyDollar[6].str, Columns: yyDollar[9].columnsUnion(), Select: yyDollar[11].selStmtUnion(), CheckOption: yyDollar[12].str} } @@ -11058,7 +11065,7 @@ yydefault: case 133: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL Statement -//line sql.y:1163 +//line sql.y:1167 { yyDollar[1].createDatabaseUnion().FullyParsed = true yyDollar[1].createDatabaseUnion().CreateOptions = yyDollar[2].databaseOptionsUnion() @@ -11068,7 +11075,7 @@ yydefault: case 134: yyDollar = yyS[yypt-0 : yypt+1] 
var yyLOCAL bool -//line sql.y:1170 +//line sql.y:1174 { yyLOCAL = false } @@ -11076,39 +11083,39 @@ yydefault: case 135: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL bool -//line sql.y:1174 +//line sql.y:1178 { yyLOCAL = true } yyVAL.union = yyLOCAL case 136: yyDollar = yyS[yypt-0 : yypt+1] -//line sql.y:1179 +//line sql.y:1183 { yyVAL.identifierCI = NewIdentifierCI("") } case 137: yyDollar = yyS[yypt-2 : yypt+1] -//line sql.y:1183 +//line sql.y:1187 { yyVAL.identifierCI = yyDollar[2].identifierCI } case 138: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:1189 +//line sql.y:1193 { yyVAL.identifierCI = yyDollar[1].identifierCI } case 139: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:1196 +//line sql.y:1200 { yyVAL.identifierCI = yyDollar[1].identifierCI } case 140: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL []VindexParam -//line sql.y:1201 +//line sql.y:1205 { var v []VindexParam yyLOCAL = v @@ -11117,7 +11124,7 @@ yydefault: case 141: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL []VindexParam -//line sql.y:1206 +//line sql.y:1210 { yyLOCAL = yyDollar[2].vindexParamsUnion() } @@ -11125,7 +11132,7 @@ yydefault: case 142: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL []VindexParam -//line sql.y:1212 +//line sql.y:1216 { yyLOCAL = make([]VindexParam, 0, 4) yyLOCAL = append(yyLOCAL, yyDollar[1].vindexParam) @@ -11133,21 +11140,21 @@ yydefault: yyVAL.union = yyLOCAL case 143: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:1217 +//line sql.y:1221 { yySLICE := (*[]VindexParam)(yyIaddr(yyVAL.union)) *yySLICE = append(*yySLICE, yyDollar[3].vindexParam) } case 144: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:1223 +//line sql.y:1227 { yyVAL.vindexParam = VindexParam{Key: yyDollar[1].identifierCI, Val: yyDollar[3].str} } case 145: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL []*JSONObjectParam -//line sql.y:1228 +//line sql.y:1232 { yyLOCAL = nil } @@ -11155,7 +11162,7 @@ yydefault: case 146: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL []*JSONObjectParam -//line sql.y:1232 
+//line sql.y:1236 { yyLOCAL = yyDollar[1].jsonObjectParamsUnion() } @@ -11163,28 +11170,28 @@ yydefault: case 147: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL []*JSONObjectParam -//line sql.y:1238 +//line sql.y:1242 { yyLOCAL = []*JSONObjectParam{yyDollar[1].jsonObjectParam} } yyVAL.union = yyLOCAL case 148: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:1242 +//line sql.y:1246 { yySLICE := (*[]*JSONObjectParam)(yyIaddr(yyVAL.union)) *yySLICE = append(*yySLICE, yyDollar[3].jsonObjectParam) } case 149: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:1248 +//line sql.y:1252 { yyVAL.jsonObjectParam = &JSONObjectParam{Key: yyDollar[1].exprUnion(), Value: yyDollar[3].exprUnion()} } case 150: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL *CreateTable -//line sql.y:1254 +//line sql.y:1258 { yyLOCAL = &CreateTable{Comments: Comments(yyDollar[2].strs).Parsed(), Table: yyDollar[6].tableName, IfNotExists: yyDollar[5].booleanUnion(), Temp: yyDollar[3].booleanUnion()} setDDL(yylex, yyLOCAL) @@ -11193,7 +11200,7 @@ yydefault: case 151: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL *AlterTable -//line sql.y:1261 +//line sql.y:1265 { yyLOCAL = &AlterTable{Comments: Comments(yyDollar[2].strs).Parsed(), Table: yyDollar[4].tableName} setDDL(yylex, yyLOCAL) @@ -11202,7 +11209,7 @@ yydefault: case 152: yyDollar = yyS[yypt-7 : yypt+1] var yyLOCAL *AlterTable -//line sql.y:1268 +//line sql.y:1272 { yyLOCAL = &AlterTable{Table: yyDollar[7].tableName, AlterOptions: []AlterOption{&AddIndexDefinition{IndexDefinition: &IndexDefinition{Info: &IndexInfo{Name: yyDollar[4].identifierCI, Type: string(yyDollar[3].str)}, Options: yyDollar[5].indexOptionsUnion()}}}} setDDL(yylex, yyLOCAL) @@ -11211,7 +11218,7 @@ yydefault: case 153: yyDollar = yyS[yypt-8 : yypt+1] var yyLOCAL *AlterTable -//line sql.y:1273 +//line sql.y:1277 { yyLOCAL = &AlterTable{Table: yyDollar[8].tableName, AlterOptions: []AlterOption{&AddIndexDefinition{IndexDefinition: &IndexDefinition{Info: &IndexInfo{Name: yyDollar[5].identifierCI, 
Type: string(yyDollar[3].str) + " " + string(yyDollar[4].str), Fulltext: true}, Options: yyDollar[6].indexOptionsUnion()}}}} setDDL(yylex, yyLOCAL) @@ -11220,7 +11227,7 @@ yydefault: case 154: yyDollar = yyS[yypt-8 : yypt+1] var yyLOCAL *AlterTable -//line sql.y:1278 +//line sql.y:1282 { yyLOCAL = &AlterTable{Table: yyDollar[8].tableName, AlterOptions: []AlterOption{&AddIndexDefinition{IndexDefinition: &IndexDefinition{Info: &IndexInfo{Name: yyDollar[5].identifierCI, Type: string(yyDollar[3].str) + " " + string(yyDollar[4].str), Spatial: true}, Options: yyDollar[6].indexOptionsUnion()}}}} setDDL(yylex, yyLOCAL) @@ -11229,7 +11236,7 @@ yydefault: case 155: yyDollar = yyS[yypt-8 : yypt+1] var yyLOCAL *AlterTable -//line sql.y:1283 +//line sql.y:1287 { yyLOCAL = &AlterTable{Table: yyDollar[8].tableName, AlterOptions: []AlterOption{&AddIndexDefinition{IndexDefinition: &IndexDefinition{Info: &IndexInfo{Name: yyDollar[5].identifierCI, Type: string(yyDollar[3].str) + " " + string(yyDollar[4].str), Unique: true}, Options: yyDollar[6].indexOptionsUnion()}}}} setDDL(yylex, yyLOCAL) @@ -11238,7 +11245,7 @@ yydefault: case 156: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL *CreateDatabase -//line sql.y:1290 +//line sql.y:1294 { yyLOCAL = &CreateDatabase{Comments: Comments(yyDollar[4].strs).Parsed(), DBName: yyDollar[6].identifierCS, IfNotExists: yyDollar[5].booleanUnion()} setDDL(yylex, yyLOCAL) @@ -11247,7 +11254,7 @@ yydefault: case 157: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *AlterDatabase -//line sql.y:1297 +//line sql.y:1301 { yyLOCAL = &AlterDatabase{} setDDL(yylex, yyLOCAL) @@ -11256,7 +11263,7 @@ yydefault: case 160: yyDollar = yyS[yypt-5 : yypt+1] var yyLOCAL *TableSpec -//line sql.y:1308 +//line sql.y:1312 { yyLOCAL = yyDollar[2].tableSpecUnion() yyLOCAL.Options = yyDollar[4].tableOptionsUnion() @@ -11266,7 +11273,7 @@ yydefault: case 161: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL []DatabaseOption -//line sql.y:1315 +//line sql.y:1319 { yyLOCAL = nil } @@ 
-11274,7 +11281,7 @@ yydefault: case 162: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL []DatabaseOption -//line sql.y:1319 +//line sql.y:1323 { yyLOCAL = yyDollar[1].databaseOptionsUnion() } @@ -11282,7 +11289,7 @@ yydefault: case 163: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL []DatabaseOption -//line sql.y:1325 +//line sql.y:1329 { yyLOCAL = []DatabaseOption{yyDollar[1].databaseOption} } @@ -11290,7 +11297,7 @@ yydefault: case 164: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL []DatabaseOption -//line sql.y:1329 +//line sql.y:1333 { yyLOCAL = []DatabaseOption{yyDollar[1].databaseOption} } @@ -11298,28 +11305,28 @@ yydefault: case 165: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL []DatabaseOption -//line sql.y:1333 +//line sql.y:1337 { yyLOCAL = []DatabaseOption{yyDollar[1].databaseOption} } yyVAL.union = yyLOCAL case 166: yyDollar = yyS[yypt-2 : yypt+1] -//line sql.y:1337 +//line sql.y:1341 { yySLICE := (*[]DatabaseOption)(yyIaddr(yyVAL.union)) *yySLICE = append(*yySLICE, yyDollar[2].databaseOption) } case 167: yyDollar = yyS[yypt-2 : yypt+1] -//line sql.y:1341 +//line sql.y:1345 { yySLICE := (*[]DatabaseOption)(yyIaddr(yyVAL.union)) *yySLICE = append(*yySLICE, yyDollar[2].databaseOption) } case 168: yyDollar = yyS[yypt-2 : yypt+1] -//line sql.y:1345 +//line sql.y:1349 { yySLICE := (*[]DatabaseOption)(yyIaddr(yyVAL.union)) *yySLICE = append(*yySLICE, yyDollar[2].databaseOption) @@ -11327,7 +11334,7 @@ yydefault: case 169: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL bool -//line sql.y:1351 +//line sql.y:1355 { yyLOCAL = false } @@ -11335,51 +11342,51 @@ yydefault: case 170: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL bool -//line sql.y:1355 +//line sql.y:1359 { yyLOCAL = true } yyVAL.union = yyLOCAL case 171: yyDollar = yyS[yypt-4 : yypt+1] -//line sql.y:1361 +//line sql.y:1365 { yyVAL.databaseOption = DatabaseOption{Type: CharacterSetType, Value: string(yyDollar[4].str), IsDefault: yyDollar[1].booleanUnion()} } case 172: yyDollar = yyS[yypt-4 : yypt+1] -//line 
sql.y:1365 +//line sql.y:1369 { yyVAL.databaseOption = DatabaseOption{Type: CharacterSetType, Value: encodeSQLString(yyDollar[4].str), IsDefault: yyDollar[1].booleanUnion()} } case 173: yyDollar = yyS[yypt-4 : yypt+1] -//line sql.y:1371 +//line sql.y:1375 { yyVAL.databaseOption = DatabaseOption{Type: CollateType, Value: string(yyDollar[4].str), IsDefault: yyDollar[1].booleanUnion()} } case 174: yyDollar = yyS[yypt-4 : yypt+1] -//line sql.y:1375 +//line sql.y:1379 { yyVAL.databaseOption = DatabaseOption{Type: CollateType, Value: encodeSQLString(yyDollar[4].str), IsDefault: yyDollar[1].booleanUnion()} } case 175: yyDollar = yyS[yypt-4 : yypt+1] -//line sql.y:1381 +//line sql.y:1385 { yyVAL.databaseOption = DatabaseOption{Type: EncryptionType, Value: string(yyDollar[4].str), IsDefault: yyDollar[1].booleanUnion()} } case 176: yyDollar = yyS[yypt-4 : yypt+1] -//line sql.y:1385 +//line sql.y:1389 { yyVAL.databaseOption = DatabaseOption{Type: EncryptionType, Value: encodeSQLString(yyDollar[4].str), IsDefault: yyDollar[1].booleanUnion()} } case 177: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL *OptLike -//line sql.y:1391 +//line sql.y:1395 { yyLOCAL = &OptLike{LikeTable: yyDollar[2].tableName} } @@ -11387,7 +11394,7 @@ yydefault: case 178: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL *OptLike -//line sql.y:1395 +//line sql.y:1399 { yyLOCAL = &OptLike{LikeTable: yyDollar[3].tableName} } @@ -11395,14 +11402,14 @@ yydefault: case 179: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL []*ColumnDefinition -//line sql.y:1401 +//line sql.y:1405 { yyLOCAL = []*ColumnDefinition{yyDollar[1].columnDefinitionUnion()} } yyVAL.union = yyLOCAL case 180: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:1405 +//line sql.y:1409 { yySLICE := (*[]*ColumnDefinition)(yyIaddr(yyVAL.union)) *yySLICE = append(*yySLICE, yyDollar[3].columnDefinitionUnion()) @@ -11410,7 +11417,7 @@ yydefault: case 181: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL *TableSpec -//line sql.y:1411 +//line sql.y:1415 { yyLOCAL = 
&TableSpec{} yyLOCAL.AddColumn(yyDollar[1].columnDefinitionUnion()) @@ -11419,7 +11426,7 @@ yydefault: case 182: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL *TableSpec -//line sql.y:1416 +//line sql.y:1420 { yyLOCAL = &TableSpec{} yyLOCAL.AddConstraint(yyDollar[1].constraintDefinitionUnion()) @@ -11427,39 +11434,39 @@ yydefault: yyVAL.union = yyLOCAL case 183: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:1421 +//line sql.y:1425 { yyVAL.tableSpecUnion().AddColumn(yyDollar[3].columnDefinitionUnion()) } case 184: yyDollar = yyS[yypt-4 : yypt+1] -//line sql.y:1425 +//line sql.y:1429 { yyVAL.tableSpecUnion().AddColumn(yyDollar[3].columnDefinitionUnion()) yyVAL.tableSpecUnion().AddConstraint(yyDollar[4].constraintDefinitionUnion()) } case 185: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:1430 +//line sql.y:1434 { yyVAL.tableSpecUnion().AddIndex(yyDollar[3].indexDefinitionUnion()) } case 186: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:1434 +//line sql.y:1438 { yyVAL.tableSpecUnion().AddConstraint(yyDollar[3].constraintDefinitionUnion()) } case 187: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:1438 +//line sql.y:1442 { yyVAL.tableSpecUnion().AddConstraint(yyDollar[3].constraintDefinitionUnion()) } case 188: yyDollar = yyS[yypt-5 : yypt+1] var yyLOCAL *ColumnDefinition -//line sql.y:1449 +//line sql.y:1453 { yyDollar[2].columnType.Options = yyDollar[4].columnTypeOptionsUnion() if yyDollar[2].columnType.Options.Collate == "" { @@ -11472,7 +11479,7 @@ yydefault: case 189: yyDollar = yyS[yypt-10 : yypt+1] var yyLOCAL *ColumnDefinition -//line sql.y:1458 +//line sql.y:1462 { yyDollar[2].columnType.Options = yyDollar[9].columnTypeOptionsUnion() yyDollar[2].columnType.Options.As = yyDollar[7].exprUnion() @@ -11483,20 +11490,20 @@ yydefault: yyVAL.union = yyLOCAL case 190: yyDollar = yyS[yypt-0 : yypt+1] -//line sql.y:1467 +//line sql.y:1471 { yyVAL.str = "" } case 191: yyDollar = yyS[yypt-2 : yypt+1] -//line sql.y:1471 +//line sql.y:1475 { yyVAL.str = "" } case 192: 
yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL *ColumnTypeOptions -//line sql.y:1480 +//line sql.y:1484 { yyLOCAL = &ColumnTypeOptions{Null: nil, Default: nil, OnUpdate: nil, Autoincrement: false, KeyOpt: ColKeyNone, Comment: nil, As: nil, Invisible: nil, Format: UnspecifiedFormat, EngineAttribute: nil, SecondaryEngineAttribute: nil} } @@ -11504,27 +11511,25 @@ yydefault: case 193: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL *ColumnTypeOptions -//line sql.y:1484 +//line sql.y:1488 { - val := true - yyDollar[1].columnTypeOptionsUnion().Null = &val + yyDollar[1].columnTypeOptionsUnion().Null = ptr.Of(true) yyLOCAL = yyDollar[1].columnTypeOptionsUnion() } yyVAL.union = yyLOCAL case 194: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *ColumnTypeOptions -//line sql.y:1490 +//line sql.y:1493 { - val := false - yyDollar[1].columnTypeOptionsUnion().Null = &val + yyDollar[1].columnTypeOptionsUnion().Null = ptr.Of(false) yyLOCAL = yyDollar[1].columnTypeOptionsUnion() } yyVAL.union = yyLOCAL case 195: yyDollar = yyS[yypt-5 : yypt+1] var yyLOCAL *ColumnTypeOptions -//line sql.y:1496 +//line sql.y:1498 { yyDollar[1].columnTypeOptionsUnion().Default = yyDollar[4].exprUnion() yyLOCAL = yyDollar[1].columnTypeOptionsUnion() @@ -11533,7 +11538,7 @@ yydefault: case 196: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *ColumnTypeOptions -//line sql.y:1501 +//line sql.y:1503 { yyDollar[1].columnTypeOptionsUnion().Default = yyDollar[3].exprUnion() yyDollar[1].columnTypeOptionsUnion().DefaultLiteral = true @@ -11543,7 +11548,7 @@ yydefault: case 197: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL *ColumnTypeOptions -//line sql.y:1507 +//line sql.y:1509 { yyDollar[1].columnTypeOptionsUnion().OnUpdate = yyDollar[4].exprUnion() yyLOCAL = yyDollar[1].columnTypeOptionsUnion() @@ -11552,7 +11557,7 @@ yydefault: case 198: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL *ColumnTypeOptions -//line sql.y:1512 +//line sql.y:1514 { yyDollar[1].columnTypeOptionsUnion().Autoincrement = true yyLOCAL = 
yyDollar[1].columnTypeOptionsUnion() @@ -11561,7 +11566,7 @@ yydefault: case 199: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *ColumnTypeOptions -//line sql.y:1517 +//line sql.y:1519 { yyDollar[1].columnTypeOptionsUnion().Comment = NewStrLiteral(yyDollar[3].str) yyLOCAL = yyDollar[1].columnTypeOptionsUnion() @@ -11570,7 +11575,7 @@ yydefault: case 200: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL *ColumnTypeOptions -//line sql.y:1522 +//line sql.y:1524 { yyDollar[1].columnTypeOptionsUnion().KeyOpt = yyDollar[2].colKeyOptUnion() yyLOCAL = yyDollar[1].columnTypeOptionsUnion() @@ -11578,14 +11583,14 @@ yydefault: yyVAL.union = yyLOCAL case 201: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:1527 +//line sql.y:1529 { yyDollar[1].columnTypeOptionsUnion().Collate = encodeSQLString(yyDollar[3].str) } case 202: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *ColumnTypeOptions -//line sql.y:1531 +//line sql.y:1533 { yyDollar[1].columnTypeOptionsUnion().Collate = string(yyDollar[3].identifierCI.String()) yyLOCAL = yyDollar[1].columnTypeOptionsUnion() @@ -11593,14 +11598,14 @@ yydefault: yyVAL.union = yyLOCAL case 203: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:1536 +//line sql.y:1538 { yyDollar[1].columnTypeOptionsUnion().Format = yyDollar[3].columnFormatUnion() } case 204: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *ColumnTypeOptions -//line sql.y:1540 +//line sql.y:1542 { yyDollar[1].columnTypeOptionsUnion().SRID = NewIntLiteral(yyDollar[3].str) yyLOCAL = yyDollar[1].columnTypeOptionsUnion() @@ -11609,20 +11614,18 @@ yydefault: case 205: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL *ColumnTypeOptions -//line sql.y:1545 +//line sql.y:1547 { - val := false - yyDollar[1].columnTypeOptionsUnion().Invisible = &val + yyDollar[1].columnTypeOptionsUnion().Invisible = ptr.Of(false) yyLOCAL = yyDollar[1].columnTypeOptionsUnion() } yyVAL.union = yyLOCAL case 206: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL *ColumnTypeOptions -//line sql.y:1551 +//line sql.y:1552 { - val := true - 
yyDollar[1].columnTypeOptionsUnion().Invisible = &val + yyDollar[1].columnTypeOptionsUnion().Invisible = ptr.Of(true) yyLOCAL = yyDollar[1].columnTypeOptionsUnion() } yyVAL.union = yyLOCAL @@ -11700,25 +11703,23 @@ yydefault: var yyLOCAL *ColumnTypeOptions //line sql.y:1599 { - val := true - yyDollar[1].columnTypeOptionsUnion().Null = &val + yyDollar[1].columnTypeOptionsUnion().Null = ptr.Of(true) yyLOCAL = yyDollar[1].columnTypeOptionsUnion() } yyVAL.union = yyLOCAL case 217: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *ColumnTypeOptions -//line sql.y:1605 +//line sql.y:1604 { - val := false - yyDollar[1].columnTypeOptionsUnion().Null = &val + yyDollar[1].columnTypeOptionsUnion().Null = ptr.Of(false) yyLOCAL = yyDollar[1].columnTypeOptionsUnion() } yyVAL.union = yyLOCAL case 218: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *ColumnTypeOptions -//line sql.y:1611 +//line sql.y:1609 { yyDollar[1].columnTypeOptionsUnion().Comment = NewStrLiteral(yyDollar[3].str) yyLOCAL = yyDollar[1].columnTypeOptionsUnion() @@ -11727,7 +11728,7 @@ yydefault: case 219: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL *ColumnTypeOptions -//line sql.y:1616 +//line sql.y:1614 { yyDollar[1].columnTypeOptionsUnion().KeyOpt = yyDollar[2].colKeyOptUnion() yyLOCAL = yyDollar[1].columnTypeOptionsUnion() @@ -11736,27 +11737,25 @@ yydefault: case 220: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL *ColumnTypeOptions -//line sql.y:1621 +//line sql.y:1619 { - val := false - yyDollar[1].columnTypeOptionsUnion().Invisible = &val + yyDollar[1].columnTypeOptionsUnion().Invisible = ptr.Of(false) yyLOCAL = yyDollar[1].columnTypeOptionsUnion() } yyVAL.union = yyLOCAL case 221: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL *ColumnTypeOptions -//line sql.y:1627 +//line sql.y:1624 { - val := true - yyDollar[1].columnTypeOptionsUnion().Invisible = &val + yyDollar[1].columnTypeOptionsUnion().Invisible = ptr.Of(true) yyLOCAL = yyDollar[1].columnTypeOptionsUnion() } yyVAL.union = yyLOCAL case 222: yyDollar = yyS[yypt-1 : 
yypt+1] var yyLOCAL Expr -//line sql.y:1635 +//line sql.y:1631 { yyLOCAL = yyDollar[1].exprUnion() } @@ -11764,7 +11763,7 @@ yydefault: case 224: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL Expr -//line sql.y:1642 +//line sql.y:1638 { yyLOCAL = &CurTimeFuncExpr{Name: NewIdentifierCI("current_timestamp"), Fsp: yyDollar[2].integerUnion()} } @@ -11772,7 +11771,7 @@ yydefault: case 225: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL Expr -//line sql.y:1646 +//line sql.y:1642 { yyLOCAL = &CurTimeFuncExpr{Name: NewIdentifierCI("localtime"), Fsp: yyDollar[2].integerUnion()} } @@ -11780,7 +11779,7 @@ yydefault: case 226: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL Expr -//line sql.y:1650 +//line sql.y:1646 { yyLOCAL = &CurTimeFuncExpr{Name: NewIdentifierCI("localtimestamp"), Fsp: yyDollar[2].integerUnion()} } @@ -11788,7 +11787,7 @@ yydefault: case 227: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL Expr -//line sql.y:1654 +//line sql.y:1650 { yyLOCAL = &CurTimeFuncExpr{Name: NewIdentifierCI("utc_timestamp"), Fsp: yyDollar[2].integerUnion()} } @@ -11796,7 +11795,7 @@ yydefault: case 228: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL Expr -//line sql.y:1658 +//line sql.y:1654 { yyLOCAL = &CurTimeFuncExpr{Name: NewIdentifierCI("now"), Fsp: yyDollar[2].integerUnion()} } @@ -11804,7 +11803,7 @@ yydefault: case 229: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL Expr -//line sql.y:1662 +//line sql.y:1658 { yyLOCAL = &CurTimeFuncExpr{Name: NewIdentifierCI("sysdate"), Fsp: yyDollar[2].integerUnion()} } @@ -11812,7 +11811,7 @@ yydefault: case 232: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL Expr -//line sql.y:1672 +//line sql.y:1668 { yyLOCAL = &NullVal{} } @@ -11820,7 +11819,7 @@ yydefault: case 234: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL Expr -//line sql.y:1679 +//line sql.y:1675 { yyLOCAL = yyDollar[2].exprUnion() } @@ -11828,7 +11827,7 @@ yydefault: case 235: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL Expr -//line sql.y:1683 +//line sql.y:1679 { yyLOCAL = &UnaryExpr{Operator: 
UMinusOp, Expr: yyDollar[2].exprUnion()} } @@ -11836,7 +11835,7 @@ yydefault: case 236: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL Expr -//line sql.y:1689 +//line sql.y:1685 { yyLOCAL = yyDollar[1].exprUnion() } @@ -11844,7 +11843,7 @@ yydefault: case 237: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL Expr -//line sql.y:1693 +//line sql.y:1689 { yyLOCAL = yyDollar[1].exprUnion() } @@ -11852,7 +11851,7 @@ yydefault: case 238: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL Expr -//line sql.y:1697 +//line sql.y:1693 { yyLOCAL = yyDollar[1].boolValUnion() } @@ -11860,7 +11859,7 @@ yydefault: case 239: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL Expr -//line sql.y:1701 +//line sql.y:1697 { yyLOCAL = NewHexLiteral(yyDollar[1].str) } @@ -11868,7 +11867,7 @@ yydefault: case 240: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL Expr -//line sql.y:1705 +//line sql.y:1701 { yyLOCAL = NewHexNumLiteral(yyDollar[1].str) } @@ -11876,7 +11875,7 @@ yydefault: case 241: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL Expr -//line sql.y:1709 +//line sql.y:1705 { yyLOCAL = NewBitLiteral(yyDollar[1].str[2:]) } @@ -11884,7 +11883,7 @@ yydefault: case 242: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL Expr -//line sql.y:1713 +//line sql.y:1709 { yyLOCAL = NewBitLiteral(yyDollar[1].str) } @@ -11892,7 +11891,7 @@ yydefault: case 243: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL Expr -//line sql.y:1717 +//line sql.y:1713 { yyLOCAL = parseBindVariable(yylex, yyDollar[1].str[1:]) } @@ -11900,7 +11899,7 @@ yydefault: case 244: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL Expr -//line sql.y:1721 +//line sql.y:1717 { yyLOCAL = &IntroducerExpr{CharacterSet: yyDollar[1].str, Expr: NewBitLiteral(yyDollar[2].str)} } @@ -11908,7 +11907,7 @@ yydefault: case 245: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL Expr -//line sql.y:1725 +//line sql.y:1721 { yyLOCAL = &IntroducerExpr{CharacterSet: yyDollar[1].str, Expr: NewHexNumLiteral(yyDollar[2].str)} } @@ -11916,7 +11915,7 @@ yydefault: case 246: yyDollar = yyS[yypt-2 : 
yypt+1] var yyLOCAL Expr -//line sql.y:1729 +//line sql.y:1725 { yyLOCAL = &IntroducerExpr{CharacterSet: yyDollar[1].str, Expr: NewBitLiteral(yyDollar[2].str[2:])} } @@ -11924,7 +11923,7 @@ yydefault: case 247: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL Expr -//line sql.y:1733 +//line sql.y:1729 { yyLOCAL = &IntroducerExpr{CharacterSet: yyDollar[1].str, Expr: NewHexLiteral(yyDollar[2].str)} } @@ -11932,7 +11931,7 @@ yydefault: case 248: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL Expr -//line sql.y:1737 +//line sql.y:1733 { yyLOCAL = &IntroducerExpr{CharacterSet: yyDollar[1].str, Expr: yyDollar[2].exprUnion()} } @@ -11940,7 +11939,7 @@ yydefault: case 249: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL Expr -//line sql.y:1741 +//line sql.y:1737 { arg := parseBindVariable(yylex, yyDollar[2].str[1:]) yyLOCAL = &IntroducerExpr{CharacterSet: yyDollar[1].str, Expr: arg} @@ -11949,7 +11948,7 @@ yydefault: case 250: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL Expr -//line sql.y:1746 +//line sql.y:1742 { yyLOCAL = NewDateLiteral(yyDollar[2].str) } @@ -11957,7 +11956,7 @@ yydefault: case 251: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL Expr -//line sql.y:1750 +//line sql.y:1746 { yyLOCAL = NewTimeLiteral(yyDollar[2].str) } @@ -11965,267 +11964,267 @@ yydefault: case 252: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL Expr -//line sql.y:1754 +//line sql.y:1750 { yyLOCAL = NewTimestampLiteral(yyDollar[2].str) } yyVAL.union = yyLOCAL case 253: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:1760 +//line sql.y:1756 { yyVAL.str = Armscii8Str } case 254: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:1764 +//line sql.y:1760 { yyVAL.str = ASCIIStr } case 255: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:1768 +//line sql.y:1764 { yyVAL.str = Big5Str } case 256: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:1772 +//line sql.y:1768 { yyVAL.str = UBinaryStr } case 257: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:1776 +//line sql.y:1772 { yyVAL.str = Cp1250Str } case 258: yyDollar = 
yyS[yypt-1 : yypt+1] -//line sql.y:1780 +//line sql.y:1776 { yyVAL.str = Cp1251Str } case 259: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:1784 +//line sql.y:1780 { yyVAL.str = Cp1256Str } case 260: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:1788 +//line sql.y:1784 { yyVAL.str = Cp1257Str } case 261: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:1792 +//line sql.y:1788 { yyVAL.str = Cp850Str } case 262: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:1796 +//line sql.y:1792 { yyVAL.str = Cp852Str } case 263: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:1800 +//line sql.y:1796 { yyVAL.str = Cp866Str } case 264: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:1804 +//line sql.y:1800 { yyVAL.str = Cp932Str } case 265: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:1808 +//line sql.y:1804 { yyVAL.str = Dec8Str } case 266: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:1812 +//line sql.y:1808 { yyVAL.str = EucjpmsStr } case 267: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:1816 +//line sql.y:1812 { yyVAL.str = EuckrStr } case 268: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:1820 +//line sql.y:1816 { yyVAL.str = Gb18030Str } case 269: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:1824 +//line sql.y:1820 { yyVAL.str = Gb2312Str } case 270: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:1828 +//line sql.y:1824 { yyVAL.str = GbkStr } case 271: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:1832 +//line sql.y:1828 { yyVAL.str = Geostd8Str } case 272: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:1836 +//line sql.y:1832 { yyVAL.str = GreekStr } case 273: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:1840 +//line sql.y:1836 { yyVAL.str = HebrewStr } case 274: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:1844 +//line sql.y:1840 { yyVAL.str = Hp8Str } case 275: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:1848 +//line sql.y:1844 { yyVAL.str = Keybcs2Str } case 276: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:1852 +//line sql.y:1848 { yyVAL.str = Koi8rStr } case 277: 
yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:1856 +//line sql.y:1852 { yyVAL.str = Koi8uStr } case 278: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:1860 +//line sql.y:1856 { yyVAL.str = Latin1Str } case 279: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:1864 +//line sql.y:1860 { yyVAL.str = Latin2Str } case 280: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:1868 +//line sql.y:1864 { yyVAL.str = Latin5Str } case 281: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:1872 +//line sql.y:1868 { yyVAL.str = Latin7Str } case 282: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:1876 +//line sql.y:1872 { yyVAL.str = MacceStr } case 283: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:1880 +//line sql.y:1876 { yyVAL.str = MacromanStr } case 284: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:1884 +//line sql.y:1880 { yyVAL.str = SjisStr } case 285: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:1888 +//line sql.y:1884 { yyVAL.str = Swe7Str } case 286: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:1892 +//line sql.y:1888 { yyVAL.str = Tis620Str } case 287: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:1896 +//line sql.y:1892 { yyVAL.str = Ucs2Str } case 288: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:1900 +//line sql.y:1896 { yyVAL.str = UjisStr } case 289: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:1904 +//line sql.y:1900 { yyVAL.str = Utf16Str } case 290: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:1908 +//line sql.y:1904 { yyVAL.str = Utf16leStr } case 291: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:1912 +//line sql.y:1908 { yyVAL.str = Utf32Str } case 292: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:1916 +//line sql.y:1912 { yyVAL.str = Utf8mb3Str } case 293: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:1920 +//line sql.y:1916 { yyVAL.str = Utf8mb4Str } case 294: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:1924 +//line sql.y:1920 { yyVAL.str = Utf8mb3Str } case 297: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL Expr -//line sql.y:1934 +//line sql.y:1930 { yyLOCAL 
= NewIntLiteral(yyDollar[1].str) } @@ -12233,7 +12232,7 @@ yydefault: case 298: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL Expr -//line sql.y:1938 +//line sql.y:1934 { yyLOCAL = NewFloatLiteral(yyDollar[1].str) } @@ -12241,7 +12240,7 @@ yydefault: case 299: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL Expr -//line sql.y:1942 +//line sql.y:1938 { yyLOCAL = NewDecimalLiteral(yyDollar[1].str) } @@ -12249,7 +12248,7 @@ yydefault: case 300: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL Expr -//line sql.y:1948 +//line sql.y:1944 { yyLOCAL = NewStrLiteral(yyDollar[1].str) } @@ -12257,7 +12256,7 @@ yydefault: case 301: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL Expr -//line sql.y:1952 +//line sql.y:1948 { yyLOCAL = &UnaryExpr{Operator: NStringOp, Expr: NewStrLiteral(yyDollar[1].str)} } @@ -12265,7 +12264,7 @@ yydefault: case 302: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL Expr -//line sql.y:1956 +//line sql.y:1952 { yyLOCAL = &IntroducerExpr{CharacterSet: yyDollar[1].str, Expr: NewStrLiteral(yyDollar[2].str)} } @@ -12273,7 +12272,7 @@ yydefault: case 303: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL Expr -//line sql.y:1962 +//line sql.y:1958 { yyLOCAL = yyDollar[1].exprUnion() } @@ -12281,7 +12280,7 @@ yydefault: case 304: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL Expr -//line sql.y:1966 +//line sql.y:1962 { yyLOCAL = parseBindVariable(yylex, yyDollar[1].str[1:]) } @@ -12289,7 +12288,7 @@ yydefault: case 305: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL ColumnKeyOption -//line sql.y:1972 +//line sql.y:1968 { yyLOCAL = ColKeyPrimary } @@ -12297,7 +12296,7 @@ yydefault: case 306: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL ColumnKeyOption -//line sql.y:1976 +//line sql.y:1972 { yyLOCAL = ColKeyUnique } @@ -12305,7 +12304,7 @@ yydefault: case 307: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL ColumnKeyOption -//line sql.y:1980 +//line sql.y:1976 { yyLOCAL = ColKeyUniqueKey } @@ -12313,14 +12312,14 @@ yydefault: case 308: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL 
ColumnKeyOption -//line sql.y:1984 +//line sql.y:1980 { yyLOCAL = ColKey } yyVAL.union = yyLOCAL case 309: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:1990 +//line sql.y:1986 { yyVAL.columnType = yyDollar[1].columnType yyVAL.columnType.Unsigned = yyDollar[2].booleanUnion() @@ -12328,74 +12327,74 @@ yydefault: } case 313: yyDollar = yyS[yypt-2 : yypt+1] -//line sql.y:2001 +//line sql.y:1997 { yyVAL.columnType = yyDollar[1].columnType - yyVAL.columnType.Length = yyDollar[2].literalUnion() + yyVAL.columnType.Length = yyDollar[2].intPtrUnion() } case 314: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:2006 +//line sql.y:2002 { yyVAL.columnType = yyDollar[1].columnType } case 315: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:2012 +//line sql.y:2008 { yyVAL.columnType = &ColumnType{Type: string(yyDollar[1].str)} } case 316: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:2016 +//line sql.y:2012 { yyVAL.columnType = &ColumnType{Type: string(yyDollar[1].str)} } case 317: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:2020 +//line sql.y:2016 { yyVAL.columnType = &ColumnType{Type: string(yyDollar[1].str)} } case 318: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:2024 +//line sql.y:2020 { yyVAL.columnType = &ColumnType{Type: string(yyDollar[1].str)} } case 319: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:2028 +//line sql.y:2024 { yyVAL.columnType = &ColumnType{Type: string(yyDollar[1].str)} } case 320: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:2032 +//line sql.y:2028 { yyVAL.columnType = &ColumnType{Type: string(yyDollar[1].str)} } case 321: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:2036 +//line sql.y:2032 { yyVAL.columnType = &ColumnType{Type: string(yyDollar[1].str)} } case 322: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:2040 +//line sql.y:2036 { yyVAL.columnType = &ColumnType{Type: string(yyDollar[1].str)} } case 323: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:2044 +//line sql.y:2040 { yyVAL.columnType = &ColumnType{Type: string(yyDollar[1].str)} } case 324: 
yyDollar = yyS[yypt-2 : yypt+1] -//line sql.y:2050 +//line sql.y:2046 { yyVAL.columnType = &ColumnType{Type: string(yyDollar[1].str)} yyVAL.columnType.Length = yyDollar[2].LengthScaleOption.Length @@ -12403,7 +12402,7 @@ yydefault: } case 325: yyDollar = yyS[yypt-2 : yypt+1] -//line sql.y:2056 +//line sql.y:2052 { yyVAL.columnType = &ColumnType{Type: string(yyDollar[1].str)} yyVAL.columnType.Length = yyDollar[2].LengthScaleOption.Length @@ -12411,7 +12410,7 @@ yydefault: } case 326: yyDollar = yyS[yypt-2 : yypt+1] -//line sql.y:2062 +//line sql.y:2058 { yyVAL.columnType = &ColumnType{Type: string(yyDollar[1].str)} yyVAL.columnType.Length = yyDollar[2].LengthScaleOption.Length @@ -12419,7 +12418,7 @@ yydefault: } case 327: yyDollar = yyS[yypt-2 : yypt+1] -//line sql.y:2068 +//line sql.y:2064 { yyVAL.columnType = &ColumnType{Type: string(yyDollar[1].str)} yyVAL.columnType.Length = yyDollar[2].LengthScaleOption.Length @@ -12427,7 +12426,7 @@ yydefault: } case 328: yyDollar = yyS[yypt-2 : yypt+1] -//line sql.y:2074 +//line sql.y:2070 { yyVAL.columnType = &ColumnType{Type: string(yyDollar[1].str)} yyVAL.columnType.Length = yyDollar[2].LengthScaleOption.Length @@ -12435,7 +12434,7 @@ yydefault: } case 329: yyDollar = yyS[yypt-2 : yypt+1] -//line sql.y:2080 +//line sql.y:2076 { yyVAL.columnType = &ColumnType{Type: string(yyDollar[1].str)} yyVAL.columnType.Length = yyDollar[2].LengthScaleOption.Length @@ -12443,7 +12442,7 @@ yydefault: } case 330: yyDollar = yyS[yypt-2 : yypt+1] -//line sql.y:2086 +//line sql.y:2082 { yyVAL.columnType = &ColumnType{Type: string(yyDollar[1].str)} yyVAL.columnType.Length = yyDollar[2].LengthScaleOption.Length @@ -12451,265 +12450,265 @@ yydefault: } case 331: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:2094 +//line sql.y:2090 { yyVAL.columnType = &ColumnType{Type: string(yyDollar[1].str)} } case 332: yyDollar = yyS[yypt-2 : yypt+1] -//line sql.y:2098 +//line sql.y:2094 { - yyVAL.columnType = &ColumnType{Type: string(yyDollar[1].str), 
Length: yyDollar[2].literalUnion()} + yyVAL.columnType = &ColumnType{Type: string(yyDollar[1].str), Length: yyDollar[2].intPtrUnion()} } case 333: yyDollar = yyS[yypt-2 : yypt+1] -//line sql.y:2102 +//line sql.y:2098 { - yyVAL.columnType = &ColumnType{Type: string(yyDollar[1].str), Length: yyDollar[2].literalUnion()} + yyVAL.columnType = &ColumnType{Type: string(yyDollar[1].str), Length: yyDollar[2].intPtrUnion()} } case 334: yyDollar = yyS[yypt-2 : yypt+1] -//line sql.y:2106 +//line sql.y:2102 { - yyVAL.columnType = &ColumnType{Type: string(yyDollar[1].str), Length: yyDollar[2].literalUnion()} + yyVAL.columnType = &ColumnType{Type: string(yyDollar[1].str), Length: yyDollar[2].intPtrUnion()} } case 335: yyDollar = yyS[yypt-2 : yypt+1] -//line sql.y:2110 +//line sql.y:2106 { - yyVAL.columnType = &ColumnType{Type: string(yyDollar[1].str), Length: yyDollar[2].literalUnion()} + yyVAL.columnType = &ColumnType{Type: string(yyDollar[1].str), Length: yyDollar[2].intPtrUnion()} } case 336: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:2116 +//line sql.y:2112 { - yyVAL.columnType = &ColumnType{Type: string(yyDollar[1].str), Length: yyDollar[2].literalUnion(), Charset: yyDollar[3].columnCharset} + yyVAL.columnType = &ColumnType{Type: string(yyDollar[1].str), Length: yyDollar[2].intPtrUnion(), Charset: yyDollar[3].columnCharset} } case 337: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:2120 +//line sql.y:2116 { // CHAR BYTE is an alias for binary. 
See also: // https://dev.mysql.com/doc/refman/8.0/en/string-type-syntax.html - yyVAL.columnType = &ColumnType{Type: "binary", Length: yyDollar[2].literalUnion()} + yyVAL.columnType = &ColumnType{Type: "binary", Length: yyDollar[2].intPtrUnion()} } case 338: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:2126 +//line sql.y:2122 { - yyVAL.columnType = &ColumnType{Type: string(yyDollar[1].str), Length: yyDollar[2].literalUnion(), Charset: yyDollar[3].columnCharset} + yyVAL.columnType = &ColumnType{Type: string(yyDollar[1].str), Length: yyDollar[2].intPtrUnion(), Charset: yyDollar[3].columnCharset} } case 339: yyDollar = yyS[yypt-2 : yypt+1] -//line sql.y:2130 +//line sql.y:2126 { - yyVAL.columnType = &ColumnType{Type: string(yyDollar[1].str), Length: yyDollar[2].literalUnion()} + yyVAL.columnType = &ColumnType{Type: string(yyDollar[1].str), Length: yyDollar[2].intPtrUnion()} } case 340: yyDollar = yyS[yypt-2 : yypt+1] -//line sql.y:2134 +//line sql.y:2130 { - yyVAL.columnType = &ColumnType{Type: string(yyDollar[1].str), Length: yyDollar[2].literalUnion()} + yyVAL.columnType = &ColumnType{Type: string(yyDollar[1].str), Length: yyDollar[2].intPtrUnion()} } case 341: yyDollar = yyS[yypt-2 : yypt+1] -//line sql.y:2138 +//line sql.y:2134 { yyVAL.columnType = &ColumnType{Type: string(yyDollar[1].str), Charset: yyDollar[2].columnCharset} } case 342: yyDollar = yyS[yypt-2 : yypt+1] -//line sql.y:2142 +//line sql.y:2138 { yyVAL.columnType = &ColumnType{Type: string(yyDollar[1].str), Charset: yyDollar[2].columnCharset} } case 343: yyDollar = yyS[yypt-2 : yypt+1] -//line sql.y:2146 +//line sql.y:2142 { yyVAL.columnType = &ColumnType{Type: string(yyDollar[1].str), Charset: yyDollar[2].columnCharset} } case 344: yyDollar = yyS[yypt-2 : yypt+1] -//line sql.y:2150 +//line sql.y:2146 { yyVAL.columnType = &ColumnType{Type: string(yyDollar[1].str), Charset: yyDollar[2].columnCharset} } case 345: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:2154 +//line sql.y:2150 { yyVAL.columnType = 
&ColumnType{Type: string(yyDollar[1].str)} } case 346: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:2158 +//line sql.y:2154 { yyVAL.columnType = &ColumnType{Type: string(yyDollar[1].str)} } case 347: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:2162 +//line sql.y:2158 { yyVAL.columnType = &ColumnType{Type: string(yyDollar[1].str)} } case 348: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:2166 +//line sql.y:2162 { yyVAL.columnType = &ColumnType{Type: string(yyDollar[1].str)} } case 349: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:2170 +//line sql.y:2166 { yyVAL.columnType = &ColumnType{Type: string(yyDollar[1].str)} } case 350: yyDollar = yyS[yypt-5 : yypt+1] -//line sql.y:2174 +//line sql.y:2170 { yyVAL.columnType = &ColumnType{Type: string(yyDollar[1].str), EnumValues: yyDollar[3].strs, Charset: yyDollar[5].columnCharset} } case 351: yyDollar = yyS[yypt-5 : yypt+1] -//line sql.y:2179 +//line sql.y:2175 { yyVAL.columnType = &ColumnType{Type: string(yyDollar[1].str), EnumValues: yyDollar[3].strs, Charset: yyDollar[5].columnCharset} } case 352: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:2185 +//line sql.y:2181 { yyVAL.columnType = &ColumnType{Type: string(yyDollar[1].str)} } case 353: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:2189 +//line sql.y:2185 { yyVAL.columnType = &ColumnType{Type: string(yyDollar[1].str)} } case 354: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:2193 +//line sql.y:2189 { yyVAL.columnType = &ColumnType{Type: string(yyDollar[1].str)} } case 355: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:2197 +//line sql.y:2193 { yyVAL.columnType = &ColumnType{Type: string(yyDollar[1].str)} } case 356: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:2201 +//line sql.y:2197 { yyVAL.columnType = &ColumnType{Type: string(yyDollar[1].str)} } case 357: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:2205 +//line sql.y:2201 { yyVAL.columnType = &ColumnType{Type: string(yyDollar[1].str)} } case 358: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:2209 +//line 
sql.y:2205 { yyVAL.columnType = &ColumnType{Type: string(yyDollar[1].str)} } case 359: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:2213 +//line sql.y:2209 { yyVAL.columnType = &ColumnType{Type: string(yyDollar[1].str)} } case 360: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:2219 +//line sql.y:2215 { yyVAL.strs = make([]string, 0, 4) yyVAL.strs = append(yyVAL.strs, encodeSQLString(yyDollar[1].str)) } case 361: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:2224 +//line sql.y:2220 { yyVAL.strs = append(yyDollar[1].strs, encodeSQLString(yyDollar[3].str)) } case 362: yyDollar = yyS[yypt-0 : yypt+1] - var yyLOCAL *Literal -//line sql.y:2229 + var yyLOCAL *int +//line sql.y:2225 { yyLOCAL = nil } yyVAL.union = yyLOCAL case 363: yyDollar = yyS[yypt-3 : yypt+1] - var yyLOCAL *Literal -//line sql.y:2233 + var yyLOCAL *int +//line sql.y:2229 { - yyLOCAL = NewIntLiteral(yyDollar[2].str) + yyLOCAL = ptr.Of(convertStringToInt(yyDollar[2].str)) } yyVAL.union = yyLOCAL case 364: yyDollar = yyS[yypt-0 : yypt+1] -//line sql.y:2238 +//line sql.y:2234 { yyVAL.LengthScaleOption = LengthScaleOption{} } case 365: yyDollar = yyS[yypt-5 : yypt+1] -//line sql.y:2242 +//line sql.y:2238 { yyVAL.LengthScaleOption = LengthScaleOption{ - Length: NewIntLiteral(yyDollar[2].str), - Scale: NewIntLiteral(yyDollar[4].str), + Length: ptr.Of(convertStringToInt(yyDollar[2].str)), + Scale: ptr.Of(convertStringToInt(yyDollar[4].str)), } } case 366: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:2251 +//line sql.y:2247 { yyVAL.LengthScaleOption = yyDollar[1].LengthScaleOption } case 367: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:2255 +//line sql.y:2251 { yyVAL.LengthScaleOption = LengthScaleOption{ - Length: NewIntLiteral(yyDollar[2].str), + Length: ptr.Of(convertStringToInt(yyDollar[2].str)), } } case 368: yyDollar = yyS[yypt-0 : yypt+1] -//line sql.y:2262 +//line sql.y:2258 { yyVAL.LengthScaleOption = LengthScaleOption{} } case 369: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:2266 +//line 
sql.y:2262 { yyVAL.LengthScaleOption = LengthScaleOption{ - Length: NewIntLiteral(yyDollar[2].str), + Length: ptr.Of(convertStringToInt(yyDollar[2].str)), } } case 370: yyDollar = yyS[yypt-5 : yypt+1] -//line sql.y:2272 +//line sql.y:2268 { yyVAL.LengthScaleOption = LengthScaleOption{ - Length: NewIntLiteral(yyDollar[2].str), - Scale: NewIntLiteral(yyDollar[4].str), + Length: ptr.Of(convertStringToInt(yyDollar[2].str)), + Scale: ptr.Of(convertStringToInt(yyDollar[4].str)), } } case 371: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL bool -//line sql.y:2280 +//line sql.y:2276 { yyLOCAL = false } @@ -12717,7 +12716,7 @@ yydefault: case 372: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL bool -//line sql.y:2284 +//line sql.y:2280 { yyLOCAL = true } @@ -12725,7 +12724,7 @@ yydefault: case 373: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL bool -//line sql.y:2288 +//line sql.y:2284 { yyLOCAL = false } @@ -12733,7 +12732,7 @@ yydefault: case 374: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL bool -//line sql.y:2293 +//line sql.y:2289 { yyLOCAL = false } @@ -12741,66 +12740,66 @@ yydefault: case 375: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL bool -//line sql.y:2297 +//line sql.y:2293 { yyLOCAL = true } yyVAL.union = yyLOCAL case 376: yyDollar = yyS[yypt-0 : yypt+1] -//line sql.y:2302 +//line sql.y:2298 { yyVAL.columnCharset = ColumnCharset{} } case 377: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:2306 +//line sql.y:2302 { yyVAL.columnCharset = ColumnCharset{Name: string(yyDollar[2].identifierCI.String()), Binary: yyDollar[3].booleanUnion()} } case 378: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:2310 +//line sql.y:2306 { yyVAL.columnCharset = ColumnCharset{Name: encodeSQLString(yyDollar[2].str), Binary: yyDollar[3].booleanUnion()} } case 379: yyDollar = yyS[yypt-2 : yypt+1] -//line sql.y:2314 +//line sql.y:2310 { yyVAL.columnCharset = ColumnCharset{Name: string(yyDollar[2].str)} } case 380: yyDollar = yyS[yypt-2 : yypt+1] -//line sql.y:2318 +//line sql.y:2314 { // ASCII: 
Shorthand for CHARACTER SET latin1. yyVAL.columnCharset = ColumnCharset{Name: "latin1", Binary: yyDollar[2].booleanUnion()} } case 381: yyDollar = yyS[yypt-2 : yypt+1] -//line sql.y:2323 +//line sql.y:2319 { // UNICODE: Shorthand for CHARACTER SET ucs2. yyVAL.columnCharset = ColumnCharset{Name: "ucs2", Binary: yyDollar[2].booleanUnion()} } case 382: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:2328 +//line sql.y:2324 { // BINARY: Shorthand for default CHARACTER SET but with binary collation yyVAL.columnCharset = ColumnCharset{Name: "", Binary: true} } case 383: yyDollar = yyS[yypt-2 : yypt+1] -//line sql.y:2333 +//line sql.y:2329 { // BINARY ASCII: Shorthand for CHARACTER SET latin1 with binary collation yyVAL.columnCharset = ColumnCharset{Name: "latin1", Binary: true} } case 384: yyDollar = yyS[yypt-2 : yypt+1] -//line sql.y:2338 +//line sql.y:2334 { // BINARY UNICODE: Shorthand for CHARACTER SET ucs2 with binary collation yyVAL.columnCharset = ColumnCharset{Name: "ucs2", Binary: true} @@ -12808,7 +12807,7 @@ yydefault: case 385: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL bool -//line sql.y:2344 +//line sql.y:2340 { yyLOCAL = false } @@ -12816,33 +12815,33 @@ yydefault: case 386: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL bool -//line sql.y:2348 +//line sql.y:2344 { yyLOCAL = true } yyVAL.union = yyLOCAL case 387: yyDollar = yyS[yypt-0 : yypt+1] -//line sql.y:2353 +//line sql.y:2349 { yyVAL.str = "" } case 388: yyDollar = yyS[yypt-2 : yypt+1] -//line sql.y:2357 +//line sql.y:2353 { yyVAL.str = string(yyDollar[2].identifierCI.String()) } case 389: yyDollar = yyS[yypt-2 : yypt+1] -//line sql.y:2361 +//line sql.y:2357 { yyVAL.str = encodeSQLString(yyDollar[2].str) } case 390: yyDollar = yyS[yypt-5 : yypt+1] var yyLOCAL *IndexDefinition -//line sql.y:2367 +//line sql.y:2363 { yyLOCAL = &IndexDefinition{Info: yyDollar[1].indexInfoUnion(), Columns: yyDollar[3].indexColumnsUnion(), Options: yyDollar[5].indexOptionsUnion()} } @@ -12850,7 +12849,7 @@ yydefault: case 
391: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL []*IndexOption -//line sql.y:2372 +//line sql.y:2368 { yyLOCAL = nil } @@ -12858,7 +12857,7 @@ yydefault: case 392: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL []*IndexOption -//line sql.y:2376 +//line sql.y:2372 { yyLOCAL = yyDollar[1].indexOptionsUnion() } @@ -12866,14 +12865,14 @@ yydefault: case 393: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL []*IndexOption -//line sql.y:2382 +//line sql.y:2378 { yyLOCAL = []*IndexOption{yyDollar[1].indexOptionUnion()} } yyVAL.union = yyLOCAL case 394: yyDollar = yyS[yypt-2 : yypt+1] -//line sql.y:2386 +//line sql.y:2382 { yySLICE := (*[]*IndexOption)(yyIaddr(yyVAL.union)) *yySLICE = append(*yySLICE, yyDollar[2].indexOptionUnion()) @@ -12881,7 +12880,7 @@ yydefault: case 395: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL *IndexOption -//line sql.y:2392 +//line sql.y:2388 { yyLOCAL = yyDollar[1].indexOptionUnion() } @@ -12889,7 +12888,7 @@ yydefault: case 396: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *IndexOption -//line sql.y:2396 +//line sql.y:2392 { // should not be string yyLOCAL = &IndexOption{Name: string(yyDollar[1].str), Value: NewIntLiteral(yyDollar[3].str)} @@ -12898,7 +12897,7 @@ yydefault: case 397: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL *IndexOption -//line sql.y:2401 +//line sql.y:2397 { yyLOCAL = &IndexOption{Name: string(yyDollar[1].str), Value: NewStrLiteral(yyDollar[2].str)} } @@ -12906,7 +12905,7 @@ yydefault: case 398: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL *IndexOption -//line sql.y:2405 +//line sql.y:2401 { yyLOCAL = &IndexOption{Name: string(yyDollar[1].str)} } @@ -12914,7 +12913,7 @@ yydefault: case 399: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL *IndexOption -//line sql.y:2409 +//line sql.y:2405 { yyLOCAL = &IndexOption{Name: string(yyDollar[1].str)} } @@ -12922,7 +12921,7 @@ yydefault: case 400: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *IndexOption -//line sql.y:2413 +//line sql.y:2409 { yyLOCAL = &IndexOption{Name: string(yyDollar[1].str) 
+ " " + string(yyDollar[2].str), String: yyDollar[3].identifierCI.String()} } @@ -12930,7 +12929,7 @@ yydefault: case 401: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *IndexOption -//line sql.y:2417 +//line sql.y:2413 { yyLOCAL = &IndexOption{Name: string(yyDollar[1].str), Value: NewStrLiteral(yyDollar[3].str)} } @@ -12938,27 +12937,27 @@ yydefault: case 402: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *IndexOption -//line sql.y:2421 +//line sql.y:2417 { yyLOCAL = &IndexOption{Name: string(yyDollar[1].str), Value: NewStrLiteral(yyDollar[3].str)} } yyVAL.union = yyLOCAL case 403: yyDollar = yyS[yypt-0 : yypt+1] -//line sql.y:2427 +//line sql.y:2423 { yyVAL.str = "" } case 404: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:2431 +//line sql.y:2427 { yyVAL.str = string(yyDollar[1].str) } case 405: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL *IndexInfo -//line sql.y:2437 +//line sql.y:2433 { yyLOCAL = &IndexInfo{Type: string(yyDollar[2].str) + " " + string(yyDollar[3].str), ConstraintName: NewIdentifierCI(yyDollar[1].str), Name: NewIdentifierCI("PRIMARY"), Primary: true, Unique: true} } @@ -12966,7 +12965,7 @@ yydefault: case 406: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *IndexInfo -//line sql.y:2441 +//line sql.y:2437 { yyLOCAL = &IndexInfo{Type: string(yyDollar[1].str) + " " + string(yyDollar[2].str), Name: NewIdentifierCI(yyDollar[3].str), Spatial: true, Unique: false} } @@ -12974,7 +12973,7 @@ yydefault: case 407: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *IndexInfo -//line sql.y:2445 +//line sql.y:2441 { yyLOCAL = &IndexInfo{Type: string(yyDollar[1].str) + " " + string(yyDollar[2].str), Name: NewIdentifierCI(yyDollar[3].str), Fulltext: true, Unique: false} } @@ -12982,7 +12981,7 @@ yydefault: case 408: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL *IndexInfo -//line sql.y:2449 +//line sql.y:2445 { yyLOCAL = &IndexInfo{Type: string(yyDollar[2].str) + " " + string(yyDollar[3].str), ConstraintName: NewIdentifierCI(yyDollar[1].str), Name: 
NewIdentifierCI(yyDollar[4].str), Unique: true} } @@ -12990,100 +12989,100 @@ yydefault: case 409: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL *IndexInfo -//line sql.y:2453 +//line sql.y:2449 { yyLOCAL = &IndexInfo{Type: string(yyDollar[1].str), Name: NewIdentifierCI(yyDollar[2].str), Unique: false} } yyVAL.union = yyLOCAL case 410: yyDollar = yyS[yypt-0 : yypt+1] -//line sql.y:2458 +//line sql.y:2454 { yyVAL.str = "" } case 411: yyDollar = yyS[yypt-2 : yypt+1] -//line sql.y:2462 +//line sql.y:2458 { yyVAL.str = yyDollar[2].str } case 412: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:2468 +//line sql.y:2464 { yyVAL.str = string(yyDollar[1].str) } case 413: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:2472 +//line sql.y:2468 { yyVAL.str = string(yyDollar[1].str) } case 414: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:2476 +//line sql.y:2472 { yyVAL.str = string(yyDollar[1].str) } case 415: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:2482 +//line sql.y:2478 { yyVAL.str = string(yyDollar[1].str) } case 416: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:2486 +//line sql.y:2482 { yyVAL.str = string(yyDollar[1].str) } case 417: yyDollar = yyS[yypt-0 : yypt+1] -//line sql.y:2491 +//line sql.y:2487 { yyVAL.str = "key" } case 418: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:2495 +//line sql.y:2491 { yyVAL.str = yyDollar[1].str } case 419: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:2501 +//line sql.y:2497 { yyVAL.str = string(yyDollar[1].str) } case 420: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:2505 +//line sql.y:2501 { yyVAL.str = string(yyDollar[1].str) } case 421: yyDollar = yyS[yypt-0 : yypt+1] -//line sql.y:2510 +//line sql.y:2506 { yyVAL.str = "" } case 422: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:2514 +//line sql.y:2510 { yyVAL.str = string(yyDollar[1].identifierCI.String()) } case 423: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL []*IndexColumn -//line sql.y:2520 +//line sql.y:2516 { yyLOCAL = []*IndexColumn{yyDollar[1].indexColumnUnion()} } 
yyVAL.union = yyLOCAL case 424: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:2524 +//line sql.y:2520 { yySLICE := (*[]*IndexColumn)(yyIaddr(yyVAL.union)) *yySLICE = append(*yySLICE, yyDollar[3].indexColumnUnion()) @@ -13091,15 +13090,15 @@ yydefault: case 425: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *IndexColumn -//line sql.y:2530 +//line sql.y:2526 { - yyLOCAL = &IndexColumn{Column: yyDollar[1].identifierCI, Length: yyDollar[2].literalUnion(), Direction: yyDollar[3].orderDirectionUnion()} + yyLOCAL = &IndexColumn{Column: yyDollar[1].identifierCI, Length: yyDollar[2].intPtrUnion(), Direction: yyDollar[3].orderDirectionUnion()} } yyVAL.union = yyLOCAL case 426: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL *IndexColumn -//line sql.y:2534 +//line sql.y:2530 { yyLOCAL = &IndexColumn{Expression: yyDollar[2].exprUnion(), Direction: yyDollar[4].orderDirectionUnion()} } @@ -13107,7 +13106,7 @@ yydefault: case 427: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *ConstraintDefinition -//line sql.y:2540 +//line sql.y:2536 { yyLOCAL = &ConstraintDefinition{Name: yyDollar[2].identifierCI, Details: yyDollar[3].constraintInfoUnion()} } @@ -13115,7 +13114,7 @@ yydefault: case 428: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL *ConstraintDefinition -//line sql.y:2544 +//line sql.y:2540 { yyLOCAL = &ConstraintDefinition{Details: yyDollar[1].constraintInfoUnion()} } @@ -13123,7 +13122,7 @@ yydefault: case 429: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *ConstraintDefinition -//line sql.y:2550 +//line sql.y:2546 { yyLOCAL = &ConstraintDefinition{Name: yyDollar[2].identifierCI, Details: yyDollar[3].constraintInfoUnion()} } @@ -13131,7 +13130,7 @@ yydefault: case 430: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL *ConstraintDefinition -//line sql.y:2554 +//line sql.y:2550 { yyLOCAL = &ConstraintDefinition{Details: yyDollar[1].constraintInfoUnion()} } @@ -13139,7 +13138,7 @@ yydefault: case 431: yyDollar = yyS[yypt-7 : yypt+1] var yyLOCAL ConstraintInfo -//line sql.y:2560 +//line 
sql.y:2556 { yyLOCAL = &ForeignKeyDefinition{IndexName: NewIdentifierCI(yyDollar[3].str), Source: yyDollar[5].columnsUnion(), ReferenceDefinition: yyDollar[7].referenceDefinitionUnion()} } @@ -13147,7 +13146,7 @@ yydefault: case 432: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL *ReferenceDefinition -//line sql.y:2566 +//line sql.y:2562 { yyLOCAL = &ReferenceDefinition{ReferencedTable: yyDollar[2].tableName, ReferencedColumns: yyDollar[4].columnsUnion(), Match: yyDollar[6].matchActionUnion()} } @@ -13155,7 +13154,7 @@ yydefault: case 433: yyDollar = yyS[yypt-7 : yypt+1] var yyLOCAL *ReferenceDefinition -//line sql.y:2570 +//line sql.y:2566 { yyLOCAL = &ReferenceDefinition{ReferencedTable: yyDollar[2].tableName, ReferencedColumns: yyDollar[4].columnsUnion(), Match: yyDollar[6].matchActionUnion(), OnDelete: yyDollar[7].referenceActionUnion()} } @@ -13163,7 +13162,7 @@ yydefault: case 434: yyDollar = yyS[yypt-7 : yypt+1] var yyLOCAL *ReferenceDefinition -//line sql.y:2574 +//line sql.y:2570 { yyLOCAL = &ReferenceDefinition{ReferencedTable: yyDollar[2].tableName, ReferencedColumns: yyDollar[4].columnsUnion(), Match: yyDollar[6].matchActionUnion(), OnUpdate: yyDollar[7].referenceActionUnion()} } @@ -13171,7 +13170,7 @@ yydefault: case 435: yyDollar = yyS[yypt-8 : yypt+1] var yyLOCAL *ReferenceDefinition -//line sql.y:2578 +//line sql.y:2574 { yyLOCAL = &ReferenceDefinition{ReferencedTable: yyDollar[2].tableName, ReferencedColumns: yyDollar[4].columnsUnion(), Match: yyDollar[6].matchActionUnion(), OnDelete: yyDollar[7].referenceActionUnion(), OnUpdate: yyDollar[8].referenceActionUnion()} } @@ -13179,7 +13178,7 @@ yydefault: case 436: yyDollar = yyS[yypt-8 : yypt+1] var yyLOCAL *ReferenceDefinition -//line sql.y:2582 +//line sql.y:2578 { yyLOCAL = &ReferenceDefinition{ReferencedTable: yyDollar[2].tableName, ReferencedColumns: yyDollar[4].columnsUnion(), Match: yyDollar[6].matchActionUnion(), OnUpdate: yyDollar[7].referenceActionUnion(), OnDelete: 
yyDollar[8].referenceActionUnion()} } @@ -13187,7 +13186,7 @@ yydefault: case 437: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL *ReferenceDefinition -//line sql.y:2587 +//line sql.y:2583 { yyLOCAL = nil } @@ -13195,7 +13194,7 @@ yydefault: case 438: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL *ReferenceDefinition -//line sql.y:2591 +//line sql.y:2587 { yyLOCAL = yyDollar[1].referenceDefinitionUnion() } @@ -13203,7 +13202,7 @@ yydefault: case 439: yyDollar = yyS[yypt-5 : yypt+1] var yyLOCAL ConstraintInfo -//line sql.y:2597 +//line sql.y:2593 { yyLOCAL = &CheckConstraintDefinition{Expr: yyDollar[3].exprUnion(), Enforced: yyDollar[5].booleanUnion()} } @@ -13211,7 +13210,7 @@ yydefault: case 440: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL MatchAction -//line sql.y:2603 +//line sql.y:2599 { yyLOCAL = yyDollar[2].matchActionUnion() } @@ -13219,7 +13218,7 @@ yydefault: case 441: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL MatchAction -//line sql.y:2609 +//line sql.y:2605 { yyLOCAL = Full } @@ -13227,7 +13226,7 @@ yydefault: case 442: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL MatchAction -//line sql.y:2613 +//line sql.y:2609 { yyLOCAL = Partial } @@ -13235,7 +13234,7 @@ yydefault: case 443: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL MatchAction -//line sql.y:2617 +//line sql.y:2613 { yyLOCAL = Simple } @@ -13243,7 +13242,7 @@ yydefault: case 444: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL MatchAction -//line sql.y:2622 +//line sql.y:2618 { yyLOCAL = DefaultMatch } @@ -13251,7 +13250,7 @@ yydefault: case 445: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL MatchAction -//line sql.y:2626 +//line sql.y:2622 { yyLOCAL = yyDollar[1].matchActionUnion() } @@ -13259,7 +13258,7 @@ yydefault: case 446: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL ReferenceAction -//line sql.y:2632 +//line sql.y:2628 { yyLOCAL = yyDollar[3].referenceActionUnion() } @@ -13267,7 +13266,7 @@ yydefault: case 447: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL ReferenceAction -//line sql.y:2638 +//line 
sql.y:2634 { yyLOCAL = yyDollar[3].referenceActionUnion() } @@ -13275,7 +13274,7 @@ yydefault: case 448: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL ReferenceAction -//line sql.y:2644 +//line sql.y:2640 { yyLOCAL = Restrict } @@ -13283,7 +13282,7 @@ yydefault: case 449: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL ReferenceAction -//line sql.y:2648 +//line sql.y:2644 { yyLOCAL = Cascade } @@ -13291,7 +13290,7 @@ yydefault: case 450: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL ReferenceAction -//line sql.y:2652 +//line sql.y:2648 { yyLOCAL = NoAction } @@ -13299,7 +13298,7 @@ yydefault: case 451: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL ReferenceAction -//line sql.y:2656 +//line sql.y:2652 { yyLOCAL = SetDefault } @@ -13307,33 +13306,33 @@ yydefault: case 452: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL ReferenceAction -//line sql.y:2660 +//line sql.y:2656 { yyLOCAL = SetNull } yyVAL.union = yyLOCAL case 453: yyDollar = yyS[yypt-0 : yypt+1] -//line sql.y:2665 +//line sql.y:2661 { yyVAL.str = "" } case 454: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:2669 +//line sql.y:2665 { yyVAL.str = string(yyDollar[1].str) } case 455: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:2673 +//line sql.y:2669 { yyVAL.str = string(yyDollar[1].str) } case 456: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL bool -//line sql.y:2678 +//line sql.y:2674 { yyLOCAL = false } @@ -13341,7 +13340,7 @@ yydefault: case 457: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL bool -//line sql.y:2682 +//line sql.y:2678 { yyLOCAL = true } @@ -13349,7 +13348,7 @@ yydefault: case 458: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL bool -//line sql.y:2688 +//line sql.y:2684 { yyLOCAL = true } @@ -13357,7 +13356,7 @@ yydefault: case 459: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL bool -//line sql.y:2692 +//line sql.y:2688 { yyLOCAL = false } @@ -13365,7 +13364,7 @@ yydefault: case 460: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL bool -//line sql.y:2697 +//line sql.y:2693 { yyLOCAL = true } @@ -13373,7 +13372,7 @@ 
yydefault: case 461: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL bool -//line sql.y:2701 +//line sql.y:2697 { yyLOCAL = yyDollar[1].booleanUnion() } @@ -13381,7 +13380,7 @@ yydefault: case 462: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL TableOptions -//line sql.y:2706 +//line sql.y:2702 { yyLOCAL = nil } @@ -13389,7 +13388,7 @@ yydefault: case 463: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL TableOptions -//line sql.y:2710 +//line sql.y:2706 { yyLOCAL = yyDollar[1].tableOptionsUnion() } @@ -13397,21 +13396,21 @@ yydefault: case 464: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL TableOptions -//line sql.y:2716 +//line sql.y:2712 { yyLOCAL = TableOptions{yyDollar[1].tableOptionUnion()} } yyVAL.union = yyLOCAL case 465: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:2720 +//line sql.y:2716 { yySLICE := (*TableOptions)(yyIaddr(yyVAL.union)) *yySLICE = append(*yySLICE, yyDollar[3].tableOptionUnion()) } case 466: yyDollar = yyS[yypt-2 : yypt+1] -//line sql.y:2724 +//line sql.y:2720 { yySLICE := (*TableOptions)(yyIaddr(yyVAL.union)) *yySLICE = append(*yySLICE, yyDollar[2].tableOptionUnion()) @@ -13419,14 +13418,14 @@ yydefault: case 467: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL TableOptions -//line sql.y:2730 +//line sql.y:2726 { yyLOCAL = TableOptions{yyDollar[1].tableOptionUnion()} } yyVAL.union = yyLOCAL case 468: yyDollar = yyS[yypt-2 : yypt+1] -//line sql.y:2734 +//line sql.y:2730 { yySLICE := (*TableOptions)(yyIaddr(yyVAL.union)) *yySLICE = append(*yySLICE, yyDollar[2].tableOptionUnion()) @@ -13434,7 +13433,7 @@ yydefault: case 469: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *TableOption -//line sql.y:2740 +//line sql.y:2736 { yyLOCAL = &TableOption{Name: string(yyDollar[1].str), Value: NewIntLiteral(yyDollar[3].str)} } @@ -13442,7 +13441,7 @@ yydefault: case 470: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *TableOption -//line sql.y:2744 +//line sql.y:2740 { yyLOCAL = &TableOption{Name: string(yyDollar[1].str), Value: NewIntLiteral(yyDollar[3].str)} } @@ -13450,7 
+13449,7 @@ yydefault: case 471: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *TableOption -//line sql.y:2748 +//line sql.y:2744 { yyLOCAL = &TableOption{Name: string(yyDollar[1].str), Value: NewIntLiteral(yyDollar[3].str)} } @@ -13458,7 +13457,7 @@ yydefault: case 472: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL *TableOption -//line sql.y:2752 +//line sql.y:2748 { yyLOCAL = &TableOption{Name: (string(yyDollar[2].str)), String: yyDollar[4].str, CaseSensitive: true} } @@ -13466,7 +13465,7 @@ yydefault: case 473: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL *TableOption -//line sql.y:2756 +//line sql.y:2752 { yyLOCAL = &TableOption{Name: string(yyDollar[2].str), String: yyDollar[4].str, CaseSensitive: true} } @@ -13474,7 +13473,7 @@ yydefault: case 474: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *TableOption -//line sql.y:2760 +//line sql.y:2756 { yyLOCAL = &TableOption{Name: string(yyDollar[1].str), Value: NewIntLiteral(yyDollar[3].str)} } @@ -13482,7 +13481,7 @@ yydefault: case 475: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *TableOption -//line sql.y:2764 +//line sql.y:2760 { yyLOCAL = &TableOption{Name: string(yyDollar[1].str), Value: NewStrLiteral(yyDollar[3].str)} } @@ -13490,7 +13489,7 @@ yydefault: case 476: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *TableOption -//line sql.y:2768 +//line sql.y:2764 { yyLOCAL = &TableOption{Name: string(yyDollar[1].str), Value: NewStrLiteral(yyDollar[3].str)} } @@ -13498,7 +13497,7 @@ yydefault: case 477: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *TableOption -//line sql.y:2772 +//line sql.y:2768 { yyLOCAL = &TableOption{Name: string(yyDollar[1].str), Value: NewStrLiteral(yyDollar[3].str)} } @@ -13506,7 +13505,7 @@ yydefault: case 478: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL *TableOption -//line sql.y:2776 +//line sql.y:2772 { yyLOCAL = &TableOption{Name: (string(yyDollar[1].str) + " " + string(yyDollar[2].str)), Value: NewStrLiteral(yyDollar[4].str)} } @@ -13514,7 +13513,7 @@ yydefault: case 479: yyDollar = 
yyS[yypt-4 : yypt+1] var yyLOCAL *TableOption -//line sql.y:2780 +//line sql.y:2776 { yyLOCAL = &TableOption{Name: (string(yyDollar[1].str) + " " + string(yyDollar[2].str)), Value: NewStrLiteral(yyDollar[4].str)} } @@ -13522,7 +13521,7 @@ yydefault: case 480: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *TableOption -//line sql.y:2784 +//line sql.y:2780 { yyLOCAL = &TableOption{Name: string(yyDollar[1].str), Value: NewIntLiteral(yyDollar[3].str)} } @@ -13530,7 +13529,7 @@ yydefault: case 481: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *TableOption -//line sql.y:2788 +//line sql.y:2784 { yyLOCAL = &TableOption{Name: string(yyDollar[1].str), Value: NewStrLiteral(yyDollar[3].str)} } @@ -13538,7 +13537,7 @@ yydefault: case 482: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *TableOption -//line sql.y:2792 +//line sql.y:2788 { yyLOCAL = &TableOption{Name: string(yyDollar[1].str), String: yyDollar[3].identifierCS.String(), CaseSensitive: true} } @@ -13546,7 +13545,7 @@ yydefault: case 483: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *TableOption -//line sql.y:2796 +//line sql.y:2792 { yyLOCAL = &TableOption{Name: string(yyDollar[1].str), Value: NewStrLiteral(yyDollar[3].str)} } @@ -13554,7 +13553,7 @@ yydefault: case 484: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *TableOption -//line sql.y:2800 +//line sql.y:2796 { yyLOCAL = &TableOption{Name: string(yyDollar[1].str), String: string(yyDollar[3].str)} } @@ -13562,7 +13561,7 @@ yydefault: case 485: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *TableOption -//line sql.y:2804 +//line sql.y:2800 { yyLOCAL = &TableOption{Name: string(yyDollar[1].str), Value: NewIntLiteral(yyDollar[3].str)} } @@ -13570,7 +13569,7 @@ yydefault: case 486: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *TableOption -//line sql.y:2808 +//line sql.y:2804 { yyLOCAL = &TableOption{Name: string(yyDollar[1].str), Value: NewIntLiteral(yyDollar[3].str)} } @@ -13578,7 +13577,7 @@ yydefault: case 487: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *TableOption 
-//line sql.y:2812 +//line sql.y:2808 { yyLOCAL = &TableOption{Name: string(yyDollar[1].str), Value: NewIntLiteral(yyDollar[3].str)} } @@ -13586,7 +13585,7 @@ yydefault: case 488: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *TableOption -//line sql.y:2816 +//line sql.y:2812 { yyLOCAL = &TableOption{Name: string(yyDollar[1].str), Value: NewIntLiteral(yyDollar[3].str)} } @@ -13594,7 +13593,7 @@ yydefault: case 489: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *TableOption -//line sql.y:2820 +//line sql.y:2816 { yyLOCAL = &TableOption{Name: string(yyDollar[1].str), String: string(yyDollar[3].str)} } @@ -13602,7 +13601,7 @@ yydefault: case 490: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *TableOption -//line sql.y:2824 +//line sql.y:2820 { yyLOCAL = &TableOption{Name: string(yyDollar[1].str), Value: NewStrLiteral(yyDollar[3].str)} } @@ -13610,7 +13609,7 @@ yydefault: case 491: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *TableOption -//line sql.y:2828 +//line sql.y:2824 { yyLOCAL = &TableOption{Name: string(yyDollar[1].str), String: string(yyDollar[3].str)} } @@ -13618,7 +13617,7 @@ yydefault: case 492: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *TableOption -//line sql.y:2832 +//line sql.y:2828 { yyLOCAL = &TableOption{Name: string(yyDollar[1].str), Value: NewStrLiteral(yyDollar[3].str)} } @@ -13626,7 +13625,7 @@ yydefault: case 493: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *TableOption -//line sql.y:2836 +//line sql.y:2832 { yyLOCAL = &TableOption{Name: string(yyDollar[1].str), Value: NewIntLiteral(yyDollar[3].str)} } @@ -13634,7 +13633,7 @@ yydefault: case 494: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *TableOption -//line sql.y:2840 +//line sql.y:2836 { yyLOCAL = &TableOption{Name: string(yyDollar[1].str), String: string(yyDollar[3].str)} } @@ -13642,7 +13641,7 @@ yydefault: case 495: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *TableOption -//line sql.y:2844 +//line sql.y:2840 { yyLOCAL = &TableOption{Name: string(yyDollar[1].str), Value: 
NewIntLiteral(yyDollar[3].str)} } @@ -13650,7 +13649,7 @@ yydefault: case 496: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *TableOption -//line sql.y:2848 +//line sql.y:2844 { yyLOCAL = &TableOption{Name: string(yyDollar[1].str), String: string(yyDollar[3].str)} } @@ -13658,7 +13657,7 @@ yydefault: case 497: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *TableOption -//line sql.y:2852 +//line sql.y:2848 { yyLOCAL = &TableOption{Name: string(yyDollar[1].str), Value: NewIntLiteral(yyDollar[3].str)} } @@ -13666,7 +13665,7 @@ yydefault: case 498: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL *TableOption -//line sql.y:2856 +//line sql.y:2852 { yyLOCAL = &TableOption{Name: string(yyDollar[1].str), String: (yyDollar[3].identifierCI.String() + yyDollar[4].str), CaseSensitive: true} } @@ -13674,7 +13673,7 @@ yydefault: case 499: yyDollar = yyS[yypt-5 : yypt+1] var yyLOCAL *TableOption -//line sql.y:2860 +//line sql.y:2856 { yyLOCAL = &TableOption{Name: string(yyDollar[1].str), Tables: yyDollar[4].tableNamesUnion()} } @@ -13682,7 +13681,7 @@ yydefault: case 500: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *TableOption -//line sql.y:2864 +//line sql.y:2860 { yyLOCAL = &TableOption{Name: string(yyDollar[1].str), DBPartitionOption: yyDollar[3].partitionOptionUnion()} } @@ -13690,7 +13689,7 @@ yydefault: case 501: yyDollar = yyS[yypt-5 : yypt+1] var yyLOCAL *TableOption -//line sql.y:2868 +//line sql.y:2864 { yyLOCAL = &TableOption{Name: string(yyDollar[1].str), TBPartitionOption: yyDollar[3].partitionOptionUnion(), Value: NewIntLiteral(yyDollar[5].str)} } @@ -13698,7 +13697,7 @@ yydefault: case 502: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL *TableOption -//line sql.y:2872 +//line sql.y:2868 { yyLOCAL = &TableOption{Name: string(yyDollar[1].str), DistributionPrimaryKeyOption: yyDollar[4].distributionPrimaryKeyOptionUnion()} } @@ -13706,7 +13705,7 @@ yydefault: case 503: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL *TableOption -//line sql.y:2876 +//line sql.y:2872 { yyLOCAL = 
&TableOption{Name: string(yyDollar[1].str)} } @@ -13714,63 +13713,63 @@ yydefault: case 504: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL *TableOption -//line sql.y:2880 +//line sql.y:2876 { yyLOCAL = &TableOption{Name: string(yyDollar[1].str), String: encodeSQLString(yyDollar[2].str)} } yyVAL.union = yyLOCAL case 505: yyDollar = yyS[yypt-0 : yypt+1] -//line sql.y:2886 +//line sql.y:2882 { yyVAL.str = "" } case 506: yyDollar = yyS[yypt-2 : yypt+1] -//line sql.y:2890 +//line sql.y:2886 { yyVAL.str = " " + string(yyDollar[1].str) + " " + string(yyDollar[2].str) } case 507: yyDollar = yyS[yypt-2 : yypt+1] -//line sql.y:2894 +//line sql.y:2890 { yyVAL.str = " " + string(yyDollar[1].str) + " " + string(yyDollar[2].str) } case 517: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:2913 +//line sql.y:2909 { yyVAL.str = String(TableName{Qualifier: yyDollar[1].identifierCS, Name: yyDollar[3].identifierCS}) } case 518: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:2917 +//line sql.y:2913 { yyVAL.str = yyDollar[1].identifierCI.String() } case 519: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:2921 +//line sql.y:2917 { yyVAL.str = encodeSQLString(yyDollar[1].str) } case 520: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:2925 +//line sql.y:2921 { yyVAL.str = string(yyDollar[1].str) } case 521: yyDollar = yyS[yypt-0 : yypt+1] -//line sql.y:2930 +//line sql.y:2926 { yyVAL.str = "" } case 523: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL bool -//line sql.y:2936 +//line sql.y:2932 { yyLOCAL = false } @@ -13778,7 +13777,7 @@ yydefault: case 524: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL bool -//line sql.y:2940 +//line sql.y:2936 { yyLOCAL = true } @@ -13786,7 +13785,7 @@ yydefault: case 525: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL *ColName -//line sql.y:2945 +//line sql.y:2941 { yyLOCAL = nil } @@ -13794,27 +13793,27 @@ yydefault: case 526: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL *ColName -//line sql.y:2949 +//line sql.y:2945 { yyLOCAL = yyDollar[2].colNameUnion() } yyVAL.union 
= yyLOCAL case 527: yyDollar = yyS[yypt-0 : yypt+1] -//line sql.y:2954 +//line sql.y:2950 { yyVAL.str = "" } case 528: yyDollar = yyS[yypt-2 : yypt+1] -//line sql.y:2958 +//line sql.y:2954 { yyVAL.str = string(yyDollar[2].str) } case 529: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL *Literal -//line sql.y:2963 +//line sql.y:2959 { yyLOCAL = nil } @@ -13822,7 +13821,7 @@ yydefault: case 530: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL *Literal -//line sql.y:2967 +//line sql.y:2963 { yyLOCAL = NewIntLiteral(yyDollar[2].str) } @@ -13830,7 +13829,7 @@ yydefault: case 531: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL *Literal -//line sql.y:2971 +//line sql.y:2967 { yyLOCAL = NewDecimalLiteral(yyDollar[2].str) } @@ -13838,7 +13837,7 @@ yydefault: case 532: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL []AlterOption -//line sql.y:2976 +//line sql.y:2972 { yyLOCAL = nil } @@ -13846,14 +13845,14 @@ yydefault: case 533: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL []AlterOption -//line sql.y:2980 +//line sql.y:2976 { yyLOCAL = yyDollar[1].alterOptionsUnion() } yyVAL.union = yyLOCAL case 534: yyDollar = yyS[yypt-5 : yypt+1] -//line sql.y:2984 +//line sql.y:2980 { yySLICE := (*[]AlterOption)(yyIaddr(yyVAL.union)) *yySLICE = append(*yySLICE, &OrderByOption{Cols: yyDollar[5].columnsUnion()}) @@ -13861,14 +13860,14 @@ yydefault: case 535: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL []AlterOption -//line sql.y:2988 +//line sql.y:2984 { yyLOCAL = yyDollar[1].alterOptionsUnion() } yyVAL.union = yyLOCAL case 536: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:2992 +//line sql.y:2988 { yySLICE := (*[]AlterOption)(yyIaddr(yyVAL.union)) *yySLICE = append(*yySLICE, yyDollar[3].alterOptionsUnion()...) 
@@ -13876,7 +13875,7 @@ yydefault: case 537: yyDollar = yyS[yypt-7 : yypt+1] var yyLOCAL []AlterOption -//line sql.y:2996 +//line sql.y:2992 { yyLOCAL = append(append(yyDollar[1].alterOptionsUnion(), yyDollar[3].alterOptionsUnion()...), &OrderByOption{Cols: yyDollar[7].columnsUnion()}) } @@ -13884,21 +13883,21 @@ yydefault: case 538: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL []AlterOption -//line sql.y:3002 +//line sql.y:2998 { yyLOCAL = []AlterOption{yyDollar[1].alterOptionUnion()} } yyVAL.union = yyLOCAL case 539: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:3006 +//line sql.y:3002 { yySLICE := (*[]AlterOption)(yyIaddr(yyVAL.union)) *yySLICE = append(*yySLICE, yyDollar[3].alterOptionUnion()) } case 540: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:3010 +//line sql.y:3006 { yySLICE := (*[]AlterOption)(yyIaddr(yyVAL.union)) *yySLICE = append(*yySLICE, yyDollar[3].alterOptionUnion()) @@ -13906,7 +13905,7 @@ yydefault: case 541: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL AlterOption -//line sql.y:3016 +//line sql.y:3012 { yyLOCAL = yyDollar[1].tableOptionsUnion() } @@ -13914,7 +13913,7 @@ yydefault: case 542: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL AlterOption -//line sql.y:3020 +//line sql.y:3016 { yyLOCAL = &AddConstraintDefinition{ConstraintDefinition: yyDollar[2].constraintDefinitionUnion()} } @@ -13922,7 +13921,7 @@ yydefault: case 543: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL AlterOption -//line sql.y:3024 +//line sql.y:3020 { yyLOCAL = &AddConstraintDefinition{ConstraintDefinition: yyDollar[2].constraintDefinitionUnion()} } @@ -13930,7 +13929,7 @@ yydefault: case 544: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL AlterOption -//line sql.y:3028 +//line sql.y:3024 { yyLOCAL = &AddIndexDefinition{IndexDefinition: yyDollar[2].indexDefinitionUnion()} } @@ -13938,7 +13937,7 @@ yydefault: case 545: yyDollar = yyS[yypt-5 : yypt+1] var yyLOCAL AlterOption -//line sql.y:3032 +//line sql.y:3028 { yyLOCAL = &AddColumns{Columns: 
yyDollar[4].columnDefinitionsUnion()} } @@ -13946,7 +13945,7 @@ yydefault: case 546: yyDollar = yyS[yypt-5 : yypt+1] var yyLOCAL AlterOption -//line sql.y:3036 +//line sql.y:3032 { yyLOCAL = &AddColumns{Columns: []*ColumnDefinition{yyDollar[3].columnDefinitionUnion()}, First: yyDollar[4].booleanUnion(), After: yyDollar[5].colNameUnion()} } @@ -13954,7 +13953,7 @@ yydefault: case 547: yyDollar = yyS[yypt-5 : yypt+1] var yyLOCAL AlterOption -//line sql.y:3040 +//line sql.y:3036 { yyLOCAL = &AlterColumn{Column: yyDollar[3].colNameUnion(), DropDefault: true} } @@ -13962,7 +13961,7 @@ yydefault: case 548: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL AlterOption -//line sql.y:3044 +//line sql.y:3040 { yyLOCAL = &AlterColumn{Column: yyDollar[3].colNameUnion(), DropDefault: false, DefaultVal: yyDollar[6].exprUnion(), DefaultLiteral: true} } @@ -13970,7 +13969,7 @@ yydefault: case 549: yyDollar = yyS[yypt-8 : yypt+1] var yyLOCAL AlterOption -//line sql.y:3048 +//line sql.y:3044 { yyLOCAL = &AlterColumn{Column: yyDollar[3].colNameUnion(), DropDefault: false, DefaultVal: yyDollar[7].exprUnion()} } @@ -13978,25 +13977,23 @@ yydefault: case 550: yyDollar = yyS[yypt-5 : yypt+1] var yyLOCAL AlterOption -//line sql.y:3052 +//line sql.y:3048 { - val := false - yyLOCAL = &AlterColumn{Column: yyDollar[3].colNameUnion(), Invisible: &val} + yyLOCAL = &AlterColumn{Column: yyDollar[3].colNameUnion(), Invisible: ptr.Of(false)} } yyVAL.union = yyLOCAL case 551: yyDollar = yyS[yypt-5 : yypt+1] var yyLOCAL AlterOption -//line sql.y:3057 +//line sql.y:3052 { - val := true - yyLOCAL = &AlterColumn{Column: yyDollar[3].colNameUnion(), Invisible: &val} + yyLOCAL = &AlterColumn{Column: yyDollar[3].colNameUnion(), Invisible: ptr.Of(true)} } yyVAL.union = yyLOCAL case 552: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL AlterOption -//line sql.y:3062 +//line sql.y:3056 { yyLOCAL = &AlterCheck{Name: yyDollar[3].identifierCI, Enforced: yyDollar[4].booleanUnion()} } @@ -14004,7 +14001,7 @@ yydefault: case 
553: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL AlterOption -//line sql.y:3066 +//line sql.y:3060 { yyLOCAL = &AlterIndex{Name: yyDollar[3].identifierCI, Invisible: false} } @@ -14012,7 +14009,7 @@ yydefault: case 554: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL AlterOption -//line sql.y:3070 +//line sql.y:3064 { yyLOCAL = &AlterIndex{Name: yyDollar[3].identifierCI, Invisible: true} } @@ -14020,7 +14017,7 @@ yydefault: case 555: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL AlterOption -//line sql.y:3074 +//line sql.y:3068 { yyLOCAL = &ChangeColumn{OldColumn: yyDollar[3].colNameUnion(), NewColDefinition: yyDollar[4].columnDefinitionUnion(), First: yyDollar[5].booleanUnion(), After: yyDollar[6].colNameUnion()} } @@ -14028,7 +14025,7 @@ yydefault: case 556: yyDollar = yyS[yypt-5 : yypt+1] var yyLOCAL AlterOption -//line sql.y:3078 +//line sql.y:3072 { yyLOCAL = &ModifyColumn{NewColDefinition: yyDollar[3].columnDefinitionUnion(), First: yyDollar[4].booleanUnion(), After: yyDollar[5].colNameUnion()} } @@ -14036,7 +14033,7 @@ yydefault: case 557: yyDollar = yyS[yypt-5 : yypt+1] var yyLOCAL AlterOption -//line sql.y:3082 +//line sql.y:3076 { yyLOCAL = &RenameColumn{OldName: yyDollar[3].colNameUnion(), NewName: yyDollar[5].colNameUnion()} } @@ -14044,7 +14041,7 @@ yydefault: case 558: yyDollar = yyS[yypt-5 : yypt+1] var yyLOCAL AlterOption -//line sql.y:3086 +//line sql.y:3080 { yyLOCAL = &AlterCharset{CharacterSet: yyDollar[4].str, Collate: yyDollar[5].str} } @@ -14052,7 +14049,7 @@ yydefault: case 559: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL AlterOption -//line sql.y:3090 +//line sql.y:3084 { yyLOCAL = &KeyState{Enable: false} } @@ -14060,7 +14057,7 @@ yydefault: case 560: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL AlterOption -//line sql.y:3094 +//line sql.y:3088 { yyLOCAL = &KeyState{Enable: true} } @@ -14068,7 +14065,7 @@ yydefault: case 561: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL AlterOption -//line sql.y:3098 +//line sql.y:3092 { yyLOCAL = 
&TablespaceOperation{Import: false} } @@ -14076,7 +14073,7 @@ yydefault: case 562: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL AlterOption -//line sql.y:3102 +//line sql.y:3096 { yyLOCAL = &TablespaceOperation{Import: true} } @@ -14084,7 +14081,7 @@ yydefault: case 563: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL AlterOption -//line sql.y:3106 +//line sql.y:3100 { yyLOCAL = &DropColumn{Name: yyDollar[3].colNameUnion()} } @@ -14092,7 +14089,7 @@ yydefault: case 564: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL AlterOption -//line sql.y:3110 +//line sql.y:3104 { yyLOCAL = &DropKey{Type: NormalKeyType, Name: yyDollar[3].identifierCI} } @@ -14100,7 +14097,7 @@ yydefault: case 565: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL AlterOption -//line sql.y:3114 +//line sql.y:3108 { yyLOCAL = &DropKey{Type: PrimaryKeyType} } @@ -14108,7 +14105,7 @@ yydefault: case 566: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL AlterOption -//line sql.y:3118 +//line sql.y:3112 { yyLOCAL = &DropKey{Type: ForeignKeyType, Name: yyDollar[4].identifierCI} } @@ -14116,7 +14113,7 @@ yydefault: case 567: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL AlterOption -//line sql.y:3122 +//line sql.y:3116 { yyLOCAL = &DropKey{Type: CheckKeyType, Name: yyDollar[3].identifierCI} } @@ -14124,7 +14121,7 @@ yydefault: case 568: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL AlterOption -//line sql.y:3126 +//line sql.y:3120 { yyLOCAL = &DropKey{Type: CheckKeyType, Name: yyDollar[3].identifierCI} } @@ -14132,7 +14129,7 @@ yydefault: case 569: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL AlterOption -//line sql.y:3130 +//line sql.y:3124 { yyLOCAL = &Force{} } @@ -14140,7 +14137,7 @@ yydefault: case 570: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL AlterOption -//line sql.y:3134 +//line sql.y:3128 { yyLOCAL = &RenameTableName{Table: yyDollar[3].tableName} } @@ -14148,7 +14145,7 @@ yydefault: case 571: yyDollar = yyS[yypt-5 : yypt+1] var yyLOCAL AlterOption -//line sql.y:3138 +//line sql.y:3132 { yyLOCAL = 
&RenameIndex{OldName: yyDollar[3].identifierCI, NewName: yyDollar[5].identifierCI} } @@ -14156,14 +14153,14 @@ yydefault: case 572: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL []AlterOption -//line sql.y:3144 +//line sql.y:3138 { yyLOCAL = []AlterOption{yyDollar[1].alterOptionUnion()} } yyVAL.union = yyLOCAL case 573: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:3148 +//line sql.y:3142 { yySLICE := (*[]AlterOption)(yyIaddr(yyVAL.union)) *yySLICE = append(*yySLICE, yyDollar[3].alterOptionUnion()) @@ -14171,7 +14168,7 @@ yydefault: case 574: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL AlterOption -//line sql.y:3154 +//line sql.y:3148 { yyLOCAL = AlgorithmValue(string(yyDollar[3].str)) } @@ -14179,7 +14176,7 @@ yydefault: case 575: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL AlterOption -//line sql.y:3158 +//line sql.y:3152 { yyLOCAL = AlgorithmValue(string(yyDollar[3].str)) } @@ -14187,7 +14184,7 @@ yydefault: case 576: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL AlterOption -//line sql.y:3162 +//line sql.y:3156 { yyLOCAL = AlgorithmValue(string(yyDollar[3].str)) } @@ -14195,7 +14192,7 @@ yydefault: case 577: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL AlterOption -//line sql.y:3166 +//line sql.y:3160 { yyLOCAL = AlgorithmValue(string(yyDollar[3].str)) } @@ -14203,7 +14200,7 @@ yydefault: case 578: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL AlterOption -//line sql.y:3170 +//line sql.y:3164 { yyLOCAL = &LockOption{Type: DefaultType} } @@ -14211,7 +14208,7 @@ yydefault: case 579: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL AlterOption -//line sql.y:3174 +//line sql.y:3168 { yyLOCAL = &LockOption{Type: NoneType} } @@ -14219,7 +14216,7 @@ yydefault: case 580: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL AlterOption -//line sql.y:3178 +//line sql.y:3172 { yyLOCAL = &LockOption{Type: SharedType} } @@ -14227,7 +14224,7 @@ yydefault: case 581: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL AlterOption -//line sql.y:3182 +//line sql.y:3176 { yyLOCAL = &LockOption{Type: 
ExclusiveType} } @@ -14235,7 +14232,7 @@ yydefault: case 582: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL AlterOption -//line sql.y:3186 +//line sql.y:3180 { yyLOCAL = &Validation{With: true} } @@ -14243,7 +14240,7 @@ yydefault: case 583: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL AlterOption -//line sql.y:3190 +//line sql.y:3184 { yyLOCAL = &Validation{With: false} } @@ -14251,7 +14248,7 @@ yydefault: case 584: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL Statement -//line sql.y:3196 +//line sql.y:3190 { yyDollar[1].alterTableUnion().FullyParsed = true yyDollar[1].alterTableUnion().AlterOptions = yyDollar[2].alterOptionsUnion() @@ -14262,7 +14259,7 @@ yydefault: case 585: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Statement -//line sql.y:3203 +//line sql.y:3197 { yyDollar[1].alterTableUnion().FullyParsed = true yyDollar[1].alterTableUnion().AlterOptions = yyDollar[2].alterOptionsUnion() @@ -14273,7 +14270,7 @@ yydefault: case 586: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Statement -//line sql.y:3210 +//line sql.y:3204 { yyDollar[1].alterTableUnion().FullyParsed = true yyDollar[1].alterTableUnion().AlterOptions = yyDollar[2].alterOptionsUnion() @@ -14284,7 +14281,7 @@ yydefault: case 587: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL Statement -//line sql.y:3217 +//line sql.y:3211 { yyDollar[1].alterTableUnion().FullyParsed = true yyDollar[1].alterTableUnion().PartitionSpec = yyDollar[2].partSpecUnion() @@ -14294,7 +14291,7 @@ yydefault: case 588: yyDollar = yyS[yypt-11 : yypt+1] var yyLOCAL Statement -//line sql.y:3223 +//line sql.y:3217 { yyLOCAL = &AlterView{ViewName: yyDollar[7].tableName, Comments: Comments(yyDollar[2].strs).Parsed(), Algorithm: yyDollar[3].str, Definer: yyDollar[4].definerUnion(), Security: yyDollar[5].str, Columns: yyDollar[8].columnsUnion(), Select: yyDollar[10].selStmtUnion(), CheckOption: yyDollar[11].str} } @@ -14302,7 +14299,7 @@ yydefault: case 589: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL Statement -//line sql.y:3233 +//line 
sql.y:3227 { yyDollar[1].alterDatabaseUnion().FullyParsed = true yyDollar[1].alterDatabaseUnion().DBName = yyDollar[2].identifierCS @@ -14313,7 +14310,7 @@ yydefault: case 590: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Statement -//line sql.y:3240 +//line sql.y:3234 { yyDollar[1].alterDatabaseUnion().FullyParsed = true yyDollar[1].alterDatabaseUnion().DBName = yyDollar[2].identifierCS @@ -14324,7 +14321,7 @@ yydefault: case 591: yyDollar = yyS[yypt-8 : yypt+1] var yyLOCAL Statement -//line sql.y:3247 +//line sql.y:3241 { yyLOCAL = &AlterVschema{ Action: CreateVindexDDLAction, @@ -14340,7 +14337,7 @@ yydefault: case 592: yyDollar = yyS[yypt-8 : yypt+1] var yyLOCAL Statement -//line sql.y:3259 +//line sql.y:3253 { yyLOCAL = &AlterVschema{ Action: CreateTindexDDLAction, @@ -14356,7 +14353,7 @@ yydefault: case 593: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Statement -//line sql.y:3271 +//line sql.y:3265 { yyLOCAL = &AlterVschema{ Action: DropVindexDDLAction, @@ -14370,7 +14367,7 @@ yydefault: case 594: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Statement -//line sql.y:3281 +//line sql.y:3275 { yyLOCAL = &AlterVschema{ Action: DropTindexDDLAction, @@ -14384,7 +14381,7 @@ yydefault: case 595: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Statement -//line sql.y:3291 +//line sql.y:3285 { yyLOCAL = &AlterVschema{Action: AddVschemaTableDDLAction, Table: yyDollar[6].tableName} } @@ -14392,7 +14389,7 @@ yydefault: case 596: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Statement -//line sql.y:3295 +//line sql.y:3289 { yyLOCAL = &AlterVschema{Action: DropVschemaTableDDLAction, Table: yyDollar[6].tableName} } @@ -14400,7 +14397,7 @@ yydefault: case 597: yyDollar = yyS[yypt-13 : yypt+1] var yyLOCAL Statement -//line sql.y:3299 +//line sql.y:3293 { yyLOCAL = &AlterVschema{ Action: AddColVindexDDLAction, @@ -14417,7 +14414,7 @@ yydefault: case 598: yyDollar = yyS[yypt-15 : yypt+1] var yyLOCAL Statement -//line sql.y:3312 +//line sql.y:3306 { yyLOCAL = &AlterVschema{ Action: 
AddColTindexDDLAction, @@ -14435,7 +14432,7 @@ yydefault: case 599: yyDollar = yyS[yypt-8 : yypt+1] var yyLOCAL Statement -//line sql.y:3326 +//line sql.y:3320 { yyLOCAL = &AlterVschema{ Action: DropColVindexDDLAction, @@ -14449,7 +14446,7 @@ yydefault: case 600: yyDollar = yyS[yypt-8 : yypt+1] var yyLOCAL Statement -//line sql.y:3336 +//line sql.y:3330 { yyLOCAL = &AlterVschema{ Action: DropColTindexDDLAction, @@ -14463,7 +14460,7 @@ yydefault: case 601: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Statement -//line sql.y:3346 +//line sql.y:3340 { yyLOCAL = &AlterVschema{Action: AddSequenceDDLAction, Table: yyDollar[6].tableName} } @@ -14471,7 +14468,7 @@ yydefault: case 602: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Statement -//line sql.y:3350 +//line sql.y:3344 { yyLOCAL = &AlterVschema{Action: DropSequenceDDLAction, Table: yyDollar[6].tableName} } @@ -14479,7 +14476,7 @@ yydefault: case 603: yyDollar = yyS[yypt-10 : yypt+1] var yyLOCAL Statement -//line sql.y:3354 +//line sql.y:3348 { yyLOCAL = &AlterVschema{ Action: AddAutoIncDDLAction, @@ -14494,7 +14491,7 @@ yydefault: case 604: yyDollar = yyS[yypt-7 : yypt+1] var yyLOCAL Statement -//line sql.y:3365 +//line sql.y:3359 { yyLOCAL = &AlterVschema{ Action: DropAutoIncDDLAction, @@ -14505,7 +14502,7 @@ yydefault: case 605: yyDollar = yyS[yypt-8 : yypt+1] var yyLOCAL Statement -//line sql.y:3372 +//line sql.y:3366 { yyLOCAL = &AlterVschema{ Action: AddColSingleDDLAction, @@ -14517,7 +14514,7 @@ yydefault: case 606: yyDollar = yyS[yypt-8 : yypt+1] var yyLOCAL Statement -//line sql.y:3380 +//line sql.y:3374 { yyLOCAL = &AlterVschema{ Action: DropColSingleDDLAction, @@ -14529,7 +14526,7 @@ yydefault: case 607: yyDollar = yyS[yypt-7 : yypt+1] var yyLOCAL Statement -//line sql.y:3388 +//line sql.y:3382 { yyLOCAL = &AlterVschema{ Action: AddColBroadcastDDLAction, @@ -14540,7 +14537,7 @@ yydefault: case 608: yyDollar = yyS[yypt-7 : yypt+1] var yyLOCAL Statement -//line sql.y:3395 +//line sql.y:3389 { yyLOCAL = 
&AlterVschema{ Action: DropColBroadcastDDLAction, @@ -14551,7 +14548,7 @@ yydefault: case 609: yyDollar = yyS[yypt-5 : yypt+1] var yyLOCAL Statement -//line sql.y:3402 +//line sql.y:3396 { yyLOCAL = &AlterMigration{ Type: RetryMigrationType, @@ -14562,7 +14559,7 @@ yydefault: case 610: yyDollar = yyS[yypt-5 : yypt+1] var yyLOCAL Statement -//line sql.y:3409 +//line sql.y:3403 { yyLOCAL = &AlterMigration{ Type: CleanupMigrationType, @@ -14573,7 +14570,7 @@ yydefault: case 611: yyDollar = yyS[yypt-5 : yypt+1] var yyLOCAL Statement -//line sql.y:3416 +//line sql.y:3410 { yyLOCAL = &AlterMigration{ Type: LaunchMigrationType, @@ -14584,7 +14581,7 @@ yydefault: case 612: yyDollar = yyS[yypt-7 : yypt+1] var yyLOCAL Statement -//line sql.y:3423 +//line sql.y:3417 { yyLOCAL = &AlterMigration{ Type: LaunchMigrationType, @@ -14596,7 +14593,7 @@ yydefault: case 613: yyDollar = yyS[yypt-5 : yypt+1] var yyLOCAL Statement -//line sql.y:3431 +//line sql.y:3425 { yyLOCAL = &AlterMigration{ Type: LaunchAllMigrationType, @@ -14606,7 +14603,7 @@ yydefault: case 614: yyDollar = yyS[yypt-5 : yypt+1] var yyLOCAL Statement -//line sql.y:3437 +//line sql.y:3431 { yyLOCAL = &AlterMigration{ Type: CompleteMigrationType, @@ -14617,7 +14614,7 @@ yydefault: case 615: yyDollar = yyS[yypt-5 : yypt+1] var yyLOCAL Statement -//line sql.y:3444 +//line sql.y:3438 { yyLOCAL = &AlterMigration{ Type: CompleteAllMigrationType, @@ -14627,7 +14624,7 @@ yydefault: case 616: yyDollar = yyS[yypt-5 : yypt+1] var yyLOCAL Statement -//line sql.y:3450 +//line sql.y:3444 { yyLOCAL = &AlterMigration{ Type: CancelMigrationType, @@ -14638,7 +14635,7 @@ yydefault: case 617: yyDollar = yyS[yypt-5 : yypt+1] var yyLOCAL Statement -//line sql.y:3457 +//line sql.y:3451 { yyLOCAL = &AlterMigration{ Type: CancelAllMigrationType, @@ -14648,7 +14645,7 @@ yydefault: case 618: yyDollar = yyS[yypt-7 : yypt+1] var yyLOCAL Statement -//line sql.y:3463 +//line sql.y:3457 { yyLOCAL = &AlterMigration{ Type: ThrottleMigrationType, @@ 
-14661,7 +14658,7 @@ yydefault: case 619: yyDollar = yyS[yypt-7 : yypt+1] var yyLOCAL Statement -//line sql.y:3472 +//line sql.y:3466 { yyLOCAL = &AlterMigration{ Type: ThrottleAllMigrationType, @@ -14673,7 +14670,7 @@ yydefault: case 620: yyDollar = yyS[yypt-5 : yypt+1] var yyLOCAL Statement -//line sql.y:3480 +//line sql.y:3474 { yyLOCAL = &AlterMigration{ Type: UnthrottleMigrationType, @@ -14684,7 +14681,7 @@ yydefault: case 621: yyDollar = yyS[yypt-5 : yypt+1] var yyLOCAL Statement -//line sql.y:3487 +//line sql.y:3481 { yyLOCAL = &AlterMigration{ Type: UnthrottleAllMigrationType, @@ -14694,7 +14691,7 @@ yydefault: case 622: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL *PartitionOption -//line sql.y:3494 +//line sql.y:3488 { yyLOCAL = nil } @@ -14702,7 +14699,7 @@ yydefault: case 623: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL *PartitionOption -//line sql.y:3498 +//line sql.y:3492 { yyDollar[3].partitionOptionUnion().Partitions = yyDollar[4].integerUnion() yyDollar[3].partitionOptionUnion().SubPartition = yyDollar[5].subPartitionUnion() @@ -14713,7 +14710,7 @@ yydefault: case 624: yyDollar = yyS[yypt-5 : yypt+1] var yyLOCAL *PartitionOption -//line sql.y:3507 +//line sql.y:3501 { yyLOCAL = &PartitionOption{ IsLinear: yyDollar[1].booleanUnion(), @@ -14725,7 +14722,7 @@ yydefault: case 625: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL *PartitionOption -//line sql.y:3515 +//line sql.y:3509 { yyLOCAL = &PartitionOption{ IsLinear: yyDollar[1].booleanUnion(), @@ -14738,7 +14735,7 @@ yydefault: case 626: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL *PartitionOption -//line sql.y:3524 +//line sql.y:3518 { yyLOCAL = &PartitionOption{ Type: yyDollar[1].partitionByTypeUnion(), @@ -14749,7 +14746,7 @@ yydefault: case 627: yyDollar = yyS[yypt-5 : yypt+1] var yyLOCAL *PartitionOption -//line sql.y:3531 +//line sql.y:3525 { yyLOCAL = &PartitionOption{ Type: yyDollar[1].partitionByTypeUnion(), @@ -14760,7 +14757,7 @@ yydefault: case 628: yyDollar = yyS[yypt-4 : yypt+1] var 
yyLOCAL *DistributionPrimaryKeyOption -//line sql.y:3541 +//line sql.y:3535 { yyLOCAL = &DistributionPrimaryKeyOption{ TableName: yyDollar[1].tableName, @@ -14771,7 +14768,7 @@ yydefault: case 629: yyDollar = yyS[yypt-5 : yypt+1] var yyLOCAL *PartitionOption -//line sql.y:3550 +//line sql.y:3544 { yyLOCAL = &PartitionOption{ PartitionMethodName: yyDollar[1].identifierCI, @@ -14783,7 +14780,7 @@ yydefault: case 630: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL *SubPartition -//line sql.y:3560 +//line sql.y:3554 { yyLOCAL = nil } @@ -14791,7 +14788,7 @@ yydefault: case 631: yyDollar = yyS[yypt-8 : yypt+1] var yyLOCAL *SubPartition -//line sql.y:3564 +//line sql.y:3558 { yyLOCAL = &SubPartition{ IsLinear: yyDollar[3].booleanUnion(), @@ -14804,7 +14801,7 @@ yydefault: case 632: yyDollar = yyS[yypt-9 : yypt+1] var yyLOCAL *SubPartition -//line sql.y:3573 +//line sql.y:3567 { yyLOCAL = &SubPartition{ IsLinear: yyDollar[3].booleanUnion(), @@ -14818,7 +14815,7 @@ yydefault: case 633: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL []*PartitionDefinition -//line sql.y:3584 +//line sql.y:3578 { yyLOCAL = nil } @@ -14826,7 +14823,7 @@ yydefault: case 634: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL []*PartitionDefinition -//line sql.y:3588 +//line sql.y:3582 { yyLOCAL = yyDollar[2].partDefsUnion() } @@ -14834,7 +14831,7 @@ yydefault: case 635: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL bool -//line sql.y:3593 +//line sql.y:3587 { yyLOCAL = false } @@ -14842,7 +14839,7 @@ yydefault: case 636: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL bool -//line sql.y:3597 +//line sql.y:3591 { yyLOCAL = true } @@ -14850,7 +14847,7 @@ yydefault: case 637: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL int -//line sql.y:3602 +//line sql.y:3596 { yyLOCAL = 0 } @@ -14858,7 +14855,7 @@ yydefault: case 638: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL int -//line sql.y:3606 +//line sql.y:3600 { yyLOCAL = convertStringToInt(yyDollar[3].str) } @@ -14866,7 +14863,7 @@ yydefault: case 639: yyDollar = 
yyS[yypt-8 : yypt+1] var yyLOCAL TableExpr -//line sql.y:3612 +//line sql.y:3606 { yyLOCAL = &JSONTableExpr{Expr: yyDollar[3].exprUnion(), Filter: yyDollar[5].exprUnion(), Columns: yyDollar[6].jtColumnListUnion(), Alias: yyDollar[8].identifierCS} } @@ -14874,7 +14871,7 @@ yydefault: case 640: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL []*JtColumnDefinition -//line sql.y:3618 +//line sql.y:3612 { yyLOCAL = yyDollar[3].jtColumnListUnion() } @@ -14882,14 +14879,14 @@ yydefault: case 641: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL []*JtColumnDefinition -//line sql.y:3624 +//line sql.y:3618 { yyLOCAL = []*JtColumnDefinition{yyDollar[1].jtColumnDefinitionUnion()} } yyVAL.union = yyLOCAL case 642: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:3628 +//line sql.y:3622 { yySLICE := (*[]*JtColumnDefinition)(yyIaddr(yyVAL.union)) *yySLICE = append(*yySLICE, yyDollar[3].jtColumnDefinitionUnion()) @@ -14897,7 +14894,7 @@ yydefault: case 643: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *JtColumnDefinition -//line sql.y:3634 +//line sql.y:3628 { yyLOCAL = &JtColumnDefinition{JtOrdinal: &JtOrdinalColDef{Name: yyDollar[1].identifierCI}} } @@ -14905,7 +14902,7 @@ yydefault: case 644: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL *JtColumnDefinition -//line sql.y:3638 +//line sql.y:3632 { yyDollar[2].columnType.Options = &ColumnTypeOptions{Collate: yyDollar[3].str} jtPath := &JtPathColDef{Name: yyDollar[1].identifierCI, Type: yyDollar[2].columnType, JtColExists: yyDollar[4].booleanUnion(), Path: yyDollar[6].exprUnion()} @@ -14915,7 +14912,7 @@ yydefault: case 645: yyDollar = yyS[yypt-7 : yypt+1] var yyLOCAL *JtColumnDefinition -//line sql.y:3644 +//line sql.y:3638 { yyDollar[2].columnType.Options = &ColumnTypeOptions{Collate: yyDollar[3].str} jtPath := &JtPathColDef{Name: yyDollar[1].identifierCI, Type: yyDollar[2].columnType, JtColExists: yyDollar[4].booleanUnion(), Path: yyDollar[6].exprUnion(), EmptyOnResponse: yyDollar[7].jtOnResponseUnion()} @@ -14925,7 +14922,7 @@ yydefault: 
case 646: yyDollar = yyS[yypt-7 : yypt+1] var yyLOCAL *JtColumnDefinition -//line sql.y:3650 +//line sql.y:3644 { yyDollar[2].columnType.Options = &ColumnTypeOptions{Collate: yyDollar[3].str} jtPath := &JtPathColDef{Name: yyDollar[1].identifierCI, Type: yyDollar[2].columnType, JtColExists: yyDollar[4].booleanUnion(), Path: yyDollar[6].exprUnion(), ErrorOnResponse: yyDollar[7].jtOnResponseUnion()} @@ -14935,7 +14932,7 @@ yydefault: case 647: yyDollar = yyS[yypt-8 : yypt+1] var yyLOCAL *JtColumnDefinition -//line sql.y:3656 +//line sql.y:3650 { yyDollar[2].columnType.Options = &ColumnTypeOptions{Collate: yyDollar[3].str} jtPath := &JtPathColDef{Name: yyDollar[1].identifierCI, Type: yyDollar[2].columnType, JtColExists: yyDollar[4].booleanUnion(), Path: yyDollar[6].exprUnion(), EmptyOnResponse: yyDollar[7].jtOnResponseUnion(), ErrorOnResponse: yyDollar[8].jtOnResponseUnion()} @@ -14945,7 +14942,7 @@ yydefault: case 648: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL *JtColumnDefinition -//line sql.y:3662 +//line sql.y:3656 { jtNestedPath := &JtNestedPathColDef{Path: yyDollar[3].exprUnion(), Columns: yyDollar[4].jtColumnListUnion()} yyLOCAL = &JtColumnDefinition{JtNestedPath: jtNestedPath} @@ -14954,7 +14951,7 @@ yydefault: case 649: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL bool -//line sql.y:3668 +//line sql.y:3662 { yyLOCAL = false } @@ -14962,7 +14959,7 @@ yydefault: case 650: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL bool -//line sql.y:3672 +//line sql.y:3666 { yyLOCAL = true } @@ -14970,7 +14967,7 @@ yydefault: case 651: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL bool -//line sql.y:3676 +//line sql.y:3670 { yyLOCAL = false } @@ -14978,7 +14975,7 @@ yydefault: case 652: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL bool -//line sql.y:3680 +//line sql.y:3674 { yyLOCAL = true } @@ -14986,7 +14983,7 @@ yydefault: case 653: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *JtOnResponse -//line sql.y:3686 +//line sql.y:3680 { yyLOCAL = yyDollar[1].jtOnResponseUnion() } @@ 
-14994,7 +14991,7 @@ yydefault: case 654: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *JtOnResponse -//line sql.y:3692 +//line sql.y:3686 { yyLOCAL = yyDollar[1].jtOnResponseUnion() } @@ -15002,7 +14999,7 @@ yydefault: case 655: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL *JtOnResponse -//line sql.y:3698 +//line sql.y:3692 { yyLOCAL = &JtOnResponse{ResponseType: ErrorJSONType} } @@ -15010,7 +15007,7 @@ yydefault: case 656: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL *JtOnResponse -//line sql.y:3702 +//line sql.y:3696 { yyLOCAL = &JtOnResponse{ResponseType: NullJSONType} } @@ -15018,7 +15015,7 @@ yydefault: case 657: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL *JtOnResponse -//line sql.y:3706 +//line sql.y:3700 { yyLOCAL = &JtOnResponse{ResponseType: DefaultJSONType, Expr: yyDollar[2].exprUnion()} } @@ -15026,7 +15023,7 @@ yydefault: case 658: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL PartitionByType -//line sql.y:3712 +//line sql.y:3706 { yyLOCAL = RangeType } @@ -15034,7 +15031,7 @@ yydefault: case 659: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL PartitionByType -//line sql.y:3716 +//line sql.y:3710 { yyLOCAL = ListType } @@ -15042,7 +15039,7 @@ yydefault: case 660: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL int -//line sql.y:3721 +//line sql.y:3715 { yyLOCAL = -1 } @@ -15050,7 +15047,7 @@ yydefault: case 661: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL int -//line sql.y:3725 +//line sql.y:3719 { yyLOCAL = convertStringToInt(yyDollar[2].str) } @@ -15058,7 +15055,7 @@ yydefault: case 662: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL int -//line sql.y:3730 +//line sql.y:3724 { yyLOCAL = -1 } @@ -15066,7 +15063,7 @@ yydefault: case 663: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL int -//line sql.y:3734 +//line sql.y:3728 { yyLOCAL = convertStringToInt(yyDollar[2].str) } @@ -15074,7 +15071,7 @@ yydefault: case 664: yyDollar = yyS[yypt-5 : yypt+1] var yyLOCAL *PartitionSpec -//line sql.y:3740 +//line sql.y:3734 { yyLOCAL = &PartitionSpec{Action: AddAction, 
Definitions: []*PartitionDefinition{yyDollar[4].partDefUnion()}} } @@ -15082,7 +15079,7 @@ yydefault: case 665: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *PartitionSpec -//line sql.y:3744 +//line sql.y:3738 { yyLOCAL = &PartitionSpec{Action: DropAction, Names: yyDollar[3].partitionsUnion()} } @@ -15090,7 +15087,7 @@ yydefault: case 666: yyDollar = yyS[yypt-7 : yypt+1] var yyLOCAL *PartitionSpec -//line sql.y:3748 +//line sql.y:3742 { yyLOCAL = &PartitionSpec{Action: ReorganizeAction, Names: yyDollar[3].partitionsUnion(), Definitions: yyDollar[6].partDefsUnion()} } @@ -15098,7 +15095,7 @@ yydefault: case 667: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL *PartitionSpec -//line sql.y:3752 +//line sql.y:3746 { yyLOCAL = &PartitionSpec{Action: DiscardAction, Names: yyDollar[3].partitionsUnion()} } @@ -15106,7 +15103,7 @@ yydefault: case 668: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL *PartitionSpec -//line sql.y:3756 +//line sql.y:3750 { yyLOCAL = &PartitionSpec{Action: DiscardAction, IsAll: true} } @@ -15114,7 +15111,7 @@ yydefault: case 669: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL *PartitionSpec -//line sql.y:3760 +//line sql.y:3754 { yyLOCAL = &PartitionSpec{Action: ImportAction, Names: yyDollar[3].partitionsUnion()} } @@ -15122,7 +15119,7 @@ yydefault: case 670: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL *PartitionSpec -//line sql.y:3764 +//line sql.y:3758 { yyLOCAL = &PartitionSpec{Action: ImportAction, IsAll: true} } @@ -15130,7 +15127,7 @@ yydefault: case 671: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *PartitionSpec -//line sql.y:3768 +//line sql.y:3762 { yyLOCAL = &PartitionSpec{Action: TruncateAction, Names: yyDollar[3].partitionsUnion()} } @@ -15138,7 +15135,7 @@ yydefault: case 672: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *PartitionSpec -//line sql.y:3772 +//line sql.y:3766 { yyLOCAL = &PartitionSpec{Action: TruncateAction, IsAll: true} } @@ -15146,7 +15143,7 @@ yydefault: case 673: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *PartitionSpec 
-//line sql.y:3776 +//line sql.y:3770 { yyLOCAL = &PartitionSpec{Action: CoalesceAction, Number: NewIntLiteral(yyDollar[3].str)} } @@ -15154,7 +15151,7 @@ yydefault: case 674: yyDollar = yyS[yypt-7 : yypt+1] var yyLOCAL *PartitionSpec -//line sql.y:3780 +//line sql.y:3774 { yyLOCAL = &PartitionSpec{Action: ExchangeAction, Names: Partitions{yyDollar[3].identifierCI}, TableName: yyDollar[6].tableName, WithoutValidation: yyDollar[7].booleanUnion()} } @@ -15162,7 +15159,7 @@ yydefault: case 675: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *PartitionSpec -//line sql.y:3784 +//line sql.y:3778 { yyLOCAL = &PartitionSpec{Action: AnalyzeAction, Names: yyDollar[3].partitionsUnion()} } @@ -15170,7 +15167,7 @@ yydefault: case 676: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *PartitionSpec -//line sql.y:3788 +//line sql.y:3782 { yyLOCAL = &PartitionSpec{Action: AnalyzeAction, IsAll: true} } @@ -15178,7 +15175,7 @@ yydefault: case 677: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *PartitionSpec -//line sql.y:3792 +//line sql.y:3786 { yyLOCAL = &PartitionSpec{Action: CheckAction, Names: yyDollar[3].partitionsUnion()} } @@ -15186,7 +15183,7 @@ yydefault: case 678: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *PartitionSpec -//line sql.y:3796 +//line sql.y:3790 { yyLOCAL = &PartitionSpec{Action: CheckAction, IsAll: true} } @@ -15194,7 +15191,7 @@ yydefault: case 679: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *PartitionSpec -//line sql.y:3800 +//line sql.y:3794 { yyLOCAL = &PartitionSpec{Action: OptimizeAction, Names: yyDollar[3].partitionsUnion()} } @@ -15202,7 +15199,7 @@ yydefault: case 680: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *PartitionSpec -//line sql.y:3804 +//line sql.y:3798 { yyLOCAL = &PartitionSpec{Action: OptimizeAction, IsAll: true} } @@ -15210,7 +15207,7 @@ yydefault: case 681: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *PartitionSpec -//line sql.y:3808 +//line sql.y:3802 { yyLOCAL = &PartitionSpec{Action: RebuildAction, Names: yyDollar[3].partitionsUnion()} } 
@@ -15218,7 +15215,7 @@ yydefault: case 682: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *PartitionSpec -//line sql.y:3812 +//line sql.y:3806 { yyLOCAL = &PartitionSpec{Action: RebuildAction, IsAll: true} } @@ -15226,7 +15223,7 @@ yydefault: case 683: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *PartitionSpec -//line sql.y:3816 +//line sql.y:3810 { yyLOCAL = &PartitionSpec{Action: RepairAction, Names: yyDollar[3].partitionsUnion()} } @@ -15234,7 +15231,7 @@ yydefault: case 684: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *PartitionSpec -//line sql.y:3820 +//line sql.y:3814 { yyLOCAL = &PartitionSpec{Action: RepairAction, IsAll: true} } @@ -15242,7 +15239,7 @@ yydefault: case 685: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL *PartitionSpec -//line sql.y:3824 +//line sql.y:3818 { yyLOCAL = &PartitionSpec{Action: UpgradeAction} } @@ -15250,7 +15247,7 @@ yydefault: case 686: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL bool -//line sql.y:3829 +//line sql.y:3823 { yyLOCAL = false } @@ -15258,7 +15255,7 @@ yydefault: case 687: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL bool -//line sql.y:3833 +//line sql.y:3827 { yyLOCAL = false } @@ -15266,7 +15263,7 @@ yydefault: case 688: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL bool -//line sql.y:3837 +//line sql.y:3831 { yyLOCAL = true } @@ -15274,28 +15271,28 @@ yydefault: case 689: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL []*PartitionDefinition -//line sql.y:3843 +//line sql.y:3837 { yyLOCAL = []*PartitionDefinition{yyDollar[1].partDefUnion()} } yyVAL.union = yyLOCAL case 690: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:3847 +//line sql.y:3841 { yySLICE := (*[]*PartitionDefinition)(yyIaddr(yyVAL.union)) *yySLICE = append(*yySLICE, yyDollar[3].partDefUnion()) } case 691: yyDollar = yyS[yypt-2 : yypt+1] -//line sql.y:3853 +//line sql.y:3847 { yyVAL.partDefUnion().Options = yyDollar[2].partitionDefinitionOptionsUnion() } case 692: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL *PartitionDefinitionOptions -//line sql.y:3858 
+//line sql.y:3852 { yyLOCAL = &PartitionDefinitionOptions{} } @@ -15303,7 +15300,7 @@ yydefault: case 693: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL *PartitionDefinitionOptions -//line sql.y:3862 +//line sql.y:3856 { yyDollar[1].partitionDefinitionOptionsUnion().ValueRange = yyDollar[2].partitionValueRangeUnion() yyLOCAL = yyDollar[1].partitionDefinitionOptionsUnion() @@ -15312,7 +15309,7 @@ yydefault: case 694: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL *PartitionDefinitionOptions -//line sql.y:3867 +//line sql.y:3861 { yyDollar[1].partitionDefinitionOptionsUnion().Comment = yyDollar[2].literalUnion() yyLOCAL = yyDollar[1].partitionDefinitionOptionsUnion() @@ -15321,7 +15318,7 @@ yydefault: case 695: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL *PartitionDefinitionOptions -//line sql.y:3872 +//line sql.y:3866 { yyDollar[1].partitionDefinitionOptionsUnion().Engine = yyDollar[2].partitionEngineUnion() yyLOCAL = yyDollar[1].partitionDefinitionOptionsUnion() @@ -15330,7 +15327,7 @@ yydefault: case 696: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL *PartitionDefinitionOptions -//line sql.y:3877 +//line sql.y:3871 { yyDollar[1].partitionDefinitionOptionsUnion().DataDirectory = yyDollar[2].literalUnion() yyLOCAL = yyDollar[1].partitionDefinitionOptionsUnion() @@ -15339,7 +15336,7 @@ yydefault: case 697: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL *PartitionDefinitionOptions -//line sql.y:3882 +//line sql.y:3876 { yyDollar[1].partitionDefinitionOptionsUnion().IndexDirectory = yyDollar[2].literalUnion() yyLOCAL = yyDollar[1].partitionDefinitionOptionsUnion() @@ -15348,27 +15345,25 @@ yydefault: case 698: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL *PartitionDefinitionOptions -//line sql.y:3887 +//line sql.y:3881 { - val := yyDollar[2].integerUnion() - yyDollar[1].partitionDefinitionOptionsUnion().MaxRows = &val + yyDollar[1].partitionDefinitionOptionsUnion().MaxRows = ptr.Of(yyDollar[2].integerUnion()) yyLOCAL = yyDollar[1].partitionDefinitionOptionsUnion() } yyVAL.union 
= yyLOCAL case 699: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL *PartitionDefinitionOptions -//line sql.y:3893 +//line sql.y:3886 { - val := yyDollar[2].integerUnion() - yyDollar[1].partitionDefinitionOptionsUnion().MinRows = &val + yyDollar[1].partitionDefinitionOptionsUnion().MinRows = ptr.Of(yyDollar[2].integerUnion()) yyLOCAL = yyDollar[1].partitionDefinitionOptionsUnion() } yyVAL.union = yyLOCAL case 700: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL *PartitionDefinitionOptions -//line sql.y:3899 +//line sql.y:3891 { yyDollar[1].partitionDefinitionOptionsUnion().TableSpace = yyDollar[2].str yyLOCAL = yyDollar[1].partitionDefinitionOptionsUnion() @@ -15377,7 +15372,7 @@ yydefault: case 701: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL *PartitionDefinitionOptions -//line sql.y:3904 +//line sql.y:3896 { yyDollar[1].partitionDefinitionOptionsUnion().SubPartitionDefinitions = yyDollar[2].subPartitionDefinitionsUnion() yyLOCAL = yyDollar[1].partitionDefinitionOptionsUnion() @@ -15386,7 +15381,7 @@ yydefault: case 702: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL SubPartitionDefinitions -//line sql.y:3910 +//line sql.y:3902 { yyLOCAL = yyDollar[2].subPartitionDefinitionsUnion() } @@ -15394,14 +15389,14 @@ yydefault: case 703: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL SubPartitionDefinitions -//line sql.y:3916 +//line sql.y:3908 { yyLOCAL = SubPartitionDefinitions{yyDollar[1].subPartitionDefinitionUnion()} } yyVAL.union = yyLOCAL case 704: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:3920 +//line sql.y:3912 { yySLICE := (*SubPartitionDefinitions)(yyIaddr(yyVAL.union)) *yySLICE = append(*yySLICE, yyDollar[3].subPartitionDefinitionUnion()) @@ -15409,7 +15404,7 @@ yydefault: case 705: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *SubPartitionDefinition -//line sql.y:3926 +//line sql.y:3918 { yyLOCAL = &SubPartitionDefinition{Name: yyDollar[2].identifierCI, Options: yyDollar[3].subPartitionDefinitionOptionsUnion()} } @@ -15417,7 +15412,7 @@ yydefault: case 706: yyDollar = 
yyS[yypt-0 : yypt+1] var yyLOCAL *SubPartitionDefinitionOptions -//line sql.y:3931 +//line sql.y:3923 { yyLOCAL = &SubPartitionDefinitionOptions{} } @@ -15425,7 +15420,7 @@ yydefault: case 707: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL *SubPartitionDefinitionOptions -//line sql.y:3935 +//line sql.y:3927 { yyDollar[1].subPartitionDefinitionOptionsUnion().Comment = yyDollar[2].literalUnion() yyLOCAL = yyDollar[1].subPartitionDefinitionOptionsUnion() @@ -15434,7 +15429,7 @@ yydefault: case 708: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL *SubPartitionDefinitionOptions -//line sql.y:3940 +//line sql.y:3932 { yyDollar[1].subPartitionDefinitionOptionsUnion().Engine = yyDollar[2].partitionEngineUnion() yyLOCAL = yyDollar[1].subPartitionDefinitionOptionsUnion() @@ -15443,7 +15438,7 @@ yydefault: case 709: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL *SubPartitionDefinitionOptions -//line sql.y:3945 +//line sql.y:3937 { yyDollar[1].subPartitionDefinitionOptionsUnion().DataDirectory = yyDollar[2].literalUnion() yyLOCAL = yyDollar[1].subPartitionDefinitionOptionsUnion() @@ -15452,7 +15447,7 @@ yydefault: case 710: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL *SubPartitionDefinitionOptions -//line sql.y:3950 +//line sql.y:3942 { yyDollar[1].subPartitionDefinitionOptionsUnion().IndexDirectory = yyDollar[2].literalUnion() yyLOCAL = yyDollar[1].subPartitionDefinitionOptionsUnion() @@ -15461,27 +15456,25 @@ yydefault: case 711: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL *SubPartitionDefinitionOptions -//line sql.y:3955 +//line sql.y:3947 { - val := yyDollar[2].integerUnion() - yyDollar[1].subPartitionDefinitionOptionsUnion().MaxRows = &val + yyDollar[1].subPartitionDefinitionOptionsUnion().MaxRows = ptr.Of(yyDollar[2].integerUnion()) yyLOCAL = yyDollar[1].subPartitionDefinitionOptionsUnion() } yyVAL.union = yyLOCAL case 712: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL *SubPartitionDefinitionOptions -//line sql.y:3961 +//line sql.y:3952 { - val := yyDollar[2].integerUnion() - 
yyDollar[1].subPartitionDefinitionOptionsUnion().MinRows = &val + yyDollar[1].subPartitionDefinitionOptionsUnion().MinRows = ptr.Of(yyDollar[2].integerUnion()) yyLOCAL = yyDollar[1].subPartitionDefinitionOptionsUnion() } yyVAL.union = yyLOCAL case 713: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL *SubPartitionDefinitionOptions -//line sql.y:3967 +//line sql.y:3957 { yyDollar[1].subPartitionDefinitionOptionsUnion().TableSpace = yyDollar[2].str yyLOCAL = yyDollar[1].subPartitionDefinitionOptionsUnion() @@ -15490,7 +15483,7 @@ yydefault: case 714: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL *PartitionValueRange -//line sql.y:3974 +//line sql.y:3964 { yyLOCAL = &PartitionValueRange{ Type: LessThanType, @@ -15501,7 +15494,7 @@ yydefault: case 715: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL *PartitionValueRange -//line sql.y:3981 +//line sql.y:3971 { yyLOCAL = &PartitionValueRange{ Type: LessThanType, @@ -15512,7 +15505,7 @@ yydefault: case 716: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *PartitionValueRange -//line sql.y:3988 +//line sql.y:3978 { yyLOCAL = &PartitionValueRange{ Type: InType, @@ -15523,7 +15516,7 @@ yydefault: case 717: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL bool -//line sql.y:3996 +//line sql.y:3986 { yyLOCAL = false } @@ -15531,7 +15524,7 @@ yydefault: case 718: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL bool -//line sql.y:4000 +//line sql.y:3990 { yyLOCAL = true } @@ -15539,7 +15532,7 @@ yydefault: case 719: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL *PartitionEngine -//line sql.y:4006 +//line sql.y:3996 { yyLOCAL = &PartitionEngine{Storage: yyDollar[1].booleanUnion(), Name: yyDollar[4].identifierCS.String()} } @@ -15547,7 +15540,7 @@ yydefault: case 720: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *Literal -//line sql.y:4012 +//line sql.y:4002 { yyLOCAL = NewStrLiteral(yyDollar[3].str) } @@ -15555,7 +15548,7 @@ yydefault: case 721: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL *Literal -//line sql.y:4018 +//line sql.y:4008 { yyLOCAL = 
NewStrLiteral(yyDollar[4].str) } @@ -15563,7 +15556,7 @@ yydefault: case 722: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL *Literal -//line sql.y:4024 +//line sql.y:4014 { yyLOCAL = NewStrLiteral(yyDollar[4].str) } @@ -15571,7 +15564,7 @@ yydefault: case 723: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL int -//line sql.y:4030 +//line sql.y:4020 { yyLOCAL = convertStringToInt(yyDollar[3].str) } @@ -15579,41 +15572,41 @@ yydefault: case 724: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL int -//line sql.y:4036 +//line sql.y:4026 { yyLOCAL = convertStringToInt(yyDollar[3].str) } yyVAL.union = yyLOCAL case 725: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:4042 +//line sql.y:4032 { yyVAL.str = yyDollar[3].identifierCS.String() } case 726: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL *PartitionDefinition -//line sql.y:4048 +//line sql.y:4038 { yyLOCAL = &PartitionDefinition{Name: yyDollar[2].identifierCI} } yyVAL.union = yyLOCAL case 727: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:4054 +//line sql.y:4044 { yyVAL.str = "" } case 728: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:4058 +//line sql.y:4048 { yyVAL.str = "" } case 729: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL Statement -//line sql.y:4064 +//line sql.y:4054 { yyLOCAL = &RenameTable{TablePairs: yyDollar[3].renameTablePairsUnion()} } @@ -15621,14 +15614,14 @@ yydefault: case 730: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL []*RenameTablePair -//line sql.y:4070 +//line sql.y:4060 { yyLOCAL = []*RenameTablePair{{FromTable: yyDollar[1].tableName, ToTable: yyDollar[3].tableName}} } yyVAL.union = yyLOCAL case 731: yyDollar = yyS[yypt-5 : yypt+1] -//line sql.y:4074 +//line sql.y:4064 { yySLICE := (*[]*RenameTablePair)(yyIaddr(yyVAL.union)) *yySLICE = append(*yySLICE, &RenameTablePair{FromTable: yyDollar[3].tableName, ToTable: yyDollar[5].tableName}) @@ -15636,7 +15629,7 @@ yydefault: case 732: yyDollar = yyS[yypt-8 : yypt+1] var yyLOCAL Statement -//line sql.y:4080 +//line sql.y:4070 { yyLOCAL = &DropTable{FromTables: 
yyDollar[6].tableNamesUnion(), IfExists: yyDollar[5].booleanUnion(), Comments: Comments(yyDollar[2].strs).Parsed(), Temp: yyDollar[3].booleanUnion(), DropSchema: yyDollar[8].booleanUnion()} } @@ -15644,7 +15637,7 @@ yydefault: case 733: yyDollar = yyS[yypt-7 : yypt+1] var yyLOCAL Statement -//line sql.y:4084 +//line sql.y:4074 { // Change this to an alter statement if yyDollar[4].identifierCI.Lowered() == "primary" { @@ -15657,7 +15650,7 @@ yydefault: case 734: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Statement -//line sql.y:4093 +//line sql.y:4083 { yyLOCAL = &DropView{FromTables: yyDollar[5].tableNamesUnion(), Comments: Comments(yyDollar[2].strs).Parsed(), IfExists: yyDollar[4].booleanUnion()} } @@ -15665,7 +15658,7 @@ yydefault: case 735: yyDollar = yyS[yypt-5 : yypt+1] var yyLOCAL Statement -//line sql.y:4097 +//line sql.y:4087 { yyLOCAL = &DropDatabase{Comments: Comments(yyDollar[2].strs).Parsed(), DBName: yyDollar[5].identifierCS, IfExists: yyDollar[4].booleanUnion()} } @@ -15673,7 +15666,7 @@ yydefault: case 736: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL Statement -//line sql.y:4103 +//line sql.y:4093 { yyLOCAL = &TruncateTable{Table: yyDollar[3].tableName} } @@ -15681,7 +15674,7 @@ yydefault: case 737: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL Statement -//line sql.y:4107 +//line sql.y:4097 { yyLOCAL = &TruncateTable{Table: yyDollar[2].tableName} } @@ -15689,7 +15682,7 @@ yydefault: case 738: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Statement -//line sql.y:4113 +//line sql.y:4103 { yyLOCAL = &Analyze{IsLocal: yyDollar[2].booleanUnion(), Table: yyDollar[4].tableName} } @@ -15697,7 +15690,7 @@ yydefault: case 739: yyDollar = yyS[yypt-5 : yypt+1] var yyLOCAL Statement -//line sql.y:4119 +//line sql.y:4109 { yyLOCAL = &PurgeBinaryLogs{To: string(yyDollar[5].str)} } @@ -15705,7 +15698,7 @@ yydefault: case 740: yyDollar = yyS[yypt-5 : yypt+1] var yyLOCAL Statement -//line sql.y:4123 +//line sql.y:4113 { yyLOCAL = &PurgeBinaryLogs{Before: 
string(yyDollar[5].str)} } @@ -15713,7 +15706,7 @@ yydefault: case 741: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL Statement -//line sql.y:4129 +//line sql.y:4119 { yyLOCAL = &Show{&ShowBasic{Command: Charset, Filter: yyDollar[3].showFilterUnion()}} } @@ -15721,7 +15714,7 @@ yydefault: case 742: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL Statement -//line sql.y:4133 +//line sql.y:4123 { yyLOCAL = &Show{&ShowBasic{Command: Collation, Filter: yyDollar[3].showFilterUnion()}} } @@ -15729,7 +15722,7 @@ yydefault: case 743: yyDollar = yyS[yypt-7 : yypt+1] var yyLOCAL Statement -//line sql.y:4137 +//line sql.y:4127 { yyLOCAL = &Show{&ShowBasic{Full: yyDollar[2].booleanUnion(), Command: Column, Tbl: yyDollar[5].tableName, DbName: yyDollar[6].identifierCS, Filter: yyDollar[7].showFilterUnion()}} } @@ -15737,7 +15730,7 @@ yydefault: case 744: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL Statement -//line sql.y:4141 +//line sql.y:4131 { yyLOCAL = &Show{&ShowBasic{Command: Database, Filter: yyDollar[3].showFilterUnion()}} } @@ -15745,7 +15738,7 @@ yydefault: case 745: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL Statement -//line sql.y:4145 +//line sql.y:4135 { yyLOCAL = &Show{&ShowBasic{Command: Database, Filter: yyDollar[3].showFilterUnion()}} } @@ -15753,7 +15746,7 @@ yydefault: case 746: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL Statement -//line sql.y:4149 +//line sql.y:4139 { yyLOCAL = &Show{&ShowBasic{Command: Keyspace, Filter: yyDollar[3].showFilterUnion()}} } @@ -15761,7 +15754,7 @@ yydefault: case 747: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL Statement -//line sql.y:4153 +//line sql.y:4143 { yyLOCAL = &Show{&ShowBasic{Command: Keyspace, Filter: yyDollar[3].showFilterUnion()}} } @@ -15769,7 +15762,7 @@ yydefault: case 748: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Statement -//line sql.y:4157 +//line sql.y:4147 { yyLOCAL = &Show{&ShowBasic{Command: Function, Filter: yyDollar[4].showFilterUnion()}} } @@ -15777,7 +15770,7 @@ yydefault: case 749: yyDollar = 
yyS[yypt-7 : yypt+1] var yyLOCAL Statement -//line sql.y:4161 +//line sql.y:4151 { yyLOCAL = &Show{&ShowBasic{Command: Index, Tbl: yyDollar[5].tableName, DbName: yyDollar[6].identifierCS, Filter: yyDollar[7].showFilterUnion()}} } @@ -15785,7 +15778,7 @@ yydefault: case 750: yyDollar = yyS[yypt-5 : yypt+1] var yyLOCAL Statement -//line sql.y:4165 +//line sql.y:4155 { yyLOCAL = &Show{&ShowBasic{Command: OpenTable, DbName: yyDollar[4].identifierCS, Filter: yyDollar[5].showFilterUnion()}} } @@ -15793,7 +15786,7 @@ yydefault: case 751: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL Statement -//line sql.y:4169 +//line sql.y:4159 { yyLOCAL = &Show{&ShowBasic{Command: Privilege}} } @@ -15801,7 +15794,7 @@ yydefault: case 752: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Statement -//line sql.y:4173 +//line sql.y:4163 { yyLOCAL = &Show{&ShowBasic{Command: Procedure, Filter: yyDollar[4].showFilterUnion()}} } @@ -15809,7 +15802,7 @@ yydefault: case 753: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Statement -//line sql.y:4177 +//line sql.y:4167 { yyLOCAL = &Show{&ShowBasic{Command: StatusSession, Filter: yyDollar[4].showFilterUnion()}} } @@ -15817,7 +15810,7 @@ yydefault: case 754: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Statement -//line sql.y:4181 +//line sql.y:4171 { yyLOCAL = &Show{&ShowBasic{Command: StatusGlobal, Filter: yyDollar[4].showFilterUnion()}} } @@ -15825,7 +15818,7 @@ yydefault: case 755: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Statement -//line sql.y:4185 +//line sql.y:4175 { yyLOCAL = &Show{&ShowBasic{Command: VariableSession, Filter: yyDollar[4].showFilterUnion()}} } @@ -15833,7 +15826,7 @@ yydefault: case 756: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Statement -//line sql.y:4189 +//line sql.y:4179 { yyLOCAL = &Show{&ShowBasic{Command: VariableGlobal, Filter: yyDollar[4].showFilterUnion()}} } @@ -15841,7 +15834,7 @@ yydefault: case 757: yyDollar = yyS[yypt-5 : yypt+1] var yyLOCAL Statement -//line sql.y:4193 +//line sql.y:4183 { yyLOCAL = 
&Show{&ShowBasic{Command: TableStatus, DbName: yyDollar[4].identifierCS, Filter: yyDollar[5].showFilterUnion()}} } @@ -15849,7 +15842,7 @@ yydefault: case 758: yyDollar = yyS[yypt-5 : yypt+1] var yyLOCAL Statement -//line sql.y:4197 +//line sql.y:4187 { yyLOCAL = &Show{&ShowBasic{Command: Table, Full: yyDollar[2].booleanUnion(), DbName: yyDollar[4].identifierCS, Filter: yyDollar[5].showFilterUnion()}} } @@ -15857,7 +15850,7 @@ yydefault: case 759: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Statement -//line sql.y:4201 +//line sql.y:4191 { yyLOCAL = &Show{&ShowBasic{Command: Trigger, DbName: yyDollar[3].identifierCS, Filter: yyDollar[4].showFilterUnion()}} } @@ -15865,7 +15858,7 @@ yydefault: case 760: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Statement -//line sql.y:4205 +//line sql.y:4195 { yyLOCAL = &Show{&ShowCreate{Command: CreateDb, Op: yyDollar[4].tableName}} } @@ -15873,7 +15866,7 @@ yydefault: case 761: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Statement -//line sql.y:4209 +//line sql.y:4199 { yyLOCAL = &Show{&ShowCreate{Command: CreateE, Op: yyDollar[4].tableName}} } @@ -15881,7 +15874,7 @@ yydefault: case 762: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Statement -//line sql.y:4213 +//line sql.y:4203 { yyLOCAL = &Show{&ShowCreate{Command: CreateF, Op: yyDollar[4].tableName}} } @@ -15889,7 +15882,7 @@ yydefault: case 763: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Statement -//line sql.y:4217 +//line sql.y:4207 { yyLOCAL = &Show{&ShowCreate{Command: CreateProc, Op: yyDollar[4].tableName}} } @@ -15897,7 +15890,7 @@ yydefault: case 764: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Statement -//line sql.y:4221 +//line sql.y:4211 { yyLOCAL = &Show{&ShowCreate{Command: CreateTbl, Op: yyDollar[4].tableName}} } @@ -15905,7 +15898,7 @@ yydefault: case 765: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Statement -//line sql.y:4225 +//line sql.y:4215 { yyLOCAL = &Show{&ShowCreate{Command: CreateTr, Op: yyDollar[4].tableName}} } @@ -15913,7 +15906,7 @@ yydefault: 
case 766: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Statement -//line sql.y:4229 +//line sql.y:4219 { yyLOCAL = &Show{&ShowCreate{Command: CreateV, Op: yyDollar[4].tableName}} } @@ -15921,7 +15914,7 @@ yydefault: case 767: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL Statement -//line sql.y:4233 +//line sql.y:4223 { yyLOCAL = &Show{&ShowBasic{Command: Engines}} } @@ -15929,7 +15922,7 @@ yydefault: case 768: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL Statement -//line sql.y:4237 +//line sql.y:4227 { yyLOCAL = &Show{&ShowBasic{Command: Plugins}} } @@ -15937,7 +15930,7 @@ yydefault: case 769: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Statement -//line sql.y:4241 +//line sql.y:4231 { yyLOCAL = &Show{&ShowBasic{Command: GtidExecGlobal, DbName: yyDollar[4].identifierCS}} } @@ -15945,7 +15938,7 @@ yydefault: case 770: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Statement -//line sql.y:4245 +//line sql.y:4235 { yyLOCAL = &Show{&ShowBasic{Command: VGtidExecGlobal, DbName: yyDollar[4].identifierCS}} } @@ -15953,7 +15946,7 @@ yydefault: case 771: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Statement -//line sql.y:4249 +//line sql.y:4239 { yyLOCAL = &Show{&ShowBasic{Command: VitessVariables, Filter: yyDollar[4].showFilterUnion()}} } @@ -15961,7 +15954,7 @@ yydefault: case 772: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Statement -//line sql.y:4253 +//line sql.y:4243 { yyLOCAL = &Show{&ShowBasic{Command: VitessMigrations, Filter: yyDollar[4].showFilterUnion(), DbName: yyDollar[3].identifierCS}} } @@ -15969,7 +15962,7 @@ yydefault: case 773: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Statement -//line sql.y:4257 +//line sql.y:4247 { yyLOCAL = &ShowMigrationLogs{UUID: string(yyDollar[3].str)} } @@ -15977,7 +15970,7 @@ yydefault: case 774: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL Statement -//line sql.y:4261 +//line sql.y:4251 { yyLOCAL = &ShowThrottledApps{} } @@ -15985,7 +15978,7 @@ yydefault: case 775: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL Statement -//line 
sql.y:4265 +//line sql.y:4255 { yyLOCAL = &Show{&ShowBasic{Command: VitessReplicationStatus, Filter: yyDollar[3].showFilterUnion()}} } @@ -15993,7 +15986,7 @@ yydefault: case 776: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL Statement -//line sql.y:4269 +//line sql.y:4259 { yyLOCAL = &ShowThrottlerStatus{} } @@ -16001,7 +15994,7 @@ yydefault: case 777: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL Statement -//line sql.y:4273 +//line sql.y:4263 { yyLOCAL = &Show{&ShowBasic{Command: VschemaTables}} } @@ -16009,7 +16002,7 @@ yydefault: case 778: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL Statement -//line sql.y:4277 +//line sql.y:4267 { yyLOCAL = &Show{&ShowBasic{Command: VschemaVindexes}} } @@ -16017,7 +16010,7 @@ yydefault: case 779: yyDollar = yyS[yypt-5 : yypt+1] var yyLOCAL Statement -//line sql.y:4281 +//line sql.y:4271 { yyLOCAL = &Show{&ShowBasic{Command: VschemaVindexes, Tbl: yyDollar[5].tableName}} } @@ -16025,7 +16018,7 @@ yydefault: case 780: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL Statement -//line sql.y:4285 +//line sql.y:4275 { yyLOCAL = &Show{&ShowBasic{Command: Warnings}} } @@ -16033,7 +16026,7 @@ yydefault: case 781: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL Statement -//line sql.y:4289 +//line sql.y:4279 { yyLOCAL = &Show{&ShowBasic{Command: VitessShards, Filter: yyDollar[3].showFilterUnion()}} } @@ -16041,7 +16034,7 @@ yydefault: case 782: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL Statement -//line sql.y:4293 +//line sql.y:4283 { yyLOCAL = &Show{&ShowBasic{Command: VitessTablets, Filter: yyDollar[3].showFilterUnion()}} } @@ -16049,7 +16042,7 @@ yydefault: case 783: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL Statement -//line sql.y:4297 +//line sql.y:4287 { yyLOCAL = &Show{&ShowBasic{Command: VitessTarget}} } @@ -16057,7 +16050,7 @@ yydefault: case 784: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL Statement -//line sql.y:4304 +//line sql.y:4294 { yyLOCAL = &Show{&ShowOther{Command: string(yyDollar[2].identifierCI.String())}} } @@ -16065,7 
+16058,7 @@ yydefault: case 785: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Statement -//line sql.y:4308 +//line sql.y:4298 { yyLOCAL = &Show{&ShowOther{Command: string(yyDollar[2].str) + " " + string(yyDollar[3].str)}} } @@ -16073,7 +16066,7 @@ yydefault: case 786: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Statement -//line sql.y:4312 +//line sql.y:4302 { yyLOCAL = &Show{&ShowOther{Command: string(yyDollar[2].str) + " " + yyDollar[3].identifierCI.String()}} } @@ -16081,7 +16074,7 @@ yydefault: case 787: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Statement -//line sql.y:4316 +//line sql.y:4306 { yyLOCAL = &Show{&ShowOther{Command: string(yyDollar[2].str) + " " + string(yyDollar[3].str)}} } @@ -16089,7 +16082,7 @@ yydefault: case 788: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL Statement -//line sql.y:4320 +//line sql.y:4310 { yyLOCAL = &Show{&ShowOther{Command: string(yyDollar[2].str)}} } @@ -16097,7 +16090,7 @@ yydefault: case 789: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Statement -//line sql.y:4324 +//line sql.y:4314 { yyLOCAL = &Show{&ShowOther{Command: string(yyDollar[2].str) + " " + string(yyDollar[3].str) + " " + String(yyDollar[4].tableName)}} } @@ -16105,7 +16098,7 @@ yydefault: case 790: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Statement -//line sql.y:4328 +//line sql.y:4318 { yyLOCAL = &Show{&ShowOther{Command: string(yyDollar[2].str) + " " + string(yyDollar[3].str) + " " + String(yyDollar[4].tableName)}} } @@ -16113,7 +16106,7 @@ yydefault: case 791: yyDollar = yyS[yypt-5 : yypt+1] var yyLOCAL Statement -//line sql.y:4332 +//line sql.y:4322 { yyLOCAL = &Show{&ShowOther{Command: string(yyDollar[3].str)}} } @@ -16121,27 +16114,27 @@ yydefault: case 792: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL Statement -//line sql.y:4336 +//line sql.y:4326 { yyLOCAL = &Show{&ShowOther{Command: string(yyDollar[2].str)}} } yyVAL.union = yyLOCAL case 793: yyDollar = yyS[yypt-0 : yypt+1] -//line sql.y:4342 +//line sql.y:4332 { yyVAL.str = "" } case 794: yyDollar 
= yyS[yypt-1 : yypt+1] -//line sql.y:4346 +//line sql.y:4336 { yyVAL.str = "extended " } case 795: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL bool -//line sql.y:4352 +//line sql.y:4342 { yyLOCAL = false } @@ -16149,45 +16142,45 @@ yydefault: case 796: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL bool -//line sql.y:4356 +//line sql.y:4346 { yyLOCAL = true } yyVAL.union = yyLOCAL case 797: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:4362 +//line sql.y:4352 { yyVAL.str = string(yyDollar[1].str) } case 798: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:4366 +//line sql.y:4356 { yyVAL.str = string(yyDollar[1].str) } case 799: yyDollar = yyS[yypt-0 : yypt+1] -//line sql.y:4372 +//line sql.y:4362 { yyVAL.identifierCS = NewIdentifierCS("") } case 800: yyDollar = yyS[yypt-2 : yypt+1] -//line sql.y:4376 +//line sql.y:4366 { yyVAL.identifierCS = yyDollar[2].identifierCS } case 801: yyDollar = yyS[yypt-2 : yypt+1] -//line sql.y:4380 +//line sql.y:4370 { yyVAL.identifierCS = yyDollar[2].identifierCS } case 802: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL *ShowFilter -//line sql.y:4386 +//line sql.y:4376 { yyLOCAL = nil } @@ -16195,7 +16188,7 @@ yydefault: case 803: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL *ShowFilter -//line sql.y:4390 +//line sql.y:4380 { yyLOCAL = &ShowFilter{Like: string(yyDollar[2].str)} } @@ -16203,7 +16196,7 @@ yydefault: case 804: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL *ShowFilter -//line sql.y:4394 +//line sql.y:4384 { yyLOCAL = &ShowFilter{Filter: yyDollar[2].exprUnion()} } @@ -16211,7 +16204,7 @@ yydefault: case 805: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL *ShowFilter -//line sql.y:4400 +//line sql.y:4390 { yyLOCAL = nil } @@ -16219,45 +16212,45 @@ yydefault: case 806: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL *ShowFilter -//line sql.y:4404 +//line sql.y:4394 { yyLOCAL = &ShowFilter{Like: string(yyDollar[2].str)} } yyVAL.union = yyLOCAL case 807: yyDollar = yyS[yypt-0 : yypt+1] -//line sql.y:4410 +//line sql.y:4400 { yyVAL.empty = 
struct{}{} } case 808: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:4414 +//line sql.y:4404 { yyVAL.empty = struct{}{} } case 809: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:4418 +//line sql.y:4408 { yyVAL.empty = struct{}{} } case 810: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:4424 +//line sql.y:4414 { yyVAL.str = string(yyDollar[1].str) } case 811: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:4428 +//line sql.y:4418 { yyVAL.str = string(yyDollar[1].str) } case 812: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL Statement -//line sql.y:4434 +//line sql.y:4424 { yyLOCAL = &Use{DBName: yyDollar[2].identifierCS} } @@ -16265,7 +16258,7 @@ yydefault: case 813: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL Statement -//line sql.y:4438 +//line sql.y:4428 { yyLOCAL = &Use{DBName: IdentifierCS{v: ""}} } @@ -16273,39 +16266,39 @@ yydefault: case 814: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL Statement -//line sql.y:4442 +//line sql.y:4432 { yyLOCAL = &Use{DBName: NewIdentifierCS(yyDollar[2].identifierCS.String() + "@" + string(yyDollar[3].str))} } yyVAL.union = yyLOCAL case 815: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:4449 +//line sql.y:4439 { yyVAL.identifierCS = NewIdentifierCS(string(yyDollar[1].str)) } case 816: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:4453 +//line sql.y:4443 { yyVAL.identifierCS = NewIdentifierCS("@" + string(yyDollar[1].str)) } case 817: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:4457 +//line sql.y:4447 { yyVAL.identifierCS = NewIdentifierCS("@@" + string(yyDollar[1].str)) } case 818: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:4461 +//line sql.y:4451 { yyVAL.identifierCS = NewIdentifierCS(string(yyDollar[1].str)) } case 819: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL Statement -//line sql.y:4468 +//line sql.y:4458 { yyLOCAL = &Begin{} } @@ -16313,7 +16306,7 @@ yydefault: case 820: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL Statement -//line sql.y:4472 +//line sql.y:4462 { yyLOCAL = &Begin{TxAccessModes: 
yyDollar[3].txAccessModesUnion()} } @@ -16321,7 +16314,7 @@ yydefault: case 821: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL []TxAccessMode -//line sql.y:4477 +//line sql.y:4467 { yyLOCAL = nil } @@ -16329,7 +16322,7 @@ yydefault: case 822: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL []TxAccessMode -//line sql.y:4481 +//line sql.y:4471 { yyLOCAL = yyDollar[1].txAccessModesUnion() } @@ -16337,14 +16330,14 @@ yydefault: case 823: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL []TxAccessMode -//line sql.y:4487 +//line sql.y:4477 { yyLOCAL = []TxAccessMode{yyDollar[1].txAccessModeUnion()} } yyVAL.union = yyLOCAL case 824: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:4491 +//line sql.y:4481 { yySLICE := (*[]TxAccessMode)(yyIaddr(yyVAL.union)) *yySLICE = append(*yySLICE, yyDollar[3].txAccessModeUnion()) @@ -16352,7 +16345,7 @@ yydefault: case 825: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL TxAccessMode -//line sql.y:4497 +//line sql.y:4487 { yyLOCAL = WithConsistentSnapshot } @@ -16360,7 +16353,7 @@ yydefault: case 826: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL TxAccessMode -//line sql.y:4501 +//line sql.y:4491 { yyLOCAL = ReadWrite } @@ -16368,7 +16361,7 @@ yydefault: case 827: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL TxAccessMode -//line sql.y:4505 +//line sql.y:4495 { yyLOCAL = ReadOnly } @@ -16376,7 +16369,7 @@ yydefault: case 828: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL Statement -//line sql.y:4512 +//line sql.y:4502 { yyLOCAL = &Commit{} } @@ -16384,7 +16377,7 @@ yydefault: case 829: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL Statement -//line sql.y:4518 +//line sql.y:4508 { yyLOCAL = &Rollback{} } @@ -16392,39 +16385,39 @@ yydefault: case 830: yyDollar = yyS[yypt-5 : yypt+1] var yyLOCAL Statement -//line sql.y:4522 +//line sql.y:4512 { yyLOCAL = &SRollback{Name: yyDollar[5].identifierCI} } yyVAL.union = yyLOCAL case 831: yyDollar = yyS[yypt-0 : yypt+1] -//line sql.y:4527 +//line sql.y:4517 { yyVAL.empty = struct{}{} } case 832: yyDollar = yyS[yypt-1 : 
yypt+1] -//line sql.y:4529 +//line sql.y:4519 { yyVAL.empty = struct{}{} } case 833: yyDollar = yyS[yypt-0 : yypt+1] -//line sql.y:4532 +//line sql.y:4522 { yyVAL.empty = struct{}{} } case 834: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:4534 +//line sql.y:4524 { yyVAL.empty = struct{}{} } case 835: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL Statement -//line sql.y:4538 +//line sql.y:4528 { yyLOCAL = &Savepoint{Name: yyDollar[2].identifierCI} } @@ -16432,7 +16425,7 @@ yydefault: case 836: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL Statement -//line sql.y:4544 +//line sql.y:4534 { yyLOCAL = &Release{Name: yyDollar[3].identifierCI} } @@ -16440,7 +16433,7 @@ yydefault: case 837: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL ExplainType -//line sql.y:4549 +//line sql.y:4539 { yyLOCAL = EmptyType } @@ -16448,7 +16441,7 @@ yydefault: case 838: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL ExplainType -//line sql.y:4553 +//line sql.y:4543 { yyLOCAL = JSONType } @@ -16456,7 +16449,7 @@ yydefault: case 839: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL ExplainType -//line sql.y:4557 +//line sql.y:4547 { yyLOCAL = TreeType } @@ -16464,7 +16457,7 @@ yydefault: case 840: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL ExplainType -//line sql.y:4561 +//line sql.y:4551 { yyLOCAL = VitessType } @@ -16472,7 +16465,7 @@ yydefault: case 841: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL ExplainType -//line sql.y:4565 +//line sql.y:4555 { yyLOCAL = VTExplainType } @@ -16480,7 +16473,7 @@ yydefault: case 842: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL ExplainType -//line sql.y:4569 +//line sql.y:4559 { yyLOCAL = TraditionalType } @@ -16488,7 +16481,7 @@ yydefault: case 843: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL ExplainType -//line sql.y:4573 +//line sql.y:4563 { yyLOCAL = AnalyzeType } @@ -16496,7 +16489,7 @@ yydefault: case 844: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL VExplainType -//line sql.y:4578 +//line sql.y:4568 { yyLOCAL = PlanVExplainType } @@ -16504,7 +16497,7 @@ 
yydefault: case 845: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL VExplainType -//line sql.y:4582 +//line sql.y:4572 { yyLOCAL = PlanVExplainType } @@ -16512,7 +16505,7 @@ yydefault: case 846: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL VExplainType -//line sql.y:4586 +//line sql.y:4576 { yyLOCAL = AllVExplainType } @@ -16520,33 +16513,33 @@ yydefault: case 847: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL VExplainType -//line sql.y:4590 +//line sql.y:4580 { yyLOCAL = QueriesVExplainType } yyVAL.union = yyLOCAL case 848: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:4596 +//line sql.y:4586 { yyVAL.str = yyDollar[1].str } case 849: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:4600 +//line sql.y:4590 { yyVAL.str = yyDollar[1].str } case 850: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:4604 +//line sql.y:4594 { yyVAL.str = yyDollar[1].str } case 851: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL Statement -//line sql.y:4610 +//line sql.y:4600 { yyLOCAL = yyDollar[1].selStmtUnion() } @@ -16554,7 +16547,7 @@ yydefault: case 852: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL Statement -//line sql.y:4614 +//line sql.y:4604 { yyLOCAL = yyDollar[1].statementUnion() } @@ -16562,7 +16555,7 @@ yydefault: case 853: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL Statement -//line sql.y:4618 +//line sql.y:4608 { yyLOCAL = yyDollar[1].statementUnion() } @@ -16570,33 +16563,33 @@ yydefault: case 854: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL Statement -//line sql.y:4622 +//line sql.y:4612 { yyLOCAL = yyDollar[1].statementUnion() } yyVAL.union = yyLOCAL case 855: yyDollar = yyS[yypt-0 : yypt+1] -//line sql.y:4627 +//line sql.y:4617 { yyVAL.str = "" } case 856: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:4631 +//line sql.y:4621 { yyVAL.str = yyDollar[1].identifierCI.val } case 857: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:4635 +//line sql.y:4625 { yyVAL.str = encodeSQLString(yyDollar[1].str) } case 858: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Statement -//line sql.y:4641 
+//line sql.y:4631 { yyLOCAL = &ExplainTab{Table: yyDollar[3].tableName, Wild: yyDollar[4].str} } @@ -16604,7 +16597,7 @@ yydefault: case 859: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Statement -//line sql.y:4645 +//line sql.y:4635 { yyLOCAL = &ExplainStmt{Type: yyDollar[3].explainTypeUnion(), Statement: yyDollar[4].statementUnion(), Comments: Comments(yyDollar[2].strs).Parsed()} } @@ -16612,7 +16605,7 @@ yydefault: case 860: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Statement -//line sql.y:4651 +//line sql.y:4641 { yyLOCAL = &VExplainStmt{Type: yyDollar[3].vexplainTypeUnion(), Statement: yyDollar[4].statementUnion(), Comments: Comments(yyDollar[2].strs).Parsed()} } @@ -16620,7 +16613,7 @@ yydefault: case 861: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL Statement -//line sql.y:4657 +//line sql.y:4647 { yyLOCAL = &OtherAdmin{} } @@ -16628,7 +16621,7 @@ yydefault: case 862: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL Statement -//line sql.y:4661 +//line sql.y:4651 { yyLOCAL = &OtherAdmin{} } @@ -16636,7 +16629,7 @@ yydefault: case 863: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL Statement -//line sql.y:4667 +//line sql.y:4657 { yyLOCAL = &LockTables{Tables: yyDollar[3].tableAndLockTypesUnion()} } @@ -16644,14 +16637,14 @@ yydefault: case 864: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL TableAndLockTypes -//line sql.y:4673 +//line sql.y:4663 { yyLOCAL = TableAndLockTypes{yyDollar[1].tableAndLockTypeUnion()} } yyVAL.union = yyLOCAL case 865: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:4677 +//line sql.y:4667 { yySLICE := (*TableAndLockTypes)(yyIaddr(yyVAL.union)) *yySLICE = append(*yySLICE, yyDollar[3].tableAndLockTypeUnion()) @@ -16659,7 +16652,7 @@ yydefault: case 866: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL *TableAndLockType -//line sql.y:4683 +//line sql.y:4673 { yyLOCAL = &TableAndLockType{Table: yyDollar[1].aliasedTableNameUnion(), Lock: yyDollar[2].lockTypeUnion()} } @@ -16667,7 +16660,7 @@ yydefault: case 867: yyDollar = yyS[yypt-1 : yypt+1] var 
yyLOCAL LockType -//line sql.y:4689 +//line sql.y:4679 { yyLOCAL = Read } @@ -16675,7 +16668,7 @@ yydefault: case 868: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL LockType -//line sql.y:4693 +//line sql.y:4683 { yyLOCAL = ReadLocal } @@ -16683,7 +16676,7 @@ yydefault: case 869: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL LockType -//line sql.y:4697 +//line sql.y:4687 { yyLOCAL = Write } @@ -16691,7 +16684,7 @@ yydefault: case 870: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL LockType -//line sql.y:4701 +//line sql.y:4691 { yyLOCAL = LowPriorityWrite } @@ -16699,7 +16692,7 @@ yydefault: case 871: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL Statement -//line sql.y:4707 +//line sql.y:4697 { yyLOCAL = &UnlockTables{} } @@ -16707,7 +16700,7 @@ yydefault: case 872: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Statement -//line sql.y:4713 +//line sql.y:4703 { yyLOCAL = &RevertMigration{Comments: Comments(yyDollar[2].strs).Parsed(), UUID: string(yyDollar[4].str)} } @@ -16715,7 +16708,7 @@ yydefault: case 873: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL Statement -//line sql.y:4719 +//line sql.y:4709 { yyLOCAL = &Flush{IsLocal: yyDollar[2].booleanUnion(), FlushOptions: yyDollar[3].strs} } @@ -16723,7 +16716,7 @@ yydefault: case 874: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL Statement -//line sql.y:4723 +//line sql.y:4713 { yyLOCAL = &Flush{IsLocal: yyDollar[2].booleanUnion()} } @@ -16731,7 +16724,7 @@ yydefault: case 875: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Statement -//line sql.y:4727 +//line sql.y:4717 { yyLOCAL = &Flush{IsLocal: yyDollar[2].booleanUnion(), WithLock: true} } @@ -16739,7 +16732,7 @@ yydefault: case 876: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Statement -//line sql.y:4731 +//line sql.y:4721 { yyLOCAL = &Flush{IsLocal: yyDollar[2].booleanUnion(), TableNames: yyDollar[4].tableNamesUnion()} } @@ -16747,7 +16740,7 @@ yydefault: case 877: yyDollar = yyS[yypt-7 : yypt+1] var yyLOCAL Statement -//line sql.y:4735 +//line sql.y:4725 { yyLOCAL = 
&Flush{IsLocal: yyDollar[2].booleanUnion(), TableNames: yyDollar[4].tableNamesUnion(), WithLock: true} } @@ -16755,99 +16748,99 @@ yydefault: case 878: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Statement -//line sql.y:4739 +//line sql.y:4729 { yyLOCAL = &Flush{IsLocal: yyDollar[2].booleanUnion(), TableNames: yyDollar[4].tableNamesUnion(), ForExport: true} } yyVAL.union = yyLOCAL case 879: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:4745 +//line sql.y:4735 { yyVAL.strs = []string{yyDollar[1].str} } case 880: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:4749 +//line sql.y:4739 { yyVAL.strs = append(yyDollar[1].strs, yyDollar[3].str) } case 881: yyDollar = yyS[yypt-2 : yypt+1] -//line sql.y:4755 +//line sql.y:4745 { yyVAL.str = string(yyDollar[1].str) + " " + string(yyDollar[2].str) } case 882: yyDollar = yyS[yypt-2 : yypt+1] -//line sql.y:4759 +//line sql.y:4749 { yyVAL.str = string(yyDollar[1].str) + " " + string(yyDollar[2].str) } case 883: yyDollar = yyS[yypt-2 : yypt+1] -//line sql.y:4763 +//line sql.y:4753 { yyVAL.str = string(yyDollar[1].str) + " " + string(yyDollar[2].str) } case 884: yyDollar = yyS[yypt-2 : yypt+1] -//line sql.y:4767 +//line sql.y:4757 { yyVAL.str = string(yyDollar[1].str) + " " + string(yyDollar[2].str) } case 885: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:4771 +//line sql.y:4761 { yyVAL.str = string(yyDollar[1].str) } case 886: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:4775 +//line sql.y:4765 { yyVAL.str = string(yyDollar[1].str) } case 887: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:4779 +//line sql.y:4769 { yyVAL.str = string(yyDollar[1].str) } case 888: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:4783 +//line sql.y:4773 { yyVAL.str = string(yyDollar[1].str) + " " + string(yyDollar[2].str) + yyDollar[3].str } case 889: yyDollar = yyS[yypt-2 : yypt+1] -//line sql.y:4787 +//line sql.y:4777 { yyVAL.str = string(yyDollar[1].str) + " " + string(yyDollar[2].str) } case 890: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:4791 
+//line sql.y:4781 { yyVAL.str = string(yyDollar[1].str) } case 891: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:4795 +//line sql.y:4785 { yyVAL.str = string(yyDollar[1].str) } case 892: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:4799 +//line sql.y:4789 { yyVAL.str = string(yyDollar[1].str) } case 893: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL bool -//line sql.y:4804 +//line sql.y:4794 { yyLOCAL = false } @@ -16855,7 +16848,7 @@ yydefault: case 894: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL bool -//line sql.y:4808 +//line sql.y:4798 { yyLOCAL = true } @@ -16863,52 +16856,52 @@ yydefault: case 895: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL bool -//line sql.y:4812 +//line sql.y:4802 { yyLOCAL = true } yyVAL.union = yyLOCAL case 896: yyDollar = yyS[yypt-0 : yypt+1] -//line sql.y:4817 +//line sql.y:4807 { yyVAL.str = "" } case 897: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:4821 +//line sql.y:4811 { yyVAL.str = " " + string(yyDollar[1].str) + " " + string(yyDollar[2].str) + " " + yyDollar[3].identifierCI.String() } case 898: yyDollar = yyS[yypt-0 : yypt+1] -//line sql.y:4826 +//line sql.y:4816 { setAllowComments(yylex, true) } case 899: yyDollar = yyS[yypt-2 : yypt+1] -//line sql.y:4830 +//line sql.y:4820 { yyVAL.strs = yyDollar[2].strs setAllowComments(yylex, false) } case 900: yyDollar = yyS[yypt-0 : yypt+1] -//line sql.y:4836 +//line sql.y:4826 { yyVAL.strs = nil } case 901: yyDollar = yyS[yypt-2 : yypt+1] -//line sql.y:4840 +//line sql.y:4830 { yyVAL.strs = append(yyDollar[1].strs, yyDollar[2].str) } case 902: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL bool -//line sql.y:4846 +//line sql.y:4836 { yyLOCAL = true } @@ -16916,7 +16909,7 @@ yydefault: case 903: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL bool -//line sql.y:4850 +//line sql.y:4840 { yyLOCAL = false } @@ -16924,33 +16917,33 @@ yydefault: case 904: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL bool -//line sql.y:4854 +//line sql.y:4844 { yyLOCAL = true } yyVAL.union = yyLOCAL case 905: 
yyDollar = yyS[yypt-0 : yypt+1] -//line sql.y:4859 +//line sql.y:4849 { yyVAL.str = "" } case 906: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:4863 +//line sql.y:4853 { yyVAL.str = SQLNoCacheStr } case 907: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:4867 +//line sql.y:4857 { yyVAL.str = SQLCacheStr } case 908: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL bool -//line sql.y:4872 +//line sql.y:4862 { yyLOCAL = false } @@ -16958,7 +16951,7 @@ yydefault: case 909: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL bool -//line sql.y:4876 +//line sql.y:4866 { yyLOCAL = true } @@ -16966,7 +16959,7 @@ yydefault: case 910: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL bool -//line sql.y:4880 +//line sql.y:4870 { yyLOCAL = true } @@ -16974,7 +16967,7 @@ yydefault: case 911: yyDollar = yyS[yypt-5 : yypt+1] var yyLOCAL Statement -//line sql.y:4886 +//line sql.y:4876 { yyLOCAL = &PrepareStmt{Name: yyDollar[3].identifierCI, Comments: Comments(yyDollar[2].strs).Parsed(), Statement: yyDollar[5].exprUnion()} } @@ -16982,7 +16975,7 @@ yydefault: case 912: yyDollar = yyS[yypt-5 : yypt+1] var yyLOCAL Statement -//line sql.y:4890 +//line sql.y:4880 { yyLOCAL = &PrepareStmt{ Name: yyDollar[3].identifierCI, @@ -16994,7 +16987,7 @@ yydefault: case 913: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Statement -//line sql.y:4900 +//line sql.y:4890 { yyLOCAL = &ExecuteStmt{Name: yyDollar[3].identifierCI, Comments: Comments(yyDollar[2].strs).Parsed(), Arguments: yyDollar[4].variablesUnion()} } @@ -17002,7 +16995,7 @@ yydefault: case 914: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL []*Variable -//line sql.y:4905 +//line sql.y:4895 { yyLOCAL = nil } @@ -17010,7 +17003,7 @@ yydefault: case 915: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL []*Variable -//line sql.y:4909 +//line sql.y:4899 { yyLOCAL = yyDollar[2].variablesUnion() } @@ -17018,7 +17011,7 @@ yydefault: case 916: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Statement -//line sql.y:4915 +//line sql.y:4905 { yyLOCAL = &DeallocateStmt{Comments: 
Comments(yyDollar[2].strs).Parsed(), Name: yyDollar[4].identifierCI} } @@ -17026,7 +17019,7 @@ yydefault: case 917: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Statement -//line sql.y:4919 +//line sql.y:4909 { yyLOCAL = &DeallocateStmt{Comments: Comments(yyDollar[2].strs).Parsed(), Name: yyDollar[4].identifierCI} } @@ -17034,7 +17027,7 @@ yydefault: case 918: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL SelectExprs -//line sql.y:4924 +//line sql.y:4914 { yyLOCAL = nil } @@ -17042,94 +17035,94 @@ yydefault: case 919: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL SelectExprs -//line sql.y:4928 +//line sql.y:4918 { yyLOCAL = yyDollar[1].selectExprsUnion() } yyVAL.union = yyLOCAL case 920: yyDollar = yyS[yypt-0 : yypt+1] -//line sql.y:4933 +//line sql.y:4923 { yyVAL.strs = nil } case 921: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:4937 +//line sql.y:4927 { yyVAL.strs = []string{yyDollar[1].str} } case 922: yyDollar = yyS[yypt-2 : yypt+1] -//line sql.y:4941 +//line sql.y:4931 { // TODO: This is a hack since I couldn't get it to work in a nicer way. 
I got 'conflicts: 8 shift/reduce' yyVAL.strs = []string{yyDollar[1].str, yyDollar[2].str} } case 923: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:4945 +//line sql.y:4935 { yyVAL.strs = []string{yyDollar[1].str, yyDollar[2].str, yyDollar[3].str} } case 924: yyDollar = yyS[yypt-4 : yypt+1] -//line sql.y:4949 +//line sql.y:4939 { yyVAL.strs = []string{yyDollar[1].str, yyDollar[2].str, yyDollar[3].str, yyDollar[4].str} } case 925: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:4955 +//line sql.y:4945 { yyVAL.str = SQLNoCacheStr } case 926: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:4959 +//line sql.y:4949 { yyVAL.str = SQLCacheStr } case 927: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:4963 +//line sql.y:4953 { yyVAL.str = DistinctStr } case 928: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:4967 +//line sql.y:4957 { yyVAL.str = DistinctStr } case 929: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:4971 +//line sql.y:4961 { yyVAL.str = StraightJoinHint } case 930: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:4975 +//line sql.y:4965 { yyVAL.str = SQLCalcFoundRowsStr } case 931: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:4979 +//line sql.y:4969 { yyVAL.str = AllStr // These are not picked up by NewSelect, and so ALL will be dropped. 
But this is OK, since it's redundant anyway } case 932: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL SelectExprs -//line sql.y:4985 +//line sql.y:4975 { yyLOCAL = SelectExprs{yyDollar[1].selectExprUnion()} } yyVAL.union = yyLOCAL case 933: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:4989 +//line sql.y:4979 { yySLICE := (*SelectExprs)(yyIaddr(yyVAL.union)) *yySLICE = append(*yySLICE, yyDollar[3].selectExprUnion()) @@ -17137,7 +17130,7 @@ yydefault: case 934: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL SelectExpr -//line sql.y:4995 +//line sql.y:4985 { yyLOCAL = &StarExpr{} } @@ -17145,7 +17138,7 @@ yydefault: case 935: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL SelectExpr -//line sql.y:4999 +//line sql.y:4989 { yyLOCAL = &AliasedExpr{Expr: yyDollar[1].exprUnion(), As: yyDollar[2].identifierCI} } @@ -17153,7 +17146,7 @@ yydefault: case 936: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL SelectExpr -//line sql.y:5003 +//line sql.y:4993 { yyLOCAL = &StarExpr{TableName: TableName{Name: yyDollar[1].identifierCS}} } @@ -17161,39 +17154,39 @@ yydefault: case 937: yyDollar = yyS[yypt-5 : yypt+1] var yyLOCAL SelectExpr -//line sql.y:5007 +//line sql.y:4997 { yyLOCAL = &StarExpr{TableName: TableName{Qualifier: yyDollar[1].identifierCS, Name: yyDollar[3].identifierCS}} } yyVAL.union = yyLOCAL case 938: yyDollar = yyS[yypt-0 : yypt+1] -//line sql.y:5012 +//line sql.y:5002 { yyVAL.identifierCI = IdentifierCI{} } case 939: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:5016 +//line sql.y:5006 { yyVAL.identifierCI = yyDollar[1].identifierCI } case 940: yyDollar = yyS[yypt-2 : yypt+1] -//line sql.y:5020 +//line sql.y:5010 { yyVAL.identifierCI = yyDollar[2].identifierCI } case 942: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:5027 +//line sql.y:5017 { yyVAL.identifierCI = NewIdentifierCI(string(yyDollar[1].str)) } case 943: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL TableExprs -//line sql.y:5032 +//line sql.y:5022 { yyLOCAL = TableExprs{&AliasedTableExpr{Expr: TableName{Name: 
NewIdentifierCS("dual")}}} } @@ -17201,7 +17194,7 @@ yydefault: case 944: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL TableExprs -//line sql.y:5036 +//line sql.y:5026 { yyLOCAL = yyDollar[1].tableExprsUnion() } @@ -17209,7 +17202,7 @@ yydefault: case 945: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL TableExprs -//line sql.y:5042 +//line sql.y:5032 { yyLOCAL = yyDollar[2].tableExprsUnion() } @@ -17217,14 +17210,14 @@ yydefault: case 946: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL TableExprs -//line sql.y:5048 +//line sql.y:5038 { yyLOCAL = TableExprs{yyDollar[1].tableExprUnion()} } yyVAL.union = yyLOCAL case 947: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:5052 +//line sql.y:5042 { yySLICE := (*TableExprs)(yyIaddr(yyVAL.union)) *yySLICE = append(*yySLICE, yyDollar[3].tableExprUnion()) @@ -17232,7 +17225,7 @@ yydefault: case 950: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL TableExpr -//line sql.y:5062 +//line sql.y:5052 { yyLOCAL = yyDollar[1].aliasedTableNameUnion() } @@ -17240,7 +17233,7 @@ yydefault: case 951: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL TableExpr -//line sql.y:5066 +//line sql.y:5056 { yyLOCAL = &AliasedTableExpr{Expr: yyDollar[1].derivedTableUnion(), As: yyDollar[3].identifierCS, Columns: yyDollar[4].columnsUnion()} } @@ -17248,7 +17241,7 @@ yydefault: case 952: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL TableExpr -//line sql.y:5070 +//line sql.y:5060 { yyLOCAL = &ParenTableExpr{Exprs: yyDollar[2].tableExprsUnion()} } @@ -17256,7 +17249,7 @@ yydefault: case 953: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL TableExpr -//line sql.y:5074 +//line sql.y:5064 { yyLOCAL = yyDollar[1].tableExprUnion() } @@ -17264,7 +17257,7 @@ yydefault: case 954: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL *DerivedTable -//line sql.y:5080 +//line sql.y:5070 { yyLOCAL = &DerivedTable{Lateral: false, Select: yyDollar[1].selStmtUnion()} } @@ -17272,7 +17265,7 @@ yydefault: case 955: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL *DerivedTable -//line sql.y:5084 +//line 
sql.y:5074 { yyLOCAL = &DerivedTable{Lateral: true, Select: yyDollar[2].selStmtUnion()} } @@ -17280,7 +17273,7 @@ yydefault: case 956: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *AliasedTableExpr -//line sql.y:5090 +//line sql.y:5080 { yyLOCAL = &AliasedTableExpr{Expr: yyDollar[1].tableName, As: yyDollar[2].identifierCS, Hints: yyDollar[3].indexHintsUnion()} } @@ -17288,7 +17281,7 @@ yydefault: case 957: yyDollar = yyS[yypt-7 : yypt+1] var yyLOCAL *AliasedTableExpr -//line sql.y:5094 +//line sql.y:5084 { yyLOCAL = &AliasedTableExpr{Expr: yyDollar[1].tableName, Partitions: yyDollar[4].partitionsUnion(), As: yyDollar[6].identifierCS, Hints: yyDollar[7].indexHintsUnion()} } @@ -17296,7 +17289,7 @@ yydefault: case 958: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL Columns -//line sql.y:5099 +//line sql.y:5089 { yyLOCAL = nil } @@ -17304,7 +17297,7 @@ yydefault: case 959: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL Columns -//line sql.y:5103 +//line sql.y:5093 { yyLOCAL = yyDollar[2].columnsUnion() } @@ -17312,7 +17305,7 @@ yydefault: case 960: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL Columns -//line sql.y:5108 +//line sql.y:5098 { yyLOCAL = nil } @@ -17320,7 +17313,7 @@ yydefault: case 961: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL Columns -//line sql.y:5112 +//line sql.y:5102 { yyLOCAL = yyDollar[1].columnsUnion() } @@ -17328,14 +17321,14 @@ yydefault: case 962: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL Columns -//line sql.y:5118 +//line sql.y:5108 { yyLOCAL = Columns{yyDollar[1].identifierCI} } yyVAL.union = yyLOCAL case 963: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:5122 +//line sql.y:5112 { yySLICE := (*Columns)(yyIaddr(yyVAL.union)) *yySLICE = append(*yySLICE, yyDollar[3].identifierCI) @@ -17343,14 +17336,14 @@ yydefault: case 964: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL []*Variable -//line sql.y:5128 +//line sql.y:5118 { yyLOCAL = []*Variable{yyDollar[1].variableUnion()} } yyVAL.union = yyLOCAL case 965: yyDollar = yyS[yypt-3 : yypt+1] -//line 
sql.y:5132 +//line sql.y:5122 { yySLICE := (*[]*Variable)(yyIaddr(yyVAL.union)) *yySLICE = append(*yySLICE, yyDollar[3].variableUnion()) @@ -17358,7 +17351,7 @@ yydefault: case 966: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL Columns -//line sql.y:5138 +//line sql.y:5128 { yyLOCAL = Columns{yyDollar[1].identifierCI} } @@ -17366,21 +17359,21 @@ yydefault: case 967: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL Columns -//line sql.y:5142 +//line sql.y:5132 { yyLOCAL = Columns{NewIdentifierCI(string(yyDollar[1].str))} } yyVAL.union = yyLOCAL case 968: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:5146 +//line sql.y:5136 { yySLICE := (*Columns)(yyIaddr(yyVAL.union)) *yySLICE = append(*yySLICE, yyDollar[3].identifierCI) } case 969: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:5150 +//line sql.y:5140 { yySLICE := (*Columns)(yyIaddr(yyVAL.union)) *yySLICE = append(*yySLICE, NewIdentifierCI(string(yyDollar[3].str))) @@ -17388,14 +17381,14 @@ yydefault: case 970: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL Partitions -//line sql.y:5156 +//line sql.y:5146 { yyLOCAL = Partitions{yyDollar[1].identifierCI} } yyVAL.union = yyLOCAL case 971: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:5160 +//line sql.y:5150 { yySLICE := (*Partitions)(yyIaddr(yyVAL.union)) *yySLICE = append(*yySLICE, yyDollar[3].identifierCI) @@ -17403,7 +17396,7 @@ yydefault: case 972: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL TableExpr -//line sql.y:5173 +//line sql.y:5163 { yyLOCAL = &JoinTableExpr{LeftExpr: yyDollar[1].tableExprUnion(), Join: yyDollar[2].joinTypeUnion(), RightExpr: yyDollar[3].tableExprUnion(), Condition: yyDollar[4].joinCondition} } @@ -17411,7 +17404,7 @@ yydefault: case 973: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL TableExpr -//line sql.y:5177 +//line sql.y:5167 { yyLOCAL = &JoinTableExpr{LeftExpr: yyDollar[1].tableExprUnion(), Join: yyDollar[2].joinTypeUnion(), RightExpr: yyDollar[3].tableExprUnion(), Condition: yyDollar[4].joinCondition} } @@ -17419,7 +17412,7 @@ yydefault: case 
974: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL TableExpr -//line sql.y:5181 +//line sql.y:5171 { yyLOCAL = &JoinTableExpr{LeftExpr: yyDollar[1].tableExprUnion(), Join: yyDollar[2].joinTypeUnion(), RightExpr: yyDollar[3].tableExprUnion(), Condition: yyDollar[4].joinCondition} } @@ -17427,87 +17420,87 @@ yydefault: case 975: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL TableExpr -//line sql.y:5185 +//line sql.y:5175 { yyLOCAL = &JoinTableExpr{LeftExpr: yyDollar[1].tableExprUnion(), Join: yyDollar[2].joinTypeUnion(), RightExpr: yyDollar[3].tableExprUnion()} } yyVAL.union = yyLOCAL case 976: yyDollar = yyS[yypt-2 : yypt+1] -//line sql.y:5191 +//line sql.y:5181 { yyVAL.joinCondition = &JoinCondition{On: yyDollar[2].exprUnion()} } case 977: yyDollar = yyS[yypt-4 : yypt+1] -//line sql.y:5193 +//line sql.y:5183 { yyVAL.joinCondition = &JoinCondition{Using: yyDollar[3].columnsUnion()} } case 978: yyDollar = yyS[yypt-0 : yypt+1] -//line sql.y:5197 +//line sql.y:5187 { yyVAL.joinCondition = &JoinCondition{} } case 979: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:5199 +//line sql.y:5189 { yyVAL.joinCondition = yyDollar[1].joinCondition } case 980: yyDollar = yyS[yypt-0 : yypt+1] -//line sql.y:5203 +//line sql.y:5193 { yyVAL.joinCondition = &JoinCondition{} } case 981: yyDollar = yyS[yypt-2 : yypt+1] -//line sql.y:5205 +//line sql.y:5195 { yyVAL.joinCondition = &JoinCondition{On: yyDollar[2].exprUnion()} } case 982: yyDollar = yyS[yypt-0 : yypt+1] -//line sql.y:5208 +//line sql.y:5198 { yyVAL.empty = struct{}{} } case 983: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:5210 +//line sql.y:5200 { yyVAL.empty = struct{}{} } case 984: yyDollar = yyS[yypt-0 : yypt+1] -//line sql.y:5213 +//line sql.y:5203 { yyVAL.identifierCS = NewIdentifierCS("") } case 985: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:5217 +//line sql.y:5207 { yyVAL.identifierCS = yyDollar[1].identifierCS } case 986: yyDollar = yyS[yypt-2 : yypt+1] -//line sql.y:5221 +//line sql.y:5211 { yyVAL.identifierCS = 
yyDollar[2].identifierCS } case 988: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:5228 +//line sql.y:5218 { yyVAL.identifierCS = NewIdentifierCS(string(yyDollar[1].str)) } case 989: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL JoinType -//line sql.y:5234 +//line sql.y:5224 { yyLOCAL = NormalJoinType } @@ -17515,7 +17508,7 @@ yydefault: case 990: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL JoinType -//line sql.y:5238 +//line sql.y:5228 { yyLOCAL = NormalJoinType } @@ -17523,7 +17516,7 @@ yydefault: case 991: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL JoinType -//line sql.y:5242 +//line sql.y:5232 { yyLOCAL = NormalJoinType } @@ -17531,7 +17524,7 @@ yydefault: case 992: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL JoinType -//line sql.y:5248 +//line sql.y:5238 { yyLOCAL = StraightJoinType } @@ -17539,7 +17532,7 @@ yydefault: case 993: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL JoinType -//line sql.y:5254 +//line sql.y:5244 { yyLOCAL = LeftJoinType } @@ -17547,7 +17540,7 @@ yydefault: case 994: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL JoinType -//line sql.y:5258 +//line sql.y:5248 { yyLOCAL = LeftJoinType } @@ -17555,7 +17548,7 @@ yydefault: case 995: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL JoinType -//line sql.y:5262 +//line sql.y:5252 { yyLOCAL = RightJoinType } @@ -17563,7 +17556,7 @@ yydefault: case 996: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL JoinType -//line sql.y:5266 +//line sql.y:5256 { yyLOCAL = RightJoinType } @@ -17571,7 +17564,7 @@ yydefault: case 997: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL JoinType -//line sql.y:5272 +//line sql.y:5262 { yyLOCAL = NaturalJoinType } @@ -17579,7 +17572,7 @@ yydefault: case 998: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL JoinType -//line sql.y:5276 +//line sql.y:5266 { if yyDollar[2].joinTypeUnion() == LeftJoinType { yyLOCAL = NaturalLeftJoinType @@ -17590,38 +17583,38 @@ yydefault: yyVAL.union = yyLOCAL case 999: yyDollar = yyS[yypt-2 : yypt+1] -//line sql.y:5286 +//line sql.y:5276 { yyVAL.tableName = 
yyDollar[2].tableName } case 1000: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:5290 +//line sql.y:5280 { yyVAL.tableName = yyDollar[1].tableName } case 1001: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:5296 +//line sql.y:5286 { yyVAL.tableName = TableName{Name: yyDollar[1].identifierCS} } case 1002: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:5300 +//line sql.y:5290 { yyVAL.tableName = TableName{Qualifier: yyDollar[1].identifierCS, Name: yyDollar[3].identifierCS} } case 1003: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:5306 +//line sql.y:5296 { yyVAL.tableName = TableName{Name: yyDollar[1].identifierCS} } case 1004: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL IndexHints -//line sql.y:5311 +//line sql.y:5301 { yyLOCAL = nil } @@ -17629,7 +17622,7 @@ yydefault: case 1005: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL IndexHints -//line sql.y:5315 +//line sql.y:5305 { yyLOCAL = yyDollar[1].indexHintsUnion() } @@ -17637,14 +17630,14 @@ yydefault: case 1006: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL IndexHints -//line sql.y:5321 +//line sql.y:5311 { yyLOCAL = IndexHints{yyDollar[1].indexHintUnion()} } yyVAL.union = yyLOCAL case 1007: yyDollar = yyS[yypt-2 : yypt+1] -//line sql.y:5325 +//line sql.y:5315 { yySLICE := (*IndexHints)(yyIaddr(yyVAL.union)) *yySLICE = append(*yySLICE, yyDollar[2].indexHintUnion()) @@ -17652,7 +17645,7 @@ yydefault: case 1008: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL *IndexHint -//line sql.y:5331 +//line sql.y:5321 { yyLOCAL = &IndexHint{Type: UseOp, ForType: yyDollar[3].indexHintForTypeUnion(), Indexes: yyDollar[5].columnsUnion()} } @@ -17660,7 +17653,7 @@ yydefault: case 1009: yyDollar = yyS[yypt-5 : yypt+1] var yyLOCAL *IndexHint -//line sql.y:5335 +//line sql.y:5325 { yyLOCAL = &IndexHint{Type: UseOp, ForType: yyDollar[3].indexHintForTypeUnion()} } @@ -17668,7 +17661,7 @@ yydefault: case 1010: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL *IndexHint -//line sql.y:5339 +//line sql.y:5329 { yyLOCAL = &IndexHint{Type: IgnoreOp, 
ForType: yyDollar[3].indexHintForTypeUnion(), Indexes: yyDollar[5].columnsUnion()} } @@ -17676,7 +17669,7 @@ yydefault: case 1011: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL *IndexHint -//line sql.y:5343 +//line sql.y:5333 { yyLOCAL = &IndexHint{Type: ForceOp, ForType: yyDollar[3].indexHintForTypeUnion(), Indexes: yyDollar[5].columnsUnion()} } @@ -17684,7 +17677,7 @@ yydefault: case 1012: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL IndexHintForType -//line sql.y:5348 +//line sql.y:5338 { yyLOCAL = NoForType } @@ -17692,7 +17685,7 @@ yydefault: case 1013: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL IndexHintForType -//line sql.y:5352 +//line sql.y:5342 { yyLOCAL = JoinForType } @@ -17700,7 +17693,7 @@ yydefault: case 1014: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL IndexHintForType -//line sql.y:5356 +//line sql.y:5346 { yyLOCAL = OrderByForType } @@ -17708,7 +17701,7 @@ yydefault: case 1015: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL IndexHintForType -//line sql.y:5360 +//line sql.y:5350 { yyLOCAL = GroupByForType } @@ -17716,7 +17709,7 @@ yydefault: case 1016: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL Expr -//line sql.y:5366 +//line sql.y:5356 { yyLOCAL = nil } @@ -17724,7 +17717,7 @@ yydefault: case 1017: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL Expr -//line sql.y:5370 +//line sql.y:5360 { yyLOCAL = yyDollar[2].exprUnion() } @@ -17732,7 +17725,7 @@ yydefault: case 1018: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL Expr -//line sql.y:5377 +//line sql.y:5367 { yyLOCAL = &OrExpr{Left: yyDollar[1].exprUnion(), Right: yyDollar[3].exprUnion()} } @@ -17740,7 +17733,7 @@ yydefault: case 1019: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL Expr -//line sql.y:5381 +//line sql.y:5371 { yyLOCAL = &XorExpr{Left: yyDollar[1].exprUnion(), Right: yyDollar[3].exprUnion()} } @@ -17748,7 +17741,7 @@ yydefault: case 1020: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL Expr -//line sql.y:5385 +//line sql.y:5375 { yyLOCAL = &AndExpr{Left: yyDollar[1].exprUnion(), Right: 
yyDollar[3].exprUnion()} } @@ -17756,7 +17749,7 @@ yydefault: case 1021: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL Expr -//line sql.y:5389 +//line sql.y:5379 { yyLOCAL = &NotExpr{Expr: yyDollar[2].exprUnion()} } @@ -17764,7 +17757,7 @@ yydefault: case 1022: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL Expr -//line sql.y:5393 +//line sql.y:5383 { yyLOCAL = &IsExpr{Left: yyDollar[1].exprUnion(), Right: yyDollar[3].isExprOperatorUnion()} } @@ -17772,7 +17765,7 @@ yydefault: case 1023: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL Expr -//line sql.y:5397 +//line sql.y:5387 { yyLOCAL = yyDollar[1].exprUnion() } @@ -17780,7 +17773,7 @@ yydefault: case 1024: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL Expr -//line sql.y:5401 +//line sql.y:5391 { yyLOCAL = &AssignmentExpr{Left: yyDollar[1].variableUnion(), Right: yyDollar[3].exprUnion()} } @@ -17788,7 +17781,7 @@ yydefault: case 1025: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Expr -//line sql.y:5405 +//line sql.y:5395 { yyLOCAL = &MemberOfExpr{Value: yyDollar[1].exprUnion(), JSONArr: yyDollar[5].exprUnion()} } @@ -17796,7 +17789,7 @@ yydefault: case 1026: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL Expr -//line sql.y:5411 +//line sql.y:5401 { yyLOCAL = &IsExpr{Left: yyDollar[1].exprUnion(), Right: IsNullOp} } @@ -17804,7 +17797,7 @@ yydefault: case 1027: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:5415 +//line sql.y:5405 { yyLOCAL = &IsExpr{Left: yyDollar[1].exprUnion(), Right: IsNotNullOp} } @@ -17812,7 +17805,7 @@ yydefault: case 1028: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL Expr -//line sql.y:5419 +//line sql.y:5409 { yyLOCAL = &ComparisonExpr{Left: yyDollar[1].exprUnion(), Operator: yyDollar[2].comparisonExprOperatorUnion(), Right: yyDollar[3].exprUnion()} } @@ -17820,7 +17813,7 @@ yydefault: case 1029: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL Expr -//line sql.y:5423 +//line sql.y:5413 { yyLOCAL = yyDollar[1].exprUnion() } @@ -17828,7 +17821,7 @@ yydefault: case 1030: yyDollar = yyS[yypt-3 : 
yypt+1] var yyLOCAL Expr -//line sql.y:5429 +//line sql.y:5419 { yyLOCAL = &ComparisonExpr{Left: yyDollar[1].exprUnion(), Operator: InOp, Right: yyDollar[3].colTupleUnion()} } @@ -17836,7 +17829,7 @@ yydefault: case 1031: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:5433 +//line sql.y:5423 { yyLOCAL = &ComparisonExpr{Left: yyDollar[1].exprUnion(), Operator: NotInOp, Right: yyDollar[4].colTupleUnion()} } @@ -17844,7 +17837,7 @@ yydefault: case 1032: yyDollar = yyS[yypt-5 : yypt+1] var yyLOCAL Expr -//line sql.y:5437 +//line sql.y:5427 { yyLOCAL = &BetweenExpr{Left: yyDollar[1].exprUnion(), IsBetween: true, From: yyDollar[3].exprUnion(), To: yyDollar[5].exprUnion()} } @@ -17852,7 +17845,7 @@ yydefault: case 1033: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Expr -//line sql.y:5441 +//line sql.y:5431 { yyLOCAL = &BetweenExpr{Left: yyDollar[1].exprUnion(), IsBetween: false, From: yyDollar[4].exprUnion(), To: yyDollar[6].exprUnion()} } @@ -17860,7 +17853,7 @@ yydefault: case 1034: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL Expr -//line sql.y:5445 +//line sql.y:5435 { yyLOCAL = &ComparisonExpr{Left: yyDollar[1].exprUnion(), Operator: LikeOp, Right: yyDollar[3].exprUnion()} } @@ -17868,7 +17861,7 @@ yydefault: case 1035: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:5449 +//line sql.y:5439 { yyLOCAL = &ComparisonExpr{Left: yyDollar[1].exprUnion(), Operator: NotLikeOp, Right: yyDollar[4].exprUnion()} } @@ -17876,7 +17869,7 @@ yydefault: case 1036: yyDollar = yyS[yypt-5 : yypt+1] var yyLOCAL Expr -//line sql.y:5453 +//line sql.y:5443 { yyLOCAL = &ComparisonExpr{Left: yyDollar[1].exprUnion(), Operator: LikeOp, Right: yyDollar[3].exprUnion(), Escape: yyDollar[5].exprUnion()} } @@ -17884,7 +17877,7 @@ yydefault: case 1037: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Expr -//line sql.y:5457 +//line sql.y:5447 { yyLOCAL = &ComparisonExpr{Left: yyDollar[1].exprUnion(), Operator: NotLikeOp, Right: yyDollar[4].exprUnion(), Escape: 
yyDollar[6].exprUnion()} } @@ -17892,7 +17885,7 @@ yydefault: case 1038: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL Expr -//line sql.y:5461 +//line sql.y:5451 { yyLOCAL = &ComparisonExpr{Left: yyDollar[1].exprUnion(), Operator: RegexpOp, Right: yyDollar[3].exprUnion()} } @@ -17900,7 +17893,7 @@ yydefault: case 1039: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:5465 +//line sql.y:5455 { yyLOCAL = &ComparisonExpr{Left: yyDollar[1].exprUnion(), Operator: NotRegexpOp, Right: yyDollar[4].exprUnion()} } @@ -17908,25 +17901,25 @@ yydefault: case 1040: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL Expr -//line sql.y:5469 +//line sql.y:5459 { yyLOCAL = yyDollar[1].exprUnion() } yyVAL.union = yyLOCAL case 1041: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:5475 +//line sql.y:5465 { } case 1042: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:5478 +//line sql.y:5468 { } case 1043: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL Expr -//line sql.y:5484 +//line sql.y:5474 { yyLOCAL = &BinaryExpr{Left: yyDollar[1].exprUnion(), Operator: BitOrOp, Right: yyDollar[3].exprUnion()} } @@ -17934,7 +17927,7 @@ yydefault: case 1044: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL Expr -//line sql.y:5488 +//line sql.y:5478 { yyLOCAL = &BinaryExpr{Left: yyDollar[1].exprUnion(), Operator: BitAndOp, Right: yyDollar[3].exprUnion()} } @@ -17942,7 +17935,7 @@ yydefault: case 1045: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL Expr -//line sql.y:5492 +//line sql.y:5482 { yyLOCAL = &BinaryExpr{Left: yyDollar[1].exprUnion(), Operator: ShiftLeftOp, Right: yyDollar[3].exprUnion()} } @@ -17950,7 +17943,7 @@ yydefault: case 1046: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL Expr -//line sql.y:5496 +//line sql.y:5486 { yyLOCAL = &BinaryExpr{Left: yyDollar[1].exprUnion(), Operator: ShiftRightOp, Right: yyDollar[3].exprUnion()} } @@ -17958,7 +17951,7 @@ yydefault: case 1047: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL Expr -//line sql.y:5500 +//line sql.y:5490 { yyLOCAL = &BinaryExpr{Left: 
yyDollar[1].exprUnion(), Operator: PlusOp, Right: yyDollar[3].exprUnion()} } @@ -17966,7 +17959,7 @@ yydefault: case 1048: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL Expr -//line sql.y:5504 +//line sql.y:5494 { yyLOCAL = &BinaryExpr{Left: yyDollar[1].exprUnion(), Operator: MinusOp, Right: yyDollar[3].exprUnion()} } @@ -17974,7 +17967,7 @@ yydefault: case 1049: yyDollar = yyS[yypt-5 : yypt+1] var yyLOCAL Expr -//line sql.y:5508 +//line sql.y:5498 { yyLOCAL = &IntervalDateExpr{Syntax: IntervalDateExprBinaryAdd, Date: yyDollar[1].exprUnion(), Unit: yyDollar[5].intervalTypeUnion(), Interval: yyDollar[4].exprUnion()} } @@ -17982,7 +17975,7 @@ yydefault: case 1050: yyDollar = yyS[yypt-5 : yypt+1] var yyLOCAL Expr -//line sql.y:5512 +//line sql.y:5502 { yyLOCAL = &IntervalDateExpr{Syntax: IntervalDateExprBinarySub, Date: yyDollar[1].exprUnion(), Unit: yyDollar[5].intervalTypeUnion(), Interval: yyDollar[4].exprUnion()} } @@ -17990,7 +17983,7 @@ yydefault: case 1051: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL Expr -//line sql.y:5516 +//line sql.y:5506 { yyLOCAL = &BinaryExpr{Left: yyDollar[1].exprUnion(), Operator: MultOp, Right: yyDollar[3].exprUnion()} } @@ -17998,7 +17991,7 @@ yydefault: case 1052: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL Expr -//line sql.y:5520 +//line sql.y:5510 { yyLOCAL = &BinaryExpr{Left: yyDollar[1].exprUnion(), Operator: DivOp, Right: yyDollar[3].exprUnion()} } @@ -18006,7 +17999,7 @@ yydefault: case 1053: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL Expr -//line sql.y:5524 +//line sql.y:5514 { yyLOCAL = &BinaryExpr{Left: yyDollar[1].exprUnion(), Operator: ModOp, Right: yyDollar[3].exprUnion()} } @@ -18014,7 +18007,7 @@ yydefault: case 1054: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL Expr -//line sql.y:5528 +//line sql.y:5518 { yyLOCAL = &BinaryExpr{Left: yyDollar[1].exprUnion(), Operator: IntDivOp, Right: yyDollar[3].exprUnion()} } @@ -18022,7 +18015,7 @@ yydefault: case 1055: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL Expr -//line 
sql.y:5532 +//line sql.y:5522 { yyLOCAL = &BinaryExpr{Left: yyDollar[1].exprUnion(), Operator: ModOp, Right: yyDollar[3].exprUnion()} } @@ -18030,7 +18023,7 @@ yydefault: case 1056: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL Expr -//line sql.y:5536 +//line sql.y:5526 { yyLOCAL = &BinaryExpr{Left: yyDollar[1].exprUnion(), Operator: BitXorOp, Right: yyDollar[3].exprUnion()} } @@ -18038,7 +18031,7 @@ yydefault: case 1057: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL Expr -//line sql.y:5540 +//line sql.y:5530 { yyLOCAL = yyDollar[1].exprUnion() } @@ -18046,7 +18039,7 @@ yydefault: case 1058: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL Expr -//line sql.y:5546 +//line sql.y:5536 { yyLOCAL = yyDollar[1].exprUnion() } @@ -18054,7 +18047,7 @@ yydefault: case 1059: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL Expr -//line sql.y:5550 +//line sql.y:5540 { yyLOCAL = yyDollar[1].exprUnion() } @@ -18062,7 +18055,7 @@ yydefault: case 1060: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL Expr -//line sql.y:5554 +//line sql.y:5544 { yyLOCAL = yyDollar[1].exprUnion() } @@ -18070,7 +18063,7 @@ yydefault: case 1061: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL Expr -//line sql.y:5558 +//line sql.y:5548 { yyLOCAL = yyDollar[1].exprUnion() } @@ -18078,7 +18071,7 @@ yydefault: case 1062: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL Expr -//line sql.y:5562 +//line sql.y:5552 { yyLOCAL = &CollateExpr{Expr: yyDollar[1].exprUnion(), Collation: yyDollar[3].str} } @@ -18086,7 +18079,7 @@ yydefault: case 1063: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL Expr -//line sql.y:5566 +//line sql.y:5556 { yyLOCAL = yyDollar[1].exprUnion() } @@ -18094,7 +18087,7 @@ yydefault: case 1064: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL Expr -//line sql.y:5570 +//line sql.y:5560 { yyLOCAL = yyDollar[1].exprUnion() } @@ -18102,7 +18095,7 @@ yydefault: case 1065: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL Expr -//line sql.y:5574 +//line sql.y:5564 { yyLOCAL = yyDollar[1].variableUnion() } @@ -18110,7 +18103,7 @@ 
yydefault: case 1066: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL Expr -//line sql.y:5578 +//line sql.y:5568 { yyLOCAL = yyDollar[2].exprUnion() // TODO: do we really want to ignore unary '+' before any kind of literals? } @@ -18118,7 +18111,7 @@ yydefault: case 1067: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL Expr -//line sql.y:5582 +//line sql.y:5572 { yyLOCAL = &UnaryExpr{Operator: UMinusOp, Expr: yyDollar[2].exprUnion()} } @@ -18126,7 +18119,7 @@ yydefault: case 1068: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL Expr -//line sql.y:5586 +//line sql.y:5576 { yyLOCAL = &UnaryExpr{Operator: TildaOp, Expr: yyDollar[2].exprUnion()} } @@ -18134,7 +18127,7 @@ yydefault: case 1069: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL Expr -//line sql.y:5590 +//line sql.y:5580 { yyLOCAL = &UnaryExpr{Operator: BangOp, Expr: yyDollar[2].exprUnion()} } @@ -18142,7 +18135,7 @@ yydefault: case 1070: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL Expr -//line sql.y:5594 +//line sql.y:5584 { yyLOCAL = yyDollar[1].subqueryUnion() } @@ -18150,7 +18143,7 @@ yydefault: case 1071: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL Expr -//line sql.y:5598 +//line sql.y:5588 { yyLOCAL = yyDollar[1].exprUnion() } @@ -18158,7 +18151,7 @@ yydefault: case 1072: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL Expr -//line sql.y:5602 +//line sql.y:5592 { yyLOCAL = &ExistsExpr{Subquery: yyDollar[2].subqueryUnion()} } @@ -18166,7 +18159,7 @@ yydefault: case 1073: yyDollar = yyS[yypt-7 : yypt+1] var yyLOCAL Expr -//line sql.y:5606 +//line sql.y:5596 { yyLOCAL = &MatchExpr{Columns: yyDollar[2].colNamesUnion(), Expr: yyDollar[5].exprUnion(), Option: yyDollar[6].matchExprOptionUnion()} } @@ -18174,7 +18167,7 @@ yydefault: case 1074: yyDollar = yyS[yypt-7 : yypt+1] var yyLOCAL Expr -//line sql.y:5610 +//line sql.y:5600 { yyLOCAL = &CastExpr{Expr: yyDollar[3].exprUnion(), Type: yyDollar[5].convertTypeUnion(), Array: yyDollar[6].booleanUnion()} } @@ -18182,7 +18175,7 @@ yydefault: case 1075: yyDollar = yyS[yypt-6 : 
yypt+1] var yyLOCAL Expr -//line sql.y:5614 +//line sql.y:5604 { yyLOCAL = &ConvertExpr{Expr: yyDollar[3].exprUnion(), Type: yyDollar[5].convertTypeUnion()} } @@ -18190,7 +18183,7 @@ yydefault: case 1076: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Expr -//line sql.y:5618 +//line sql.y:5608 { yyLOCAL = &ConvertUsingExpr{Expr: yyDollar[3].exprUnion(), Type: yyDollar[5].str} } @@ -18198,7 +18191,7 @@ yydefault: case 1077: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL Expr -//line sql.y:5622 +//line sql.y:5612 { // From: https://dev.mysql.com/doc/refman/8.0/en/cast-functions.html#operator_binary // To convert a string expression to a binary string, these constructs are equivalent: @@ -18210,7 +18203,7 @@ yydefault: case 1078: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL Expr -//line sql.y:5630 +//line sql.y:5620 { yyLOCAL = &Default{ColName: yyDollar[2].str} } @@ -18218,7 +18211,7 @@ yydefault: case 1079: yyDollar = yyS[yypt-5 : yypt+1] var yyLOCAL Expr -//line sql.y:5634 +//line sql.y:5624 { yyLOCAL = &IntervalDateExpr{Syntax: IntervalDateExprBinaryAddLeft, Date: yyDollar[5].exprUnion(), Unit: yyDollar[3].intervalTypeUnion(), Interval: yyDollar[2].exprUnion()} } @@ -18226,7 +18219,7 @@ yydefault: case 1080: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Expr -//line sql.y:5638 +//line sql.y:5628 { yyLOCAL = &IntervalFuncExpr{Expr: yyDollar[3].exprUnion(), Exprs: yyDollar[5].exprsUnion()} } @@ -18234,7 +18227,7 @@ yydefault: case 1081: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL Expr -//line sql.y:5642 +//line sql.y:5632 { yyLOCAL = &BinaryExpr{Left: yyDollar[1].exprUnion(), Operator: JSONExtractOp, Right: yyDollar[3].exprUnion()} } @@ -18242,7 +18235,7 @@ yydefault: case 1082: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL Expr -//line sql.y:5646 +//line sql.y:5636 { yyLOCAL = &BinaryExpr{Left: yyDollar[1].exprUnion(), Operator: JSONUnquoteExtractOp, Right: yyDollar[3].exprUnion()} } @@ -18250,7 +18243,7 @@ yydefault: case 1083: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL 
[]*ColName -//line sql.y:5652 +//line sql.y:5642 { yyLOCAL = yyDollar[1].colNamesUnion() } @@ -18258,7 +18251,7 @@ yydefault: case 1084: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL []*ColName -//line sql.y:5656 +//line sql.y:5646 { yyLOCAL = yyDollar[2].colNamesUnion() } @@ -18266,14 +18259,14 @@ yydefault: case 1085: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL []*ColName -//line sql.y:5662 +//line sql.y:5652 { yyLOCAL = []*ColName{yyDollar[1].colNameUnion()} } yyVAL.union = yyLOCAL case 1086: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:5666 +//line sql.y:5656 { yySLICE := (*[]*ColName)(yyIaddr(yyVAL.union)) *yySLICE = append(*yySLICE, yyDollar[3].colNameUnion()) @@ -18281,7 +18274,7 @@ yydefault: case 1087: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL TrimType -//line sql.y:5672 +//line sql.y:5662 { yyLOCAL = BothTrimType } @@ -18289,7 +18282,7 @@ yydefault: case 1088: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL TrimType -//line sql.y:5676 +//line sql.y:5666 { yyLOCAL = LeadingTrimType } @@ -18297,7 +18290,7 @@ yydefault: case 1089: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL TrimType -//line sql.y:5680 +//line sql.y:5670 { yyLOCAL = TrailingTrimType } @@ -18305,7 +18298,7 @@ yydefault: case 1090: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL FrameUnitType -//line sql.y:5686 +//line sql.y:5676 { yyLOCAL = FrameRowsType } @@ -18313,7 +18306,7 @@ yydefault: case 1091: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL FrameUnitType -//line sql.y:5690 +//line sql.y:5680 { yyLOCAL = FrameRangeType } @@ -18321,7 +18314,7 @@ yydefault: case 1092: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL ArgumentLessWindowExprType -//line sql.y:5697 +//line sql.y:5687 { yyLOCAL = CumeDistExprType } @@ -18329,7 +18322,7 @@ yydefault: case 1093: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL ArgumentLessWindowExprType -//line sql.y:5701 +//line sql.y:5691 { yyLOCAL = DenseRankExprType } @@ -18337,7 +18330,7 @@ yydefault: case 1094: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL 
ArgumentLessWindowExprType -//line sql.y:5705 +//line sql.y:5695 { yyLOCAL = PercentRankExprType } @@ -18345,7 +18338,7 @@ yydefault: case 1095: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL ArgumentLessWindowExprType -//line sql.y:5709 +//line sql.y:5699 { yyLOCAL = RankExprType } @@ -18353,7 +18346,7 @@ yydefault: case 1096: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL ArgumentLessWindowExprType -//line sql.y:5713 +//line sql.y:5703 { yyLOCAL = RowNumberExprType } @@ -18361,7 +18354,7 @@ yydefault: case 1097: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL *FramePoint -//line sql.y:5719 +//line sql.y:5709 { yyLOCAL = &FramePoint{Type: CurrentRowType} } @@ -18369,7 +18362,7 @@ yydefault: case 1098: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL *FramePoint -//line sql.y:5723 +//line sql.y:5713 { yyLOCAL = &FramePoint{Type: UnboundedPrecedingType} } @@ -18377,7 +18370,7 @@ yydefault: case 1099: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL *FramePoint -//line sql.y:5727 +//line sql.y:5717 { yyLOCAL = &FramePoint{Type: UnboundedFollowingType} } @@ -18385,7 +18378,7 @@ yydefault: case 1100: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL *FramePoint -//line sql.y:5731 +//line sql.y:5721 { yyLOCAL = &FramePoint{Type: ExprPrecedingType, Expr: yyDollar[1].exprUnion()} } @@ -18393,7 +18386,7 @@ yydefault: case 1101: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL *FramePoint -//line sql.y:5735 +//line sql.y:5725 { yyLOCAL = &FramePoint{Type: ExprPrecedingType, Expr: yyDollar[2].exprUnion(), Unit: yyDollar[3].intervalTypeUnion()} } @@ -18401,7 +18394,7 @@ yydefault: case 1102: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL *FramePoint -//line sql.y:5739 +//line sql.y:5729 { yyLOCAL = &FramePoint{Type: ExprFollowingType, Expr: yyDollar[1].exprUnion()} } @@ -18409,7 +18402,7 @@ yydefault: case 1103: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL *FramePoint -//line sql.y:5743 +//line sql.y:5733 { yyLOCAL = &FramePoint{Type: ExprFollowingType, Expr: yyDollar[2].exprUnion(), Unit: 
yyDollar[3].intervalTypeUnion()} } @@ -18417,7 +18410,7 @@ yydefault: case 1104: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL *FrameClause -//line sql.y:5748 +//line sql.y:5738 { yyLOCAL = nil } @@ -18425,7 +18418,7 @@ yydefault: case 1105: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL *FrameClause -//line sql.y:5752 +//line sql.y:5742 { yyLOCAL = yyDollar[1].frameClauseUnion() } @@ -18433,7 +18426,7 @@ yydefault: case 1106: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL *FrameClause -//line sql.y:5758 +//line sql.y:5748 { yyLOCAL = &FrameClause{Unit: yyDollar[1].frameUnitTypeUnion(), Start: yyDollar[2].framePointUnion()} } @@ -18441,7 +18434,7 @@ yydefault: case 1107: yyDollar = yyS[yypt-5 : yypt+1] var yyLOCAL *FrameClause -//line sql.y:5762 +//line sql.y:5752 { yyLOCAL = &FrameClause{Unit: yyDollar[1].frameUnitTypeUnion(), Start: yyDollar[3].framePointUnion(), End: yyDollar[5].framePointUnion()} } @@ -18449,7 +18442,7 @@ yydefault: case 1108: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL Exprs -//line sql.y:5767 +//line sql.y:5757 { yyLOCAL = nil } @@ -18457,26 +18450,26 @@ yydefault: case 1109: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL Exprs -//line sql.y:5771 +//line sql.y:5761 { yyLOCAL = yyDollar[3].exprsUnion() } yyVAL.union = yyLOCAL case 1110: yyDollar = yyS[yypt-0 : yypt+1] -//line sql.y:5776 +//line sql.y:5766 { } case 1111: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:5779 +//line sql.y:5769 { yyVAL.identifierCI = yyDollar[1].identifierCI } case 1112: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL *WindowSpecification -//line sql.y:5785 +//line sql.y:5775 { yyLOCAL = &WindowSpecification{Name: yyDollar[1].identifierCI, PartitionClause: yyDollar[2].exprsUnion(), OrderClause: yyDollar[3].orderByUnion(), FrameClause: yyDollar[4].frameClauseUnion()} } @@ -18484,7 +18477,7 @@ yydefault: case 1113: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL *OverClause -//line sql.y:5791 +//line sql.y:5781 { yyLOCAL = &OverClause{WindowSpec: yyDollar[3].windowSpecificationUnion()} 
} @@ -18492,7 +18485,7 @@ yydefault: case 1114: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL *OverClause -//line sql.y:5795 +//line sql.y:5785 { yyLOCAL = &OverClause{WindowName: yyDollar[2].identifierCI} } @@ -18500,7 +18493,7 @@ yydefault: case 1115: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL *NullTreatmentClause -//line sql.y:5800 +//line sql.y:5790 { yyLOCAL = nil } @@ -18508,7 +18501,7 @@ yydefault: case 1117: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL *NullTreatmentClause -//line sql.y:5807 +//line sql.y:5797 { yyLOCAL = &NullTreatmentClause{yyDollar[1].nullTreatmentTypeUnion()} } @@ -18516,7 +18509,7 @@ yydefault: case 1118: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL NullTreatmentType -//line sql.y:5813 +//line sql.y:5803 { yyLOCAL = RespectNullsType } @@ -18524,7 +18517,7 @@ yydefault: case 1119: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL NullTreatmentType -//line sql.y:5817 +//line sql.y:5807 { yyLOCAL = IgnoreNullsType } @@ -18532,7 +18525,7 @@ yydefault: case 1120: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL FirstOrLastValueExprType -//line sql.y:5823 +//line sql.y:5813 { yyLOCAL = FirstValueExprType } @@ -18540,7 +18533,7 @@ yydefault: case 1121: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL FirstOrLastValueExprType -//line sql.y:5827 +//line sql.y:5817 { yyLOCAL = LastValueExprType } @@ -18548,7 +18541,7 @@ yydefault: case 1122: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL FromFirstLastType -//line sql.y:5833 +//line sql.y:5823 { yyLOCAL = FromFirstType } @@ -18556,7 +18549,7 @@ yydefault: case 1123: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL FromFirstLastType -//line sql.y:5837 +//line sql.y:5827 { yyLOCAL = FromLastType } @@ -18564,7 +18557,7 @@ yydefault: case 1124: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL *FromFirstLastClause -//line sql.y:5842 +//line sql.y:5832 { yyLOCAL = nil } @@ -18572,7 +18565,7 @@ yydefault: case 1126: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL *FromFirstLastClause -//line sql.y:5849 +//line sql.y:5839 { yyLOCAL = 
&FromFirstLastClause{yyDollar[1].fromFirstLastTypeUnion()} } @@ -18580,7 +18573,7 @@ yydefault: case 1127: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL LagLeadExprType -//line sql.y:5855 +//line sql.y:5845 { yyLOCAL = LagExprType } @@ -18588,7 +18581,7 @@ yydefault: case 1128: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL LagLeadExprType -//line sql.y:5859 +//line sql.y:5849 { yyLOCAL = LeadExprType } @@ -18596,7 +18589,7 @@ yydefault: case 1129: yyDollar = yyS[yypt-5 : yypt+1] var yyLOCAL *WindowDefinition -//line sql.y:5865 +//line sql.y:5855 { yyLOCAL = &WindowDefinition{Name: yyDollar[1].identifierCI, WindowSpec: yyDollar[4].windowSpecificationUnion()} } @@ -18604,34 +18597,34 @@ yydefault: case 1130: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL WindowDefinitions -//line sql.y:5871 +//line sql.y:5861 { yyLOCAL = WindowDefinitions{yyDollar[1].windowDefinitionUnion()} } yyVAL.union = yyLOCAL case 1131: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:5875 +//line sql.y:5865 { yySLICE := (*WindowDefinitions)(yyIaddr(yyVAL.union)) *yySLICE = append(*yySLICE, yyDollar[3].windowDefinitionUnion()) } case 1132: yyDollar = yyS[yypt-0 : yypt+1] -//line sql.y:5881 +//line sql.y:5871 { yyVAL.str = "" } case 1133: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:5885 +//line sql.y:5875 { yyVAL.str = string(yyDollar[2].identifierCI.String()) } case 1134: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL BoolVal -//line sql.y:5891 +//line sql.y:5881 { yyLOCAL = BoolVal(true) } @@ -18639,7 +18632,7 @@ yydefault: case 1135: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL BoolVal -//line sql.y:5895 +//line sql.y:5885 { yyLOCAL = BoolVal(false) } @@ -18647,7 +18640,7 @@ yydefault: case 1136: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL IsExprOperator -//line sql.y:5902 +//line sql.y:5892 { yyLOCAL = IsTrueOp } @@ -18655,7 +18648,7 @@ yydefault: case 1137: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL IsExprOperator -//line sql.y:5906 +//line sql.y:5896 { yyLOCAL = IsNotTrueOp } @@ -18663,7 +18656,7 @@ 
yydefault: case 1138: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL IsExprOperator -//line sql.y:5910 +//line sql.y:5900 { yyLOCAL = IsFalseOp } @@ -18671,7 +18664,7 @@ yydefault: case 1139: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL IsExprOperator -//line sql.y:5914 +//line sql.y:5904 { yyLOCAL = IsNotFalseOp } @@ -18679,7 +18672,7 @@ yydefault: case 1140: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL ComparisonExprOperator -//line sql.y:5920 +//line sql.y:5910 { yyLOCAL = EqualOp } @@ -18687,7 +18680,7 @@ yydefault: case 1141: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL ComparisonExprOperator -//line sql.y:5924 +//line sql.y:5914 { yyLOCAL = LessThanOp } @@ -18695,7 +18688,7 @@ yydefault: case 1142: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL ComparisonExprOperator -//line sql.y:5928 +//line sql.y:5918 { yyLOCAL = GreaterThanOp } @@ -18703,7 +18696,7 @@ yydefault: case 1143: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL ComparisonExprOperator -//line sql.y:5932 +//line sql.y:5922 { yyLOCAL = LessEqualOp } @@ -18711,7 +18704,7 @@ yydefault: case 1144: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL ComparisonExprOperator -//line sql.y:5936 +//line sql.y:5926 { yyLOCAL = GreaterEqualOp } @@ -18719,7 +18712,7 @@ yydefault: case 1145: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL ComparisonExprOperator -//line sql.y:5940 +//line sql.y:5930 { yyLOCAL = NotEqualOp } @@ -18727,7 +18720,7 @@ yydefault: case 1146: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL ComparisonExprOperator -//line sql.y:5944 +//line sql.y:5934 { yyLOCAL = NullSafeEqualOp } @@ -18735,7 +18728,7 @@ yydefault: case 1147: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL ColTuple -//line sql.y:5950 +//line sql.y:5940 { yyLOCAL = yyDollar[1].valTupleUnion() } @@ -18743,7 +18736,7 @@ yydefault: case 1148: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL ColTuple -//line sql.y:5954 +//line sql.y:5944 { yyLOCAL = yyDollar[1].subqueryUnion() } @@ -18751,7 +18744,7 @@ yydefault: case 1149: yyDollar = yyS[yypt-1 : yypt+1] var 
yyLOCAL ColTuple -//line sql.y:5958 +//line sql.y:5948 { yyLOCAL = ListArg(yyDollar[1].str[2:]) markBindVariable(yylex, yyDollar[1].str[2:]) @@ -18760,7 +18753,7 @@ yydefault: case 1150: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL *Subquery -//line sql.y:5965 +//line sql.y:5955 { yyLOCAL = &Subquery{yyDollar[1].selStmtUnion()} } @@ -18768,14 +18761,14 @@ yydefault: case 1151: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL Exprs -//line sql.y:5971 +//line sql.y:5961 { yyLOCAL = Exprs{yyDollar[1].exprUnion()} } yyVAL.union = yyLOCAL case 1152: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:5975 +//line sql.y:5965 { yySLICE := (*Exprs)(yyIaddr(yyVAL.union)) *yySLICE = append(*yySLICE, yyDollar[3].exprUnion()) @@ -18783,7 +18776,7 @@ yydefault: case 1153: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:5985 +//line sql.y:5975 { yyLOCAL = &FuncExpr{Name: yyDollar[1].identifierCI, Exprs: yyDollar[3].selectExprsUnion()} } @@ -18791,7 +18784,7 @@ yydefault: case 1154: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Expr -//line sql.y:5989 +//line sql.y:5979 { yyLOCAL = &FuncExpr{Qualifier: yyDollar[1].identifierCS, Name: yyDollar[3].identifierCI, Exprs: yyDollar[5].selectExprsUnion()} } @@ -18799,7 +18792,7 @@ yydefault: case 1155: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:5999 +//line sql.y:5989 { yyLOCAL = &FuncExpr{Name: NewIdentifierCI("left"), Exprs: yyDollar[3].selectExprsUnion()} } @@ -18807,7 +18800,7 @@ yydefault: case 1156: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6003 +//line sql.y:5993 { yyLOCAL = &FuncExpr{Name: NewIdentifierCI("right"), Exprs: yyDollar[3].selectExprsUnion()} } @@ -18815,7 +18808,7 @@ yydefault: case 1157: yyDollar = yyS[yypt-8 : yypt+1] var yyLOCAL Expr -//line sql.y:6007 +//line sql.y:5997 { yyLOCAL = &SubstrExpr{Name: yyDollar[3].exprUnion(), From: yyDollar[5].exprUnion(), To: yyDollar[7].exprUnion()} } @@ -18823,7 +18816,7 @@ yydefault: case 1158: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL 
Expr -//line sql.y:6011 +//line sql.y:6001 { yyLOCAL = &SubstrExpr{Name: yyDollar[3].exprUnion(), From: yyDollar[5].exprUnion()} } @@ -18831,7 +18824,7 @@ yydefault: case 1159: yyDollar = yyS[yypt-8 : yypt+1] var yyLOCAL Expr -//line sql.y:6015 +//line sql.y:6005 { yyLOCAL = &SubstrExpr{Name: yyDollar[3].exprUnion(), From: yyDollar[5].exprUnion(), To: yyDollar[7].exprUnion()} } @@ -18839,7 +18832,7 @@ yydefault: case 1160: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Expr -//line sql.y:6019 +//line sql.y:6009 { yyLOCAL = &SubstrExpr{Name: yyDollar[3].exprUnion(), From: yyDollar[5].exprUnion()} } @@ -18847,7 +18840,7 @@ yydefault: case 1161: yyDollar = yyS[yypt-5 : yypt+1] var yyLOCAL Expr -//line sql.y:6023 +//line sql.y:6013 { yyLOCAL = &CaseExpr{Expr: yyDollar[2].exprUnion(), Whens: yyDollar[3].whensUnion(), Else: yyDollar[4].exprUnion()} } @@ -18855,7 +18848,7 @@ yydefault: case 1162: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6027 +//line sql.y:6017 { yyLOCAL = &ValuesFuncExpr{Name: yyDollar[3].colNameUnion()} } @@ -18863,7 +18856,7 @@ yydefault: case 1163: yyDollar = yyS[yypt-10 : yypt+1] var yyLOCAL Expr -//line sql.y:6031 +//line sql.y:6021 { yyLOCAL = &InsertExpr{Str: yyDollar[3].exprUnion(), Pos: yyDollar[5].exprUnion(), Len: yyDollar[7].exprUnion(), NewStr: yyDollar[9].exprUnion()} } @@ -18871,7 +18864,7 @@ yydefault: case 1164: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL Expr -//line sql.y:6035 +//line sql.y:6025 { yyLOCAL = &FuncExpr{Name: NewIdentifierCI(yyDollar[1].str)} } @@ -18879,7 +18872,7 @@ yydefault: case 1165: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL Expr -//line sql.y:6046 +//line sql.y:6036 { yyLOCAL = &FuncExpr{Name: NewIdentifierCI("utc_date")} } @@ -18887,7 +18880,7 @@ yydefault: case 1166: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL Expr -//line sql.y:6050 +//line sql.y:6040 { yyLOCAL = yyDollar[1].exprUnion() } @@ -18895,7 +18888,7 @@ yydefault: case 1167: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL Expr -//line 
sql.y:6056 +//line sql.y:6046 { yyLOCAL = &FuncExpr{Name: NewIdentifierCI("current_date")} } @@ -18903,7 +18896,7 @@ yydefault: case 1168: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL Expr -//line sql.y:6060 +//line sql.y:6050 { yyLOCAL = &FuncExpr{Name: NewIdentifierCI("curdate")} } @@ -18911,7 +18904,7 @@ yydefault: case 1169: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL Expr -//line sql.y:6064 +//line sql.y:6054 { yyLOCAL = &CurTimeFuncExpr{Name: NewIdentifierCI("utc_time"), Fsp: yyDollar[2].integerUnion()} } @@ -18919,7 +18912,7 @@ yydefault: case 1170: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL Expr -//line sql.y:6069 +//line sql.y:6059 { yyLOCAL = &CurTimeFuncExpr{Name: NewIdentifierCI("curtime"), Fsp: yyDollar[2].integerUnion()} } @@ -18927,7 +18920,7 @@ yydefault: case 1171: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL Expr -//line sql.y:6074 +//line sql.y:6064 { yyLOCAL = &CurTimeFuncExpr{Name: NewIdentifierCI("current_time"), Fsp: yyDollar[2].integerUnion()} } @@ -18935,7 +18928,7 @@ yydefault: case 1172: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6078 +//line sql.y:6068 { yyLOCAL = &CountStar{} } @@ -18943,7 +18936,7 @@ yydefault: case 1173: yyDollar = yyS[yypt-5 : yypt+1] var yyLOCAL Expr -//line sql.y:6082 +//line sql.y:6072 { yyLOCAL = &Count{Distinct: yyDollar[3].booleanUnion(), Args: yyDollar[4].exprsUnion()} } @@ -18951,7 +18944,7 @@ yydefault: case 1174: yyDollar = yyS[yypt-5 : yypt+1] var yyLOCAL Expr -//line sql.y:6086 +//line sql.y:6076 { yyLOCAL = &Max{Distinct: yyDollar[3].booleanUnion(), Arg: yyDollar[4].exprUnion()} } @@ -18959,7 +18952,7 @@ yydefault: case 1175: yyDollar = yyS[yypt-5 : yypt+1] var yyLOCAL Expr -//line sql.y:6090 +//line sql.y:6080 { yyLOCAL = &Min{Distinct: yyDollar[3].booleanUnion(), Arg: yyDollar[4].exprUnion()} } @@ -18967,7 +18960,7 @@ yydefault: case 1176: yyDollar = yyS[yypt-5 : yypt+1] var yyLOCAL Expr -//line sql.y:6094 +//line sql.y:6084 { yyLOCAL = &Sum{Distinct: yyDollar[3].booleanUnion(), Arg: 
yyDollar[4].exprUnion()} } @@ -18975,7 +18968,7 @@ yydefault: case 1177: yyDollar = yyS[yypt-5 : yypt+1] var yyLOCAL Expr -//line sql.y:6098 +//line sql.y:6088 { yyLOCAL = &Avg{Distinct: yyDollar[3].booleanUnion(), Arg: yyDollar[4].exprUnion()} } @@ -18983,7 +18976,7 @@ yydefault: case 1178: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6102 +//line sql.y:6092 { yyLOCAL = &BitAnd{Arg: yyDollar[3].exprUnion()} } @@ -18991,7 +18984,7 @@ yydefault: case 1179: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6106 +//line sql.y:6096 { yyLOCAL = &BitOr{Arg: yyDollar[3].exprUnion()} } @@ -18999,7 +18992,7 @@ yydefault: case 1180: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6110 +//line sql.y:6100 { yyLOCAL = &BitXor{Arg: yyDollar[3].exprUnion()} } @@ -19007,7 +19000,7 @@ yydefault: case 1181: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6114 +//line sql.y:6104 { yyLOCAL = &Std{Arg: yyDollar[3].exprUnion()} } @@ -19015,7 +19008,7 @@ yydefault: case 1182: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6118 +//line sql.y:6108 { yyLOCAL = &StdDev{Arg: yyDollar[3].exprUnion()} } @@ -19023,7 +19016,7 @@ yydefault: case 1183: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6122 +//line sql.y:6112 { yyLOCAL = &StdPop{Arg: yyDollar[3].exprUnion()} } @@ -19031,7 +19024,7 @@ yydefault: case 1184: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6126 +//line sql.y:6116 { yyLOCAL = &StdSamp{Arg: yyDollar[3].exprUnion()} } @@ -19039,7 +19032,7 @@ yydefault: case 1185: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6130 +//line sql.y:6120 { yyLOCAL = &VarPop{Arg: yyDollar[3].exprUnion()} } @@ -19047,7 +19040,7 @@ yydefault: case 1186: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6134 +//line sql.y:6124 { yyLOCAL = &VarSamp{Arg: yyDollar[3].exprUnion()} } @@ -19055,7 +19048,7 @@ yydefault: case 1187: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL 
Expr -//line sql.y:6138 +//line sql.y:6128 { yyLOCAL = &Variance{Arg: yyDollar[3].exprUnion()} } @@ -19063,7 +19056,7 @@ yydefault: case 1188: yyDollar = yyS[yypt-8 : yypt+1] var yyLOCAL Expr -//line sql.y:6142 +//line sql.y:6132 { yyLOCAL = &GroupConcatExpr{Distinct: yyDollar[3].booleanUnion(), Exprs: yyDollar[4].exprsUnion(), OrderBy: yyDollar[5].orderByUnion(), Separator: yyDollar[6].str, Limit: yyDollar[7].limitUnion()} } @@ -19071,7 +19064,7 @@ yydefault: case 1189: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6146 +//line sql.y:6136 { yyLOCAL = &AnyValue{Arg: yyDollar[3].exprUnion()} } @@ -19079,7 +19072,7 @@ yydefault: case 1190: yyDollar = yyS[yypt-8 : yypt+1] var yyLOCAL Expr -//line sql.y:6150 +//line sql.y:6140 { yyLOCAL = &IntervalDateExpr{Syntax: IntervalDateExprTimestampadd, Date: yyDollar[7].exprUnion(), Interval: yyDollar[5].exprUnion(), Unit: yyDollar[3].intervalTypeUnion()} } @@ -19087,7 +19080,7 @@ yydefault: case 1191: yyDollar = yyS[yypt-8 : yypt+1] var yyLOCAL Expr -//line sql.y:6154 +//line sql.y:6144 { yyLOCAL = &TimestampDiffExpr{Unit: yyDollar[3].intervalTypeUnion(), Expr1: yyDollar[5].exprUnion(), Expr2: yyDollar[7].exprUnion()} } @@ -19095,7 +19088,7 @@ yydefault: case 1192: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Expr -//line sql.y:6158 +//line sql.y:6148 { yyLOCAL = &ExtractFuncExpr{IntervalType: yyDollar[3].intervalTypeUnion(), Expr: yyDollar[5].exprUnion()} } @@ -19103,7 +19096,7 @@ yydefault: case 1193: yyDollar = yyS[yypt-5 : yypt+1] var yyLOCAL Expr -//line sql.y:6162 +//line sql.y:6152 { yyLOCAL = &WeightStringFuncExpr{Expr: yyDollar[3].exprUnion(), As: yyDollar[4].convertTypeUnion()} } @@ -19111,7 +19104,7 @@ yydefault: case 1194: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6166 +//line sql.y:6156 { yyLOCAL = &JSONPrettyExpr{JSONVal: yyDollar[3].exprUnion()} } @@ -19119,7 +19112,7 @@ yydefault: case 1195: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6170 +//line 
sql.y:6160 { yyLOCAL = &JSONStorageFreeExpr{JSONVal: yyDollar[3].exprUnion()} } @@ -19127,7 +19120,7 @@ yydefault: case 1196: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6174 +//line sql.y:6164 { yyLOCAL = &JSONStorageSizeExpr{JSONVal: yyDollar[3].exprUnion()} } @@ -19135,7 +19128,7 @@ yydefault: case 1197: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6178 +//line sql.y:6168 { yyLOCAL = &TrimFuncExpr{TrimFuncType: LTrimType, Type: LeadingTrimType, StringArg: yyDollar[3].exprUnion()} } @@ -19143,7 +19136,7 @@ yydefault: case 1198: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6182 +//line sql.y:6172 { yyLOCAL = &TrimFuncExpr{TrimFuncType: RTrimType, Type: TrailingTrimType, StringArg: yyDollar[3].exprUnion()} } @@ -19151,7 +19144,7 @@ yydefault: case 1199: yyDollar = yyS[yypt-7 : yypt+1] var yyLOCAL Expr -//line sql.y:6186 +//line sql.y:6176 { yyLOCAL = &TrimFuncExpr{Type: yyDollar[3].trimTypeUnion(), TrimArg: yyDollar[4].exprUnion(), StringArg: yyDollar[6].exprUnion()} } @@ -19159,7 +19152,7 @@ yydefault: case 1200: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6190 +//line sql.y:6180 { yyLOCAL = &TrimFuncExpr{StringArg: yyDollar[3].exprUnion()} } @@ -19167,7 +19160,7 @@ yydefault: case 1201: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6194 +//line sql.y:6184 { yyLOCAL = &CharExpr{Exprs: yyDollar[3].exprsUnion()} } @@ -19175,7 +19168,7 @@ yydefault: case 1202: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Expr -//line sql.y:6198 +//line sql.y:6188 { yyLOCAL = &CharExpr{Exprs: yyDollar[3].exprsUnion(), Charset: yyDollar[5].str} } @@ -19183,7 +19176,7 @@ yydefault: case 1203: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Expr -//line sql.y:6202 +//line sql.y:6192 { yyLOCAL = &TrimFuncExpr{TrimArg: yyDollar[3].exprUnion(), StringArg: yyDollar[5].exprUnion()} } @@ -19191,7 +19184,7 @@ yydefault: case 1204: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Expr -//line sql.y:6206 +//line 
sql.y:6196 { yyLOCAL = &LocateExpr{SubStr: yyDollar[3].exprUnion(), Str: yyDollar[5].exprUnion()} } @@ -19199,7 +19192,7 @@ yydefault: case 1205: yyDollar = yyS[yypt-8 : yypt+1] var yyLOCAL Expr -//line sql.y:6210 +//line sql.y:6200 { yyLOCAL = &LocateExpr{SubStr: yyDollar[3].exprUnion(), Str: yyDollar[5].exprUnion(), Pos: yyDollar[7].exprUnion()} } @@ -19207,7 +19200,7 @@ yydefault: case 1206: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Expr -//line sql.y:6214 +//line sql.y:6204 { yyLOCAL = &LocateExpr{SubStr: yyDollar[3].exprUnion(), Str: yyDollar[5].exprUnion()} } @@ -19215,7 +19208,7 @@ yydefault: case 1207: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Expr -//line sql.y:6218 +//line sql.y:6208 { yyLOCAL = &LockingFunc{Type: GetLock, Name: yyDollar[3].exprUnion(), Timeout: yyDollar[5].exprUnion()} } @@ -19223,7 +19216,7 @@ yydefault: case 1208: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6222 +//line sql.y:6212 { yyLOCAL = &LockingFunc{Type: IsFreeLock, Name: yyDollar[3].exprUnion()} } @@ -19231,7 +19224,7 @@ yydefault: case 1209: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6226 +//line sql.y:6216 { yyLOCAL = &LockingFunc{Type: IsUsedLock, Name: yyDollar[3].exprUnion()} } @@ -19239,7 +19232,7 @@ yydefault: case 1210: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL Expr -//line sql.y:6230 +//line sql.y:6220 { yyLOCAL = &LockingFunc{Type: ReleaseAllLocks} } @@ -19247,7 +19240,7 @@ yydefault: case 1211: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6234 +//line sql.y:6224 { yyLOCAL = &LockingFunc{Type: ReleaseLock, Name: yyDollar[3].exprUnion()} } @@ -19255,7 +19248,7 @@ yydefault: case 1212: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Expr -//line sql.y:6238 +//line sql.y:6228 { yyLOCAL = &JSONSchemaValidFuncExpr{Schema: yyDollar[3].exprUnion(), Document: yyDollar[5].exprUnion()} } @@ -19263,7 +19256,7 @@ yydefault: case 1213: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Expr -//line sql.y:6242 +//line sql.y:6232 { 
yyLOCAL = &JSONSchemaValidationReportFuncExpr{Schema: yyDollar[3].exprUnion(), Document: yyDollar[5].exprUnion()} } @@ -19271,7 +19264,7 @@ yydefault: case 1214: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6246 +//line sql.y:6236 { yyLOCAL = &JSONArrayExpr{Params: yyDollar[3].exprsUnion()} } @@ -19279,7 +19272,7 @@ yydefault: case 1215: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6250 +//line sql.y:6240 { yyLOCAL = &GeomFormatExpr{FormatType: BinaryFormat, Geom: yyDollar[3].exprUnion()} } @@ -19287,7 +19280,7 @@ yydefault: case 1216: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Expr -//line sql.y:6254 +//line sql.y:6244 { yyLOCAL = &GeomFormatExpr{FormatType: BinaryFormat, Geom: yyDollar[3].exprUnion(), AxisOrderOpt: yyDollar[5].exprUnion()} } @@ -19295,7 +19288,7 @@ yydefault: case 1217: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6258 +//line sql.y:6248 { yyLOCAL = &GeomFormatExpr{FormatType: TextFormat, Geom: yyDollar[3].exprUnion()} } @@ -19303,7 +19296,7 @@ yydefault: case 1218: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Expr -//line sql.y:6262 +//line sql.y:6252 { yyLOCAL = &GeomFormatExpr{FormatType: TextFormat, Geom: yyDollar[3].exprUnion(), AxisOrderOpt: yyDollar[5].exprUnion()} } @@ -19311,7 +19304,7 @@ yydefault: case 1219: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6266 +//line sql.y:6256 { yyLOCAL = &GeomPropertyFuncExpr{Property: IsEmpty, Geom: yyDollar[3].exprUnion()} } @@ -19319,7 +19312,7 @@ yydefault: case 1220: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6270 +//line sql.y:6260 { yyLOCAL = &GeomPropertyFuncExpr{Property: IsSimple, Geom: yyDollar[3].exprUnion()} } @@ -19327,7 +19320,7 @@ yydefault: case 1221: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6274 +//line sql.y:6264 { yyLOCAL = &GeomPropertyFuncExpr{Property: Dimension, Geom: yyDollar[3].exprUnion()} } @@ -19335,7 +19328,7 @@ yydefault: case 1222: yyDollar = yyS[yypt-4 : 
yypt+1] var yyLOCAL Expr -//line sql.y:6278 +//line sql.y:6268 { yyLOCAL = &GeomPropertyFuncExpr{Property: Envelope, Geom: yyDollar[3].exprUnion()} } @@ -19343,7 +19336,7 @@ yydefault: case 1223: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6282 +//line sql.y:6272 { yyLOCAL = &GeomPropertyFuncExpr{Property: GeometryType, Geom: yyDollar[3].exprUnion()} } @@ -19351,7 +19344,7 @@ yydefault: case 1224: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6286 +//line sql.y:6276 { yyLOCAL = &PointPropertyFuncExpr{Property: Latitude, Point: yyDollar[3].exprUnion()} } @@ -19359,7 +19352,7 @@ yydefault: case 1225: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Expr -//line sql.y:6290 +//line sql.y:6280 { yyLOCAL = &PointPropertyFuncExpr{Property: Latitude, Point: yyDollar[3].exprUnion(), ValueToSet: yyDollar[5].exprUnion()} } @@ -19367,7 +19360,7 @@ yydefault: case 1226: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6294 +//line sql.y:6284 { yyLOCAL = &PointPropertyFuncExpr{Property: Longitude, Point: yyDollar[3].exprUnion()} } @@ -19375,7 +19368,7 @@ yydefault: case 1227: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Expr -//line sql.y:6298 +//line sql.y:6288 { yyLOCAL = &PointPropertyFuncExpr{Property: Longitude, Point: yyDollar[3].exprUnion(), ValueToSet: yyDollar[5].exprUnion()} } @@ -19383,7 +19376,7 @@ yydefault: case 1228: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6302 +//line sql.y:6292 { yyLOCAL = &LinestrPropertyFuncExpr{Property: EndPoint, Linestring: yyDollar[3].exprUnion()} } @@ -19391,7 +19384,7 @@ yydefault: case 1229: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6306 +//line sql.y:6296 { yyLOCAL = &LinestrPropertyFuncExpr{Property: IsClosed, Linestring: yyDollar[3].exprUnion()} } @@ -19399,7 +19392,7 @@ yydefault: case 1230: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6310 +//line sql.y:6300 { yyLOCAL = &LinestrPropertyFuncExpr{Property: Length, Linestring: 
yyDollar[3].exprUnion()} } @@ -19407,7 +19400,7 @@ yydefault: case 1231: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Expr -//line sql.y:6314 +//line sql.y:6304 { yyLOCAL = &LinestrPropertyFuncExpr{Property: Length, Linestring: yyDollar[3].exprUnion(), PropertyDefArg: yyDollar[5].exprUnion()} } @@ -19415,7 +19408,7 @@ yydefault: case 1232: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6318 +//line sql.y:6308 { yyLOCAL = &LinestrPropertyFuncExpr{Property: NumPoints, Linestring: yyDollar[3].exprUnion()} } @@ -19423,7 +19416,7 @@ yydefault: case 1233: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Expr -//line sql.y:6322 +//line sql.y:6312 { yyLOCAL = &LinestrPropertyFuncExpr{Property: PointN, Linestring: yyDollar[3].exprUnion(), PropertyDefArg: yyDollar[5].exprUnion()} } @@ -19431,7 +19424,7 @@ yydefault: case 1234: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6326 +//line sql.y:6316 { yyLOCAL = &LinestrPropertyFuncExpr{Property: StartPoint, Linestring: yyDollar[3].exprUnion()} } @@ -19439,7 +19432,7 @@ yydefault: case 1235: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6330 +//line sql.y:6320 { yyLOCAL = &PointPropertyFuncExpr{Property: XCordinate, Point: yyDollar[3].exprUnion()} } @@ -19447,7 +19440,7 @@ yydefault: case 1236: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Expr -//line sql.y:6334 +//line sql.y:6324 { yyLOCAL = &PointPropertyFuncExpr{Property: XCordinate, Point: yyDollar[3].exprUnion(), ValueToSet: yyDollar[5].exprUnion()} } @@ -19455,7 +19448,7 @@ yydefault: case 1237: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6338 +//line sql.y:6328 { yyLOCAL = &PointPropertyFuncExpr{Property: YCordinate, Point: yyDollar[3].exprUnion()} } @@ -19463,7 +19456,7 @@ yydefault: case 1238: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Expr -//line sql.y:6342 +//line sql.y:6332 { yyLOCAL = &PointPropertyFuncExpr{Property: YCordinate, Point: yyDollar[3].exprUnion(), ValueToSet: yyDollar[5].exprUnion()} } @@ 
-19471,7 +19464,7 @@ yydefault: case 1239: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6346 +//line sql.y:6336 { yyLOCAL = &GeomFromTextExpr{Type: GeometryFromText, WktText: yyDollar[3].exprUnion()} } @@ -19479,7 +19472,7 @@ yydefault: case 1240: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Expr -//line sql.y:6350 +//line sql.y:6340 { yyLOCAL = &GeomFromTextExpr{Type: GeometryFromText, WktText: yyDollar[3].exprUnion(), Srid: yyDollar[5].exprUnion()} } @@ -19487,7 +19480,7 @@ yydefault: case 1241: yyDollar = yyS[yypt-8 : yypt+1] var yyLOCAL Expr -//line sql.y:6354 +//line sql.y:6344 { yyLOCAL = &GeomFromTextExpr{Type: GeometryFromText, WktText: yyDollar[3].exprUnion(), Srid: yyDollar[5].exprUnion(), AxisOrderOpt: yyDollar[7].exprUnion()} } @@ -19495,7 +19488,7 @@ yydefault: case 1242: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6358 +//line sql.y:6348 { yyLOCAL = &GeomFromTextExpr{Type: GeometryCollectionFromText, WktText: yyDollar[3].exprUnion()} } @@ -19503,7 +19496,7 @@ yydefault: case 1243: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Expr -//line sql.y:6362 +//line sql.y:6352 { yyLOCAL = &GeomFromTextExpr{Type: GeometryCollectionFromText, WktText: yyDollar[3].exprUnion(), Srid: yyDollar[5].exprUnion()} } @@ -19511,7 +19504,7 @@ yydefault: case 1244: yyDollar = yyS[yypt-8 : yypt+1] var yyLOCAL Expr -//line sql.y:6366 +//line sql.y:6356 { yyLOCAL = &GeomFromTextExpr{Type: GeometryCollectionFromText, WktText: yyDollar[3].exprUnion(), Srid: yyDollar[5].exprUnion(), AxisOrderOpt: yyDollar[7].exprUnion()} } @@ -19519,7 +19512,7 @@ yydefault: case 1245: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6370 +//line sql.y:6360 { yyLOCAL = &GeomFromTextExpr{Type: LineStringFromText, WktText: yyDollar[3].exprUnion()} } @@ -19527,7 +19520,7 @@ yydefault: case 1246: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Expr -//line sql.y:6374 +//line sql.y:6364 { yyLOCAL = &GeomFromTextExpr{Type: LineStringFromText, WktText: 
yyDollar[3].exprUnion(), Srid: yyDollar[5].exprUnion()} } @@ -19535,7 +19528,7 @@ yydefault: case 1247: yyDollar = yyS[yypt-8 : yypt+1] var yyLOCAL Expr -//line sql.y:6378 +//line sql.y:6368 { yyLOCAL = &GeomFromTextExpr{Type: LineStringFromText, WktText: yyDollar[3].exprUnion(), Srid: yyDollar[5].exprUnion(), AxisOrderOpt: yyDollar[7].exprUnion()} } @@ -19543,7 +19536,7 @@ yydefault: case 1248: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6382 +//line sql.y:6372 { yyLOCAL = &GeomFromTextExpr{Type: MultiLinestringFromText, WktText: yyDollar[3].exprUnion()} } @@ -19551,7 +19544,7 @@ yydefault: case 1249: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Expr -//line sql.y:6386 +//line sql.y:6376 { yyLOCAL = &GeomFromTextExpr{Type: MultiLinestringFromText, WktText: yyDollar[3].exprUnion(), Srid: yyDollar[5].exprUnion()} } @@ -19559,7 +19552,7 @@ yydefault: case 1250: yyDollar = yyS[yypt-8 : yypt+1] var yyLOCAL Expr -//line sql.y:6390 +//line sql.y:6380 { yyLOCAL = &GeomFromTextExpr{Type: MultiLinestringFromText, WktText: yyDollar[3].exprUnion(), Srid: yyDollar[5].exprUnion(), AxisOrderOpt: yyDollar[7].exprUnion()} } @@ -19567,7 +19560,7 @@ yydefault: case 1251: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6394 +//line sql.y:6384 { yyLOCAL = &GeomFromTextExpr{Type: MultiPointFromText, WktText: yyDollar[3].exprUnion()} } @@ -19575,7 +19568,7 @@ yydefault: case 1252: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Expr -//line sql.y:6398 +//line sql.y:6388 { yyLOCAL = &GeomFromTextExpr{Type: MultiPointFromText, WktText: yyDollar[3].exprUnion(), Srid: yyDollar[5].exprUnion()} } @@ -19583,7 +19576,7 @@ yydefault: case 1253: yyDollar = yyS[yypt-8 : yypt+1] var yyLOCAL Expr -//line sql.y:6402 +//line sql.y:6392 { yyLOCAL = &GeomFromTextExpr{Type: MultiPointFromText, WktText: yyDollar[3].exprUnion(), Srid: yyDollar[5].exprUnion(), AxisOrderOpt: yyDollar[7].exprUnion()} } @@ -19591,7 +19584,7 @@ yydefault: case 1254: yyDollar = yyS[yypt-4 : yypt+1] 
var yyLOCAL Expr -//line sql.y:6406 +//line sql.y:6396 { yyLOCAL = &GeomFromTextExpr{Type: MultiPolygonFromText, WktText: yyDollar[3].exprUnion()} } @@ -19599,7 +19592,7 @@ yydefault: case 1255: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Expr -//line sql.y:6410 +//line sql.y:6400 { yyLOCAL = &GeomFromTextExpr{Type: MultiPolygonFromText, WktText: yyDollar[3].exprUnion(), Srid: yyDollar[5].exprUnion()} } @@ -19607,7 +19600,7 @@ yydefault: case 1256: yyDollar = yyS[yypt-8 : yypt+1] var yyLOCAL Expr -//line sql.y:6414 +//line sql.y:6404 { yyLOCAL = &GeomFromTextExpr{Type: MultiPolygonFromText, WktText: yyDollar[3].exprUnion(), Srid: yyDollar[5].exprUnion(), AxisOrderOpt: yyDollar[7].exprUnion()} } @@ -19615,7 +19608,7 @@ yydefault: case 1257: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6418 +//line sql.y:6408 { yyLOCAL = &GeomFromTextExpr{Type: PointFromText, WktText: yyDollar[3].exprUnion()} } @@ -19623,7 +19616,7 @@ yydefault: case 1258: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Expr -//line sql.y:6422 +//line sql.y:6412 { yyLOCAL = &GeomFromTextExpr{Type: PointFromText, WktText: yyDollar[3].exprUnion(), Srid: yyDollar[5].exprUnion()} } @@ -19631,7 +19624,7 @@ yydefault: case 1259: yyDollar = yyS[yypt-8 : yypt+1] var yyLOCAL Expr -//line sql.y:6426 +//line sql.y:6416 { yyLOCAL = &GeomFromTextExpr{Type: PointFromText, WktText: yyDollar[3].exprUnion(), Srid: yyDollar[5].exprUnion(), AxisOrderOpt: yyDollar[7].exprUnion()} } @@ -19639,7 +19632,7 @@ yydefault: case 1260: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6430 +//line sql.y:6420 { yyLOCAL = &GeomFromTextExpr{Type: PolygonFromText, WktText: yyDollar[3].exprUnion()} } @@ -19647,7 +19640,7 @@ yydefault: case 1261: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Expr -//line sql.y:6434 +//line sql.y:6424 { yyLOCAL = &GeomFromTextExpr{Type: PolygonFromText, WktText: yyDollar[3].exprUnion(), Srid: yyDollar[5].exprUnion()} } @@ -19655,7 +19648,7 @@ yydefault: case 1262: yyDollar = 
yyS[yypt-8 : yypt+1] var yyLOCAL Expr -//line sql.y:6438 +//line sql.y:6428 { yyLOCAL = &GeomFromTextExpr{Type: PolygonFromText, WktText: yyDollar[3].exprUnion(), Srid: yyDollar[5].exprUnion(), AxisOrderOpt: yyDollar[7].exprUnion()} } @@ -19663,7 +19656,7 @@ yydefault: case 1263: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6442 +//line sql.y:6432 { yyLOCAL = &GeomFromWKBExpr{Type: GeometryFromWKB, WkbBlob: yyDollar[3].exprUnion()} } @@ -19671,7 +19664,7 @@ yydefault: case 1264: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Expr -//line sql.y:6446 +//line sql.y:6436 { yyLOCAL = &GeomFromWKBExpr{Type: GeometryFromWKB, WkbBlob: yyDollar[3].exprUnion(), Srid: yyDollar[5].exprUnion()} } @@ -19679,7 +19672,7 @@ yydefault: case 1265: yyDollar = yyS[yypt-8 : yypt+1] var yyLOCAL Expr -//line sql.y:6450 +//line sql.y:6440 { yyLOCAL = &GeomFromWKBExpr{Type: GeometryFromWKB, WkbBlob: yyDollar[3].exprUnion(), Srid: yyDollar[5].exprUnion(), AxisOrderOpt: yyDollar[7].exprUnion()} } @@ -19687,7 +19680,7 @@ yydefault: case 1266: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6454 +//line sql.y:6444 { yyLOCAL = &GeomFromWKBExpr{Type: GeometryCollectionFromWKB, WkbBlob: yyDollar[3].exprUnion()} } @@ -19695,7 +19688,7 @@ yydefault: case 1267: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Expr -//line sql.y:6458 +//line sql.y:6448 { yyLOCAL = &GeomFromWKBExpr{Type: GeometryCollectionFromWKB, WkbBlob: yyDollar[3].exprUnion(), Srid: yyDollar[5].exprUnion()} } @@ -19703,7 +19696,7 @@ yydefault: case 1268: yyDollar = yyS[yypt-8 : yypt+1] var yyLOCAL Expr -//line sql.y:6462 +//line sql.y:6452 { yyLOCAL = &GeomFromWKBExpr{Type: GeometryCollectionFromWKB, WkbBlob: yyDollar[3].exprUnion(), Srid: yyDollar[5].exprUnion(), AxisOrderOpt: yyDollar[7].exprUnion()} } @@ -19711,7 +19704,7 @@ yydefault: case 1269: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6466 +//line sql.y:6456 { yyLOCAL = &GeomFromWKBExpr{Type: LineStringFromWKB, WkbBlob: 
yyDollar[3].exprUnion()} } @@ -19719,7 +19712,7 @@ yydefault: case 1270: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Expr -//line sql.y:6470 +//line sql.y:6460 { yyLOCAL = &GeomFromWKBExpr{Type: LineStringFromWKB, WkbBlob: yyDollar[3].exprUnion(), Srid: yyDollar[5].exprUnion()} } @@ -19727,7 +19720,7 @@ yydefault: case 1271: yyDollar = yyS[yypt-8 : yypt+1] var yyLOCAL Expr -//line sql.y:6474 +//line sql.y:6464 { yyLOCAL = &GeomFromWKBExpr{Type: LineStringFromWKB, WkbBlob: yyDollar[3].exprUnion(), Srid: yyDollar[5].exprUnion(), AxisOrderOpt: yyDollar[7].exprUnion()} } @@ -19735,7 +19728,7 @@ yydefault: case 1272: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6478 +//line sql.y:6468 { yyLOCAL = &GeomFromWKBExpr{Type: MultiLinestringFromWKB, WkbBlob: yyDollar[3].exprUnion()} } @@ -19743,7 +19736,7 @@ yydefault: case 1273: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Expr -//line sql.y:6482 +//line sql.y:6472 { yyLOCAL = &GeomFromWKBExpr{Type: MultiLinestringFromWKB, WkbBlob: yyDollar[3].exprUnion(), Srid: yyDollar[5].exprUnion()} } @@ -19751,7 +19744,7 @@ yydefault: case 1274: yyDollar = yyS[yypt-8 : yypt+1] var yyLOCAL Expr -//line sql.y:6486 +//line sql.y:6476 { yyLOCAL = &GeomFromWKBExpr{Type: MultiLinestringFromWKB, WkbBlob: yyDollar[3].exprUnion(), Srid: yyDollar[5].exprUnion(), AxisOrderOpt: yyDollar[7].exprUnion()} } @@ -19759,7 +19752,7 @@ yydefault: case 1275: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6490 +//line sql.y:6480 { yyLOCAL = &GeomFromWKBExpr{Type: MultiPointFromWKB, WkbBlob: yyDollar[3].exprUnion()} } @@ -19767,7 +19760,7 @@ yydefault: case 1276: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Expr -//line sql.y:6494 +//line sql.y:6484 { yyLOCAL = &GeomFromWKBExpr{Type: MultiPointFromWKB, WkbBlob: yyDollar[3].exprUnion(), Srid: yyDollar[5].exprUnion()} } @@ -19775,7 +19768,7 @@ yydefault: case 1277: yyDollar = yyS[yypt-8 : yypt+1] var yyLOCAL Expr -//line sql.y:6498 +//line sql.y:6488 { yyLOCAL = 
&GeomFromWKBExpr{Type: MultiPointFromWKB, WkbBlob: yyDollar[3].exprUnion(), Srid: yyDollar[5].exprUnion(), AxisOrderOpt: yyDollar[7].exprUnion()} } @@ -19783,7 +19776,7 @@ yydefault: case 1278: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6502 +//line sql.y:6492 { yyLOCAL = &GeomFromWKBExpr{Type: MultiPolygonFromWKB, WkbBlob: yyDollar[3].exprUnion()} } @@ -19791,7 +19784,7 @@ yydefault: case 1279: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Expr -//line sql.y:6506 +//line sql.y:6496 { yyLOCAL = &GeomFromWKBExpr{Type: MultiPolygonFromWKB, WkbBlob: yyDollar[3].exprUnion(), Srid: yyDollar[5].exprUnion()} } @@ -19799,7 +19792,7 @@ yydefault: case 1280: yyDollar = yyS[yypt-8 : yypt+1] var yyLOCAL Expr -//line sql.y:6510 +//line sql.y:6500 { yyLOCAL = &GeomFromWKBExpr{Type: MultiPolygonFromWKB, WkbBlob: yyDollar[3].exprUnion(), Srid: yyDollar[5].exprUnion(), AxisOrderOpt: yyDollar[7].exprUnion()} } @@ -19807,7 +19800,7 @@ yydefault: case 1281: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6514 +//line sql.y:6504 { yyLOCAL = &GeomFromWKBExpr{Type: PointFromWKB, WkbBlob: yyDollar[3].exprUnion()} } @@ -19815,7 +19808,7 @@ yydefault: case 1282: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Expr -//line sql.y:6518 +//line sql.y:6508 { yyLOCAL = &GeomFromWKBExpr{Type: PointFromWKB, WkbBlob: yyDollar[3].exprUnion(), Srid: yyDollar[5].exprUnion()} } @@ -19823,7 +19816,7 @@ yydefault: case 1283: yyDollar = yyS[yypt-8 : yypt+1] var yyLOCAL Expr -//line sql.y:6522 +//line sql.y:6512 { yyLOCAL = &GeomFromWKBExpr{Type: PointFromWKB, WkbBlob: yyDollar[3].exprUnion(), Srid: yyDollar[5].exprUnion(), AxisOrderOpt: yyDollar[7].exprUnion()} } @@ -19831,7 +19824,7 @@ yydefault: case 1284: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6526 +//line sql.y:6516 { yyLOCAL = &GeomFromWKBExpr{Type: PolygonFromWKB, WkbBlob: yyDollar[3].exprUnion()} } @@ -19839,7 +19832,7 @@ yydefault: case 1285: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Expr 
-//line sql.y:6530 +//line sql.y:6520 { yyLOCAL = &GeomFromWKBExpr{Type: PolygonFromWKB, WkbBlob: yyDollar[3].exprUnion(), Srid: yyDollar[5].exprUnion()} } @@ -19847,7 +19840,7 @@ yydefault: case 1286: yyDollar = yyS[yypt-8 : yypt+1] var yyLOCAL Expr -//line sql.y:6534 +//line sql.y:6524 { yyLOCAL = &GeomFromWKBExpr{Type: PolygonFromWKB, WkbBlob: yyDollar[3].exprUnion(), Srid: yyDollar[5].exprUnion(), AxisOrderOpt: yyDollar[7].exprUnion()} } @@ -19855,7 +19848,7 @@ yydefault: case 1287: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6538 +//line sql.y:6528 { yyLOCAL = &PolygonPropertyFuncExpr{Property: Area, Polygon: yyDollar[3].exprUnion()} } @@ -19863,7 +19856,7 @@ yydefault: case 1288: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6542 +//line sql.y:6532 { yyLOCAL = &PolygonPropertyFuncExpr{Property: Centroid, Polygon: yyDollar[3].exprUnion()} } @@ -19871,7 +19864,7 @@ yydefault: case 1289: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6546 +//line sql.y:6536 { yyLOCAL = &PolygonPropertyFuncExpr{Property: ExteriorRing, Polygon: yyDollar[3].exprUnion()} } @@ -19879,7 +19872,7 @@ yydefault: case 1290: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Expr -//line sql.y:6550 +//line sql.y:6540 { yyLOCAL = &PolygonPropertyFuncExpr{Property: InteriorRingN, Polygon: yyDollar[3].exprUnion(), PropertyDefArg: yyDollar[5].exprUnion()} } @@ -19887,7 +19880,7 @@ yydefault: case 1291: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6554 +//line sql.y:6544 { yyLOCAL = &PolygonPropertyFuncExpr{Property: NumInteriorRings, Polygon: yyDollar[3].exprUnion()} } @@ -19895,7 +19888,7 @@ yydefault: case 1292: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Expr -//line sql.y:6558 +//line sql.y:6548 { yyLOCAL = &GeomCollPropertyFuncExpr{Property: GeometryN, GeomColl: yyDollar[3].exprUnion(), PropertyDefArg: yyDollar[5].exprUnion()} } @@ -19903,7 +19896,7 @@ yydefault: case 1293: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr 
-//line sql.y:6562 +//line sql.y:6552 { yyLOCAL = &GeomCollPropertyFuncExpr{Property: NumGeometries, GeomColl: yyDollar[3].exprUnion()} } @@ -19911,7 +19904,7 @@ yydefault: case 1294: yyDollar = yyS[yypt-8 : yypt+1] var yyLOCAL Expr -//line sql.y:6566 +//line sql.y:6556 { yyLOCAL = &GeoHashFromLatLongExpr{Longitude: yyDollar[3].exprUnion(), Latitude: yyDollar[5].exprUnion(), MaxLength: yyDollar[7].exprUnion()} } @@ -19919,7 +19912,7 @@ yydefault: case 1295: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Expr -//line sql.y:6570 +//line sql.y:6560 { yyLOCAL = &GeoHashFromPointExpr{Point: yyDollar[3].exprUnion(), MaxLength: yyDollar[5].exprUnion()} } @@ -19927,7 +19920,7 @@ yydefault: case 1296: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6574 +//line sql.y:6564 { yyLOCAL = &GeomFromGeoHashExpr{GeomType: LatitudeFromHash, GeoHash: yyDollar[3].exprUnion()} } @@ -19935,7 +19928,7 @@ yydefault: case 1297: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6578 +//line sql.y:6568 { yyLOCAL = &GeomFromGeoHashExpr{GeomType: LongitudeFromHash, GeoHash: yyDollar[3].exprUnion()} } @@ -19943,7 +19936,7 @@ yydefault: case 1298: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Expr -//line sql.y:6582 +//line sql.y:6572 { yyLOCAL = &GeomFromGeoHashExpr{GeomType: PointFromHash, GeoHash: yyDollar[3].exprUnion(), SridOpt: yyDollar[5].exprUnion()} } @@ -19951,7 +19944,7 @@ yydefault: case 1299: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6586 +//line sql.y:6576 { yyLOCAL = &GeomFromGeoJSONExpr{GeoJSON: yyDollar[3].exprUnion()} } @@ -19959,7 +19952,7 @@ yydefault: case 1300: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Expr -//line sql.y:6590 +//line sql.y:6580 { yyLOCAL = &GeomFromGeoJSONExpr{GeoJSON: yyDollar[3].exprUnion(), HigherDimHandlerOpt: yyDollar[5].exprUnion()} } @@ -19967,7 +19960,7 @@ yydefault: case 1301: yyDollar = yyS[yypt-8 : yypt+1] var yyLOCAL Expr -//line sql.y:6594 +//line sql.y:6584 { yyLOCAL = &GeomFromGeoJSONExpr{GeoJSON: 
yyDollar[3].exprUnion(), HigherDimHandlerOpt: yyDollar[5].exprUnion(), Srid: yyDollar[7].exprUnion()} } @@ -19975,7 +19968,7 @@ yydefault: case 1302: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6598 +//line sql.y:6588 { yyLOCAL = &GeoJSONFromGeomExpr{Geom: yyDollar[3].exprUnion()} } @@ -19983,7 +19976,7 @@ yydefault: case 1303: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Expr -//line sql.y:6602 +//line sql.y:6592 { yyLOCAL = &GeoJSONFromGeomExpr{Geom: yyDollar[3].exprUnion(), MaxDecimalDigits: yyDollar[5].exprUnion()} } @@ -19991,7 +19984,7 @@ yydefault: case 1304: yyDollar = yyS[yypt-8 : yypt+1] var yyLOCAL Expr -//line sql.y:6606 +//line sql.y:6596 { yyLOCAL = &GeoJSONFromGeomExpr{Geom: yyDollar[3].exprUnion(), MaxDecimalDigits: yyDollar[5].exprUnion(), Bitmask: yyDollar[7].exprUnion()} } @@ -19999,7 +19992,7 @@ yydefault: case 1305: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6610 +//line sql.y:6600 { yyLOCAL = &JSONObjectExpr{Params: yyDollar[3].jsonObjectParamsUnion()} } @@ -20007,7 +20000,7 @@ yydefault: case 1306: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6614 +//line sql.y:6604 { yyLOCAL = &JSONQuoteExpr{StringArg: yyDollar[3].exprUnion()} } @@ -20015,7 +20008,7 @@ yydefault: case 1307: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Expr -//line sql.y:6618 +//line sql.y:6608 { yyLOCAL = &JSONContainsExpr{Target: yyDollar[3].exprUnion(), Candidate: yyDollar[5].exprsUnion()[0], PathList: yyDollar[5].exprsUnion()[1:]} } @@ -20023,7 +20016,7 @@ yydefault: case 1308: yyDollar = yyS[yypt-8 : yypt+1] var yyLOCAL Expr -//line sql.y:6622 +//line sql.y:6612 { yyLOCAL = &JSONContainsPathExpr{JSONDoc: yyDollar[3].exprUnion(), OneOrAll: yyDollar[5].exprUnion(), PathList: yyDollar[7].exprsUnion()} } @@ -20031,7 +20024,7 @@ yydefault: case 1309: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Expr -//line sql.y:6626 +//line sql.y:6616 { yyLOCAL = &JSONExtractExpr{JSONDoc: yyDollar[3].exprUnion(), PathList: 
yyDollar[5].exprsUnion()} } @@ -20039,7 +20032,7 @@ yydefault: case 1310: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6630 +//line sql.y:6620 { yyLOCAL = &JSONKeysExpr{JSONDoc: yyDollar[3].exprUnion()} } @@ -20047,7 +20040,7 @@ yydefault: case 1311: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Expr -//line sql.y:6634 +//line sql.y:6624 { yyLOCAL = &JSONKeysExpr{JSONDoc: yyDollar[3].exprUnion(), Path: yyDollar[5].exprUnion()} } @@ -20055,7 +20048,7 @@ yydefault: case 1312: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Expr -//line sql.y:6638 +//line sql.y:6628 { yyLOCAL = &JSONOverlapsExpr{JSONDoc1: yyDollar[3].exprUnion(), JSONDoc2: yyDollar[5].exprUnion()} } @@ -20063,7 +20056,7 @@ yydefault: case 1313: yyDollar = yyS[yypt-8 : yypt+1] var yyLOCAL Expr -//line sql.y:6642 +//line sql.y:6632 { yyLOCAL = &JSONSearchExpr{JSONDoc: yyDollar[3].exprUnion(), OneOrAll: yyDollar[5].exprUnion(), SearchStr: yyDollar[7].exprUnion()} } @@ -20071,7 +20064,7 @@ yydefault: case 1314: yyDollar = yyS[yypt-10 : yypt+1] var yyLOCAL Expr -//line sql.y:6646 +//line sql.y:6636 { yyLOCAL = &JSONSearchExpr{JSONDoc: yyDollar[3].exprUnion(), OneOrAll: yyDollar[5].exprUnion(), SearchStr: yyDollar[7].exprUnion(), EscapeChar: yyDollar[9].exprsUnion()[0], PathList: yyDollar[9].exprsUnion()[1:]} } @@ -20079,7 +20072,7 @@ yydefault: case 1315: yyDollar = yyS[yypt-7 : yypt+1] var yyLOCAL Expr -//line sql.y:6650 +//line sql.y:6640 { yyLOCAL = &JSONValueExpr{JSONDoc: yyDollar[3].exprUnion(), Path: yyDollar[5].exprUnion(), ReturningType: yyDollar[6].convertTypeUnion()} } @@ -20087,7 +20080,7 @@ yydefault: case 1316: yyDollar = yyS[yypt-8 : yypt+1] var yyLOCAL Expr -//line sql.y:6654 +//line sql.y:6644 { yyLOCAL = &JSONValueExpr{JSONDoc: yyDollar[3].exprUnion(), Path: yyDollar[5].exprUnion(), ReturningType: yyDollar[6].convertTypeUnion(), EmptyOnResponse: yyDollar[7].jtOnResponseUnion()} } @@ -20095,7 +20088,7 @@ yydefault: case 1317: yyDollar = yyS[yypt-8 : yypt+1] var yyLOCAL Expr 
-//line sql.y:6658 +//line sql.y:6648 { yyLOCAL = &JSONValueExpr{JSONDoc: yyDollar[3].exprUnion(), Path: yyDollar[5].exprUnion(), ReturningType: yyDollar[6].convertTypeUnion(), ErrorOnResponse: yyDollar[7].jtOnResponseUnion()} } @@ -20103,7 +20096,7 @@ yydefault: case 1318: yyDollar = yyS[yypt-9 : yypt+1] var yyLOCAL Expr -//line sql.y:6662 +//line sql.y:6652 { yyLOCAL = &JSONValueExpr{JSONDoc: yyDollar[3].exprUnion(), Path: yyDollar[5].exprUnion(), ReturningType: yyDollar[6].convertTypeUnion(), EmptyOnResponse: yyDollar[7].jtOnResponseUnion(), ErrorOnResponse: yyDollar[8].jtOnResponseUnion()} } @@ -20111,7 +20104,7 @@ yydefault: case 1319: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6666 +//line sql.y:6656 { yyLOCAL = &JSONAttributesExpr{Type: DepthAttributeType, JSONDoc: yyDollar[3].exprUnion()} } @@ -20119,7 +20112,7 @@ yydefault: case 1320: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6670 +//line sql.y:6660 { yyLOCAL = &JSONAttributesExpr{Type: ValidAttributeType, JSONDoc: yyDollar[3].exprUnion()} } @@ -20127,7 +20120,7 @@ yydefault: case 1321: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6674 +//line sql.y:6664 { yyLOCAL = &JSONAttributesExpr{Type: TypeAttributeType, JSONDoc: yyDollar[3].exprUnion()} } @@ -20135,7 +20128,7 @@ yydefault: case 1322: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6678 +//line sql.y:6668 { yyLOCAL = &JSONAttributesExpr{Type: LengthAttributeType, JSONDoc: yyDollar[3].exprUnion()} } @@ -20143,7 +20136,7 @@ yydefault: case 1323: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Expr -//line sql.y:6682 +//line sql.y:6672 { yyLOCAL = &JSONAttributesExpr{Type: LengthAttributeType, JSONDoc: yyDollar[3].exprUnion(), Path: yyDollar[5].exprUnion()} } @@ -20151,7 +20144,7 @@ yydefault: case 1324: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Expr -//line sql.y:6686 +//line sql.y:6676 { yyLOCAL = &JSONValueModifierExpr{Type: JSONArrayAppendType, JSONDoc: yyDollar[3].exprUnion(), 
Params: yyDollar[5].jsonObjectParamsUnion()} } @@ -20159,7 +20152,7 @@ yydefault: case 1325: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Expr -//line sql.y:6690 +//line sql.y:6680 { yyLOCAL = &JSONValueModifierExpr{Type: JSONArrayInsertType, JSONDoc: yyDollar[3].exprUnion(), Params: yyDollar[5].jsonObjectParamsUnion()} } @@ -20167,7 +20160,7 @@ yydefault: case 1326: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Expr -//line sql.y:6694 +//line sql.y:6684 { yyLOCAL = &JSONValueModifierExpr{Type: JSONInsertType, JSONDoc: yyDollar[3].exprUnion(), Params: yyDollar[5].jsonObjectParamsUnion()} } @@ -20175,7 +20168,7 @@ yydefault: case 1327: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Expr -//line sql.y:6698 +//line sql.y:6688 { yyLOCAL = &JSONValueModifierExpr{Type: JSONReplaceType, JSONDoc: yyDollar[3].exprUnion(), Params: yyDollar[5].jsonObjectParamsUnion()} } @@ -20183,7 +20176,7 @@ yydefault: case 1328: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Expr -//line sql.y:6702 +//line sql.y:6692 { yyLOCAL = &JSONValueModifierExpr{Type: JSONSetType, JSONDoc: yyDollar[3].exprUnion(), Params: yyDollar[5].jsonObjectParamsUnion()} } @@ -20191,7 +20184,7 @@ yydefault: case 1329: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Expr -//line sql.y:6706 +//line sql.y:6696 { yyLOCAL = &JSONValueMergeExpr{Type: JSONMergeType, JSONDoc: yyDollar[3].exprUnion(), JSONDocList: yyDollar[5].exprsUnion()} } @@ -20199,7 +20192,7 @@ yydefault: case 1330: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Expr -//line sql.y:6710 +//line sql.y:6700 { yyLOCAL = &JSONValueMergeExpr{Type: JSONMergePatchType, JSONDoc: yyDollar[3].exprUnion(), JSONDocList: yyDollar[5].exprsUnion()} } @@ -20207,7 +20200,7 @@ yydefault: case 1331: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Expr -//line sql.y:6714 +//line sql.y:6704 { yyLOCAL = &JSONValueMergeExpr{Type: JSONMergePreserveType, JSONDoc: yyDollar[3].exprUnion(), JSONDocList: yyDollar[5].exprsUnion()} } @@ -20215,7 +20208,7 @@ yydefault: case 1332: yyDollar = yyS[yypt-6 : 
yypt+1] var yyLOCAL Expr -//line sql.y:6718 +//line sql.y:6708 { yyLOCAL = &JSONRemoveExpr{JSONDoc: yyDollar[3].exprUnion(), PathList: yyDollar[5].exprsUnion()} } @@ -20223,7 +20216,7 @@ yydefault: case 1333: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6722 +//line sql.y:6712 { yyLOCAL = &JSONUnquoteExpr{JSONValue: yyDollar[3].exprUnion()} } @@ -20231,7 +20224,7 @@ yydefault: case 1334: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6726 +//line sql.y:6716 { yyLOCAL = &MultiPolygonExpr{PolygonParams: yyDollar[3].exprsUnion()} } @@ -20239,7 +20232,7 @@ yydefault: case 1335: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6730 +//line sql.y:6720 { yyLOCAL = &MultiPointExpr{PointParams: yyDollar[3].exprsUnion()} } @@ -20247,7 +20240,7 @@ yydefault: case 1336: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6734 +//line sql.y:6724 { yyLOCAL = &MultiLinestringExpr{LinestringParams: yyDollar[3].exprsUnion()} } @@ -20255,7 +20248,7 @@ yydefault: case 1337: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6738 +//line sql.y:6728 { yyLOCAL = &PolygonExpr{LinestringParams: yyDollar[3].exprsUnion()} } @@ -20263,7 +20256,7 @@ yydefault: case 1338: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6742 +//line sql.y:6732 { yyLOCAL = &LineStringExpr{PointParams: yyDollar[3].exprsUnion()} } @@ -20271,7 +20264,7 @@ yydefault: case 1339: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Expr -//line sql.y:6746 +//line sql.y:6736 { yyLOCAL = &PointExpr{XCordinate: yyDollar[3].exprUnion(), YCordinate: yyDollar[5].exprUnion()} } @@ -20279,7 +20272,7 @@ yydefault: case 1340: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6750 +//line sql.y:6740 { yyLOCAL = &ArgumentLessWindowExpr{Type: yyDollar[1].argumentLessWindowExprTypeUnion(), OverClause: yyDollar[4].overClauseUnion()} } @@ -20287,7 +20280,7 @@ yydefault: case 1341: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Expr -//line 
sql.y:6754 +//line sql.y:6744 { yyLOCAL = &FirstOrLastValueExpr{Type: yyDollar[1].firstOrLastValueExprTypeUnion(), Expr: yyDollar[3].exprUnion(), NullTreatmentClause: yyDollar[5].nullTreatmentClauseUnion(), OverClause: yyDollar[6].overClauseUnion()} } @@ -20295,7 +20288,7 @@ yydefault: case 1342: yyDollar = yyS[yypt-5 : yypt+1] var yyLOCAL Expr -//line sql.y:6758 +//line sql.y:6748 { yyLOCAL = &NtileExpr{N: yyDollar[3].exprUnion(), OverClause: yyDollar[5].overClauseUnion()} } @@ -20303,7 +20296,7 @@ yydefault: case 1343: yyDollar = yyS[yypt-9 : yypt+1] var yyLOCAL Expr -//line sql.y:6762 +//line sql.y:6752 { yyLOCAL = &NTHValueExpr{Expr: yyDollar[3].exprUnion(), N: yyDollar[5].exprUnion(), FromFirstLastClause: yyDollar[7].fromFirstLastClauseUnion(), NullTreatmentClause: yyDollar[8].nullTreatmentClauseUnion(), OverClause: yyDollar[9].overClauseUnion()} } @@ -20311,7 +20304,7 @@ yydefault: case 1344: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Expr -//line sql.y:6766 +//line sql.y:6756 { yyLOCAL = &LagLeadExpr{Type: yyDollar[1].lagLeadExprTypeUnion(), Expr: yyDollar[3].exprUnion(), NullTreatmentClause: yyDollar[5].nullTreatmentClauseUnion(), OverClause: yyDollar[6].overClauseUnion()} } @@ -20319,7 +20312,7 @@ yydefault: case 1345: yyDollar = yyS[yypt-9 : yypt+1] var yyLOCAL Expr -//line sql.y:6770 +//line sql.y:6760 { yyLOCAL = &LagLeadExpr{Type: yyDollar[1].lagLeadExprTypeUnion(), Expr: yyDollar[3].exprUnion(), N: yyDollar[5].exprUnion(), Default: yyDollar[6].exprUnion(), NullTreatmentClause: yyDollar[8].nullTreatmentClauseUnion(), OverClause: yyDollar[9].overClauseUnion()} } @@ -20327,7 +20320,7 @@ yydefault: case 1346: yyDollar = yyS[yypt-8 : yypt+1] var yyLOCAL Expr -//line sql.y:6774 +//line sql.y:6764 { yyLOCAL = &IntervalDateExpr{Syntax: IntervalDateExprAdddate, Date: yyDollar[3].exprUnion(), Interval: yyDollar[6].exprUnion(), Unit: yyDollar[7].intervalTypeUnion()} } @@ -20335,7 +20328,7 @@ yydefault: case 1347: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Expr 
-//line sql.y:6778 +//line sql.y:6768 { yyLOCAL = &IntervalDateExpr{Syntax: IntervalDateExprAdddate, Date: yyDollar[3].exprUnion(), Interval: yyDollar[5].exprUnion(), Unit: IntervalNone} } @@ -20343,7 +20336,7 @@ yydefault: case 1348: yyDollar = yyS[yypt-8 : yypt+1] var yyLOCAL Expr -//line sql.y:6782 +//line sql.y:6772 { yyLOCAL = &IntervalDateExpr{Syntax: IntervalDateExprDateAdd, Date: yyDollar[3].exprUnion(), Interval: yyDollar[6].exprUnion(), Unit: yyDollar[7].intervalTypeUnion()} } @@ -20351,7 +20344,7 @@ yydefault: case 1349: yyDollar = yyS[yypt-8 : yypt+1] var yyLOCAL Expr -//line sql.y:6786 +//line sql.y:6776 { yyLOCAL = &IntervalDateExpr{Syntax: IntervalDateExprDateSub, Date: yyDollar[3].exprUnion(), Interval: yyDollar[6].exprUnion(), Unit: yyDollar[7].intervalTypeUnion()} } @@ -20359,7 +20352,7 @@ yydefault: case 1350: yyDollar = yyS[yypt-8 : yypt+1] var yyLOCAL Expr -//line sql.y:6790 +//line sql.y:6780 { yyLOCAL = &IntervalDateExpr{Syntax: IntervalDateExprSubdate, Date: yyDollar[3].exprUnion(), Interval: yyDollar[6].exprUnion(), Unit: yyDollar[7].intervalTypeUnion()} } @@ -20367,7 +20360,7 @@ yydefault: case 1351: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Expr -//line sql.y:6794 +//line sql.y:6784 { yyLOCAL = &IntervalDateExpr{Syntax: IntervalDateExprSubdate, Date: yyDollar[3].exprUnion(), Interval: yyDollar[5].exprUnion(), Unit: IntervalNone} } @@ -20375,7 +20368,7 @@ yydefault: case 1356: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL Expr -//line sql.y:6804 +//line sql.y:6794 { yyLOCAL = yyDollar[1].exprUnion() } @@ -20383,7 +20376,7 @@ yydefault: case 1357: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL Expr -//line sql.y:6808 +//line sql.y:6798 { yyLOCAL = NewIntLiteral(yyDollar[1].str) } @@ -20391,7 +20384,7 @@ yydefault: case 1358: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL Expr -//line sql.y:6812 +//line sql.y:6802 { yyLOCAL = yyDollar[1].variableUnion() } @@ -20399,7 +20392,7 @@ yydefault: case 1359: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL 
Expr -//line sql.y:6816 +//line sql.y:6806 { yyLOCAL = parseBindVariable(yylex, yyDollar[1].str[1:]) } @@ -20407,7 +20400,7 @@ yydefault: case 1360: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL Expr -//line sql.y:6821 +//line sql.y:6811 { yyLOCAL = nil } @@ -20415,7 +20408,7 @@ yydefault: case 1361: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL Expr -//line sql.y:6825 +//line sql.y:6815 { yyLOCAL = yyDollar[2].exprUnion() } @@ -20423,7 +20416,7 @@ yydefault: case 1362: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Expr -//line sql.y:6831 +//line sql.y:6821 { yyLOCAL = &RegexpInstrExpr{Expr: yyDollar[3].exprUnion(), Pattern: yyDollar[5].exprUnion()} } @@ -20431,7 +20424,7 @@ yydefault: case 1363: yyDollar = yyS[yypt-8 : yypt+1] var yyLOCAL Expr -//line sql.y:6835 +//line sql.y:6825 { yyLOCAL = &RegexpInstrExpr{Expr: yyDollar[3].exprUnion(), Pattern: yyDollar[5].exprUnion(), Position: yyDollar[7].exprUnion()} } @@ -20439,7 +20432,7 @@ yydefault: case 1364: yyDollar = yyS[yypt-10 : yypt+1] var yyLOCAL Expr -//line sql.y:6839 +//line sql.y:6829 { yyLOCAL = &RegexpInstrExpr{Expr: yyDollar[3].exprUnion(), Pattern: yyDollar[5].exprUnion(), Position: yyDollar[7].exprUnion(), Occurrence: yyDollar[9].exprUnion()} } @@ -20447,7 +20440,7 @@ yydefault: case 1365: yyDollar = yyS[yypt-12 : yypt+1] var yyLOCAL Expr -//line sql.y:6843 +//line sql.y:6833 { yyLOCAL = &RegexpInstrExpr{Expr: yyDollar[3].exprUnion(), Pattern: yyDollar[5].exprUnion(), Position: yyDollar[7].exprUnion(), Occurrence: yyDollar[9].exprUnion(), ReturnOption: yyDollar[11].exprUnion()} } @@ -20455,7 +20448,7 @@ yydefault: case 1366: yyDollar = yyS[yypt-14 : yypt+1] var yyLOCAL Expr -//line sql.y:6847 +//line sql.y:6837 { // Match type is kept expression as TRIM( ' m ') is accepted yyLOCAL = &RegexpInstrExpr{Expr: yyDollar[3].exprUnion(), Pattern: yyDollar[5].exprUnion(), Position: yyDollar[7].exprUnion(), Occurrence: yyDollar[9].exprUnion(), ReturnOption: yyDollar[11].exprUnion(), MatchType: yyDollar[13].exprUnion()} 
@@ -20464,7 +20457,7 @@ yydefault: case 1367: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Expr -//line sql.y:6852 +//line sql.y:6842 { yyLOCAL = &RegexpLikeExpr{Expr: yyDollar[3].exprUnion(), Pattern: yyDollar[5].exprUnion()} } @@ -20472,7 +20465,7 @@ yydefault: case 1368: yyDollar = yyS[yypt-8 : yypt+1] var yyLOCAL Expr -//line sql.y:6856 +//line sql.y:6846 { yyLOCAL = &RegexpLikeExpr{Expr: yyDollar[3].exprUnion(), Pattern: yyDollar[5].exprUnion(), MatchType: yyDollar[7].exprUnion()} } @@ -20480,7 +20473,7 @@ yydefault: case 1369: yyDollar = yyS[yypt-8 : yypt+1] var yyLOCAL Expr -//line sql.y:6860 +//line sql.y:6850 { yyLOCAL = &RegexpReplaceExpr{Expr: yyDollar[3].exprUnion(), Pattern: yyDollar[5].exprUnion(), Repl: yyDollar[7].exprUnion()} } @@ -20488,7 +20481,7 @@ yydefault: case 1370: yyDollar = yyS[yypt-10 : yypt+1] var yyLOCAL Expr -//line sql.y:6864 +//line sql.y:6854 { yyLOCAL = &RegexpReplaceExpr{Expr: yyDollar[3].exprUnion(), Pattern: yyDollar[5].exprUnion(), Repl: yyDollar[7].exprUnion(), Position: yyDollar[9].exprUnion()} } @@ -20496,7 +20489,7 @@ yydefault: case 1371: yyDollar = yyS[yypt-12 : yypt+1] var yyLOCAL Expr -//line sql.y:6868 +//line sql.y:6858 { yyLOCAL = &RegexpReplaceExpr{Expr: yyDollar[3].exprUnion(), Pattern: yyDollar[5].exprUnion(), Repl: yyDollar[7].exprUnion(), Position: yyDollar[9].exprUnion(), Occurrence: yyDollar[11].exprUnion()} } @@ -20504,7 +20497,7 @@ yydefault: case 1372: yyDollar = yyS[yypt-14 : yypt+1] var yyLOCAL Expr -//line sql.y:6872 +//line sql.y:6862 { // Match type is kept expression as TRIM( ' m ') is accepted yyLOCAL = &RegexpReplaceExpr{Expr: yyDollar[3].exprUnion(), Pattern: yyDollar[5].exprUnion(), Repl: yyDollar[7].exprUnion(), Position: yyDollar[9].exprUnion(), Occurrence: yyDollar[11].exprUnion(), MatchType: yyDollar[13].exprUnion()} @@ -20513,7 +20506,7 @@ yydefault: case 1373: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Expr -//line sql.y:6877 +//line sql.y:6867 { yyLOCAL = &RegexpSubstrExpr{Expr: 
yyDollar[3].exprUnion(), Pattern: yyDollar[5].exprUnion()} } @@ -20521,7 +20514,7 @@ yydefault: case 1374: yyDollar = yyS[yypt-8 : yypt+1] var yyLOCAL Expr -//line sql.y:6881 +//line sql.y:6871 { yyLOCAL = &RegexpSubstrExpr{Expr: yyDollar[3].exprUnion(), Pattern: yyDollar[5].exprUnion(), Position: yyDollar[7].exprUnion()} } @@ -20529,7 +20522,7 @@ yydefault: case 1375: yyDollar = yyS[yypt-10 : yypt+1] var yyLOCAL Expr -//line sql.y:6885 +//line sql.y:6875 { yyLOCAL = &RegexpSubstrExpr{Expr: yyDollar[3].exprUnion(), Pattern: yyDollar[5].exprUnion(), Position: yyDollar[7].exprUnion(), Occurrence: yyDollar[9].exprUnion()} } @@ -20537,7 +20530,7 @@ yydefault: case 1376: yyDollar = yyS[yypt-12 : yypt+1] var yyLOCAL Expr -//line sql.y:6889 +//line sql.y:6879 { // Match type is kept expression as TRIM( ' m ') is accepted yyLOCAL = &RegexpSubstrExpr{Expr: yyDollar[3].exprUnion(), Pattern: yyDollar[5].exprUnion(), Position: yyDollar[7].exprUnion(), Occurrence: yyDollar[9].exprUnion(), MatchType: yyDollar[11].exprUnion()} @@ -20546,7 +20539,7 @@ yydefault: case 1377: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Expr -//line sql.y:6896 +//line sql.y:6886 { yyLOCAL = &ExtractValueExpr{Fragment: yyDollar[3].exprUnion(), XPathExpr: yyDollar[5].exprUnion()} } @@ -20554,7 +20547,7 @@ yydefault: case 1378: yyDollar = yyS[yypt-8 : yypt+1] var yyLOCAL Expr -//line sql.y:6900 +//line sql.y:6890 { yyLOCAL = &UpdateXMLExpr{Target: yyDollar[3].exprUnion(), XPathExpr: yyDollar[5].exprUnion(), NewXML: yyDollar[7].exprUnion()} } @@ -20562,7 +20555,7 @@ yydefault: case 1379: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6906 +//line sql.y:6896 { yyLOCAL = &PerformanceSchemaFuncExpr{Type: FormatBytesType, Argument: yyDollar[3].exprUnion()} } @@ -20570,7 +20563,7 @@ yydefault: case 1380: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6910 +//line sql.y:6900 { yyLOCAL = &PerformanceSchemaFuncExpr{Type: FormatPicoTimeType, Argument: yyDollar[3].exprUnion()} } @@ 
-20578,7 +20571,7 @@ yydefault: case 1381: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL Expr -//line sql.y:6914 +//line sql.y:6904 { yyLOCAL = &PerformanceSchemaFuncExpr{Type: PsCurrentThreadIDType} } @@ -20586,7 +20579,7 @@ yydefault: case 1382: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6918 +//line sql.y:6908 { yyLOCAL = &PerformanceSchemaFuncExpr{Type: PsThreadIDType, Argument: yyDollar[3].exprUnion()} } @@ -20594,7 +20587,7 @@ yydefault: case 1383: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Expr -//line sql.y:6924 +//line sql.y:6914 { yyLOCAL = >IDFuncExpr{Type: GTIDSubsetType, Set1: yyDollar[3].exprUnion(), Set2: yyDollar[5].exprUnion()} } @@ -20602,7 +20595,7 @@ yydefault: case 1384: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Expr -//line sql.y:6928 +//line sql.y:6918 { yyLOCAL = >IDFuncExpr{Type: GTIDSubtractType, Set1: yyDollar[3].exprUnion(), Set2: yyDollar[5].exprUnion()} } @@ -20610,7 +20603,7 @@ yydefault: case 1385: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6932 +//line sql.y:6922 { yyLOCAL = >IDFuncExpr{Type: WaitForExecutedGTIDSetType, Set1: yyDollar[3].exprUnion()} } @@ -20618,7 +20611,7 @@ yydefault: case 1386: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Expr -//line sql.y:6936 +//line sql.y:6926 { yyLOCAL = >IDFuncExpr{Type: WaitForExecutedGTIDSetType, Set1: yyDollar[3].exprUnion(), Timeout: yyDollar[5].exprUnion()} } @@ -20626,7 +20619,7 @@ yydefault: case 1387: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:6940 +//line sql.y:6930 { yyLOCAL = >IDFuncExpr{Type: WaitUntilSQLThreadAfterGTIDSType, Set1: yyDollar[3].exprUnion()} } @@ -20634,7 +20627,7 @@ yydefault: case 1388: yyDollar = yyS[yypt-6 : yypt+1] var yyLOCAL Expr -//line sql.y:6944 +//line sql.y:6934 { yyLOCAL = >IDFuncExpr{Type: WaitUntilSQLThreadAfterGTIDSType, Set1: yyDollar[3].exprUnion(), Timeout: yyDollar[5].exprUnion()} } @@ -20642,7 +20635,7 @@ yydefault: case 1389: yyDollar = yyS[yypt-8 : yypt+1] var yyLOCAL Expr -//line 
sql.y:6948 +//line sql.y:6938 { yyLOCAL = >IDFuncExpr{Type: WaitUntilSQLThreadAfterGTIDSType, Set1: yyDollar[3].exprUnion(), Timeout: yyDollar[5].exprUnion(), Channel: yyDollar[7].exprUnion()} } @@ -20650,7 +20643,7 @@ yydefault: case 1390: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL *ConvertType -//line sql.y:6953 +//line sql.y:6943 { yyLOCAL = nil } @@ -20658,7 +20651,7 @@ yydefault: case 1391: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL *ConvertType -//line sql.y:6957 +//line sql.y:6947 { yyLOCAL = yyDollar[2].convertTypeUnion() } @@ -20666,7 +20659,7 @@ yydefault: case 1392: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL IntervalType -//line sql.y:6963 +//line sql.y:6953 { yyLOCAL = IntervalDayHour } @@ -20674,7 +20667,7 @@ yydefault: case 1393: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL IntervalType -//line sql.y:6967 +//line sql.y:6957 { yyLOCAL = IntervalDayMicrosecond } @@ -20682,7 +20675,7 @@ yydefault: case 1394: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL IntervalType -//line sql.y:6971 +//line sql.y:6961 { yyLOCAL = IntervalDayMinute } @@ -20690,7 +20683,7 @@ yydefault: case 1395: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL IntervalType -//line sql.y:6975 +//line sql.y:6965 { yyLOCAL = IntervalDaySecond } @@ -20698,7 +20691,7 @@ yydefault: case 1396: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL IntervalType -//line sql.y:6979 +//line sql.y:6969 { yyLOCAL = IntervalHourMicrosecond } @@ -20706,7 +20699,7 @@ yydefault: case 1397: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL IntervalType -//line sql.y:6983 +//line sql.y:6973 { yyLOCAL = IntervalHourMinute } @@ -20714,7 +20707,7 @@ yydefault: case 1398: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL IntervalType -//line sql.y:6987 +//line sql.y:6977 { yyLOCAL = IntervalHourSecond } @@ -20722,7 +20715,7 @@ yydefault: case 1399: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL IntervalType -//line sql.y:6991 +//line sql.y:6981 { yyLOCAL = IntervalMinuteMicrosecond } @@ -20730,7 +20723,7 @@ yydefault: case 1400: yyDollar = 
yyS[yypt-1 : yypt+1] var yyLOCAL IntervalType -//line sql.y:6995 +//line sql.y:6985 { yyLOCAL = IntervalMinuteSecond } @@ -20738,7 +20731,7 @@ yydefault: case 1401: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL IntervalType -//line sql.y:6999 +//line sql.y:6989 { yyLOCAL = IntervalSecondMicrosecond } @@ -20746,7 +20739,7 @@ yydefault: case 1402: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL IntervalType -//line sql.y:7003 +//line sql.y:6993 { yyLOCAL = IntervalYearMonth } @@ -20754,7 +20747,7 @@ yydefault: case 1403: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL IntervalType -//line sql.y:7007 +//line sql.y:6997 { yyLOCAL = IntervalDay } @@ -20762,7 +20755,7 @@ yydefault: case 1404: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL IntervalType -//line sql.y:7011 +//line sql.y:7001 { yyLOCAL = IntervalWeek } @@ -20770,7 +20763,7 @@ yydefault: case 1405: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL IntervalType -//line sql.y:7015 +//line sql.y:7005 { yyLOCAL = IntervalHour } @@ -20778,7 +20771,7 @@ yydefault: case 1406: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL IntervalType -//line sql.y:7019 +//line sql.y:7009 { yyLOCAL = IntervalMinute } @@ -20786,7 +20779,7 @@ yydefault: case 1407: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL IntervalType -//line sql.y:7023 +//line sql.y:7013 { yyLOCAL = IntervalMonth } @@ -20794,7 +20787,7 @@ yydefault: case 1408: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL IntervalType -//line sql.y:7027 +//line sql.y:7017 { yyLOCAL = IntervalQuarter } @@ -20802,7 +20795,7 @@ yydefault: case 1409: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL IntervalType -//line sql.y:7031 +//line sql.y:7021 { yyLOCAL = IntervalSecond } @@ -20810,7 +20803,7 @@ yydefault: case 1410: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL IntervalType -//line sql.y:7035 +//line sql.y:7025 { yyLOCAL = IntervalMicrosecond } @@ -20818,7 +20811,7 @@ yydefault: case 1411: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL IntervalType -//line sql.y:7039 +//line sql.y:7029 { yyLOCAL = IntervalYear } 
@@ -20826,7 +20819,7 @@ yydefault: case 1412: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL IntervalType -//line sql.y:7045 +//line sql.y:7035 { yyLOCAL = IntervalDay } @@ -20834,7 +20827,7 @@ yydefault: case 1413: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL IntervalType -//line sql.y:7049 +//line sql.y:7039 { yyLOCAL = IntervalWeek } @@ -20842,7 +20835,7 @@ yydefault: case 1414: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL IntervalType -//line sql.y:7053 +//line sql.y:7043 { yyLOCAL = IntervalHour } @@ -20850,7 +20843,7 @@ yydefault: case 1415: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL IntervalType -//line sql.y:7057 +//line sql.y:7047 { yyLOCAL = IntervalMinute } @@ -20858,7 +20851,7 @@ yydefault: case 1416: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL IntervalType -//line sql.y:7061 +//line sql.y:7051 { yyLOCAL = IntervalMonth } @@ -20866,7 +20859,7 @@ yydefault: case 1417: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL IntervalType -//line sql.y:7065 +//line sql.y:7055 { yyLOCAL = IntervalQuarter } @@ -20874,7 +20867,7 @@ yydefault: case 1418: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL IntervalType -//line sql.y:7069 +//line sql.y:7059 { yyLOCAL = IntervalSecond } @@ -20882,7 +20875,7 @@ yydefault: case 1419: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL IntervalType -//line sql.y:7073 +//line sql.y:7063 { yyLOCAL = IntervalMicrosecond } @@ -20890,7 +20883,7 @@ yydefault: case 1420: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL IntervalType -//line sql.y:7077 +//line sql.y:7067 { yyLOCAL = IntervalYear } @@ -20898,7 +20891,7 @@ yydefault: case 1421: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL IntervalType -//line sql.y:7081 +//line sql.y:7071 { yyLOCAL = IntervalDay } @@ -20906,7 +20899,7 @@ yydefault: case 1422: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL IntervalType -//line sql.y:7085 +//line sql.y:7075 { yyLOCAL = IntervalWeek } @@ -20914,7 +20907,7 @@ yydefault: case 1423: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL IntervalType -//line sql.y:7089 +//line 
sql.y:7079 { yyLOCAL = IntervalHour } @@ -20922,7 +20915,7 @@ yydefault: case 1424: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL IntervalType -//line sql.y:7093 +//line sql.y:7083 { yyLOCAL = IntervalMinute } @@ -20930,7 +20923,7 @@ yydefault: case 1425: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL IntervalType -//line sql.y:7097 +//line sql.y:7087 { yyLOCAL = IntervalMonth } @@ -20938,7 +20931,7 @@ yydefault: case 1426: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL IntervalType -//line sql.y:7101 +//line sql.y:7091 { yyLOCAL = IntervalQuarter } @@ -20946,7 +20939,7 @@ yydefault: case 1427: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL IntervalType -//line sql.y:7105 +//line sql.y:7095 { yyLOCAL = IntervalSecond } @@ -20954,7 +20947,7 @@ yydefault: case 1428: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL IntervalType -//line sql.y:7109 +//line sql.y:7099 { yyLOCAL = IntervalMicrosecond } @@ -20962,7 +20955,7 @@ yydefault: case 1429: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL IntervalType -//line sql.y:7113 +//line sql.y:7103 { yyLOCAL = IntervalYear } @@ -20970,7 +20963,7 @@ yydefault: case 1432: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL int -//line sql.y:7123 +//line sql.y:7113 { yyLOCAL = 0 } @@ -20978,7 +20971,7 @@ yydefault: case 1433: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL int -//line sql.y:7127 +//line sql.y:7117 { yyLOCAL = 0 } @@ -20986,7 +20979,7 @@ yydefault: case 1434: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL int -//line sql.y:7131 +//line sql.y:7121 { yyLOCAL = convertStringToInt(yyDollar[2].str) } @@ -20994,7 +20987,7 @@ yydefault: case 1435: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:7141 +//line sql.y:7131 { yyLOCAL = &FuncExpr{Name: NewIdentifierCI("if"), Exprs: yyDollar[3].selectExprsUnion()} } @@ -21002,7 +20995,7 @@ yydefault: case 1436: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:7145 +//line sql.y:7135 { yyLOCAL = &FuncExpr{Name: NewIdentifierCI("database"), Exprs: yyDollar[3].selectExprsUnion()} } @@ 
-21010,7 +21003,7 @@ yydefault: case 1437: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:7149 +//line sql.y:7139 { yyLOCAL = &FuncExpr{Name: NewIdentifierCI("schema"), Exprs: yyDollar[3].selectExprsUnion()} } @@ -21018,7 +21011,7 @@ yydefault: case 1438: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:7153 +//line sql.y:7143 { yyLOCAL = &FuncExpr{Name: NewIdentifierCI("mod"), Exprs: yyDollar[3].selectExprsUnion()} } @@ -21026,7 +21019,7 @@ yydefault: case 1439: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Expr -//line sql.y:7157 +//line sql.y:7147 { yyLOCAL = &FuncExpr{Name: NewIdentifierCI("replace"), Exprs: yyDollar[3].selectExprsUnion()} } @@ -21034,7 +21027,7 @@ yydefault: case 1440: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL MatchExprOption -//line sql.y:7163 +//line sql.y:7153 { yyLOCAL = NoOption } @@ -21042,7 +21035,7 @@ yydefault: case 1441: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL MatchExprOption -//line sql.y:7167 +//line sql.y:7157 { yyLOCAL = BooleanModeOpt } @@ -21050,7 +21043,7 @@ yydefault: case 1442: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL MatchExprOption -//line sql.y:7171 +//line sql.y:7161 { yyLOCAL = NaturalLanguageModeOpt } @@ -21058,7 +21051,7 @@ yydefault: case 1443: yyDollar = yyS[yypt-7 : yypt+1] var yyLOCAL MatchExprOption -//line sql.y:7175 +//line sql.y:7165 { yyLOCAL = NaturalLanguageModeWithQueryExpansionOpt } @@ -21066,33 +21059,33 @@ yydefault: case 1444: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL MatchExprOption -//line sql.y:7179 +//line sql.y:7169 { yyLOCAL = QueryExpansionOpt } yyVAL.union = yyLOCAL case 1445: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:7185 +//line sql.y:7175 { yyVAL.str = string(yyDollar[1].identifierCI.String()) } case 1446: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:7189 +//line sql.y:7179 { yyVAL.str = string(yyDollar[1].str) } case 1447: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:7193 +//line sql.y:7183 { yyVAL.str = string(yyDollar[1].str) } case 1448: 
yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL *ConvertType -//line sql.y:7199 +//line sql.y:7189 { yyLOCAL = nil } @@ -21100,39 +21093,39 @@ yydefault: case 1449: yyDollar = yyS[yypt-5 : yypt+1] var yyLOCAL *ConvertType -//line sql.y:7203 +//line sql.y:7193 { - yyLOCAL = &ConvertType{Type: string(yyDollar[2].str), Length: NewIntLiteral(yyDollar[4].str)} + yyLOCAL = &ConvertType{Type: string(yyDollar[2].str), Length: ptr.Of(convertStringToInt(yyDollar[4].str))} } yyVAL.union = yyLOCAL case 1450: yyDollar = yyS[yypt-5 : yypt+1] var yyLOCAL *ConvertType -//line sql.y:7207 +//line sql.y:7197 { - yyLOCAL = &ConvertType{Type: string(yyDollar[2].str), Length: NewIntLiteral(yyDollar[4].str)} + yyLOCAL = &ConvertType{Type: string(yyDollar[2].str), Length: ptr.Of(convertStringToInt(yyDollar[4].str))} } yyVAL.union = yyLOCAL case 1451: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL *ConvertType -//line sql.y:7213 +//line sql.y:7203 { - yyLOCAL = &ConvertType{Type: string(yyDollar[1].str), Length: yyDollar[2].literalUnion()} + yyLOCAL = &ConvertType{Type: string(yyDollar[1].str), Length: yyDollar[2].intPtrUnion()} } yyVAL.union = yyLOCAL case 1452: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *ConvertType -//line sql.y:7217 +//line sql.y:7207 { - yyLOCAL = &ConvertType{Type: string(yyDollar[1].str), Length: yyDollar[2].literalUnion(), Charset: yyDollar[3].columnCharset} + yyLOCAL = &ConvertType{Type: string(yyDollar[1].str), Length: yyDollar[2].intPtrUnion(), Charset: yyDollar[3].columnCharset} } yyVAL.union = yyLOCAL case 1453: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL *ConvertType -//line sql.y:7221 +//line sql.y:7211 { yyLOCAL = &ConvertType{Type: string(yyDollar[1].str)} } @@ -21140,15 +21133,15 @@ yydefault: case 1454: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL *ConvertType -//line sql.y:7225 +//line sql.y:7215 { - yyLOCAL = &ConvertType{Type: string(yyDollar[1].str), Length: yyDollar[2].literalUnion()} + yyLOCAL = &ConvertType{Type: string(yyDollar[1].str), Length: 
yyDollar[2].intPtrUnion()} } yyVAL.union = yyLOCAL case 1455: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL *ConvertType -//line sql.y:7229 +//line sql.y:7219 { yyLOCAL = &ConvertType{Type: string(yyDollar[1].str)} yyLOCAL.Length = yyDollar[2].LengthScaleOption.Length @@ -21158,7 +21151,7 @@ yydefault: case 1456: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL *ConvertType -//line sql.y:7235 +//line sql.y:7225 { yyLOCAL = &ConvertType{Type: string(yyDollar[1].str)} } @@ -21166,15 +21159,15 @@ yydefault: case 1457: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL *ConvertType -//line sql.y:7239 +//line sql.y:7229 { - yyLOCAL = &ConvertType{Type: string(yyDollar[1].str), Length: yyDollar[2].literalUnion()} + yyLOCAL = &ConvertType{Type: string(yyDollar[1].str), Length: yyDollar[2].intPtrUnion()} } yyVAL.union = yyLOCAL case 1458: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL *ConvertType -//line sql.y:7243 +//line sql.y:7233 { yyLOCAL = &ConvertType{Type: string(yyDollar[1].str)} } @@ -21182,7 +21175,7 @@ yydefault: case 1459: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL *ConvertType -//line sql.y:7247 +//line sql.y:7237 { yyLOCAL = &ConvertType{Type: string(yyDollar[1].str)} } @@ -21190,15 +21183,15 @@ yydefault: case 1460: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL *ConvertType -//line sql.y:7251 +//line sql.y:7241 { - yyLOCAL = &ConvertType{Type: string(yyDollar[1].str), Length: yyDollar[2].literalUnion()} + yyLOCAL = &ConvertType{Type: string(yyDollar[1].str), Length: yyDollar[2].intPtrUnion()} } yyVAL.union = yyLOCAL case 1461: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL *ConvertType -//line sql.y:7255 +//line sql.y:7245 { yyLOCAL = &ConvertType{Type: string(yyDollar[1].str)} } @@ -21206,7 +21199,7 @@ yydefault: case 1462: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL *ConvertType -//line sql.y:7259 +//line sql.y:7249 { yyLOCAL = &ConvertType{Type: string(yyDollar[1].str)} } @@ -21214,15 +21207,15 @@ yydefault: case 1463: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL 
*ConvertType -//line sql.y:7263 +//line sql.y:7253 { - yyLOCAL = &ConvertType{Type: string(yyDollar[1].str), Length: yyDollar[2].literalUnion()} + yyLOCAL = &ConvertType{Type: string(yyDollar[1].str), Length: yyDollar[2].intPtrUnion()} } yyVAL.union = yyLOCAL case 1464: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL *ConvertType -//line sql.y:7267 +//line sql.y:7257 { yyLOCAL = &ConvertType{Type: string(yyDollar[1].str)} } @@ -21230,7 +21223,7 @@ yydefault: case 1465: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL *ConvertType -//line sql.y:7271 +//line sql.y:7261 { yyLOCAL = &ConvertType{Type: string(yyDollar[1].str)} } @@ -21238,7 +21231,7 @@ yydefault: case 1466: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL bool -//line sql.y:7277 +//line sql.y:7267 { yyLOCAL = false } @@ -21246,7 +21239,7 @@ yydefault: case 1467: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL bool -//line sql.y:7281 +//line sql.y:7271 { yyLOCAL = true } @@ -21254,7 +21247,7 @@ yydefault: case 1468: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL Expr -//line sql.y:7286 +//line sql.y:7276 { yyLOCAL = nil } @@ -21262,34 +21255,34 @@ yydefault: case 1469: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL Expr -//line sql.y:7290 +//line sql.y:7280 { yyLOCAL = yyDollar[1].exprUnion() } yyVAL.union = yyLOCAL case 1470: yyDollar = yyS[yypt-0 : yypt+1] -//line sql.y:7295 +//line sql.y:7285 { yyVAL.str = string("") } case 1471: yyDollar = yyS[yypt-2 : yypt+1] -//line sql.y:7299 +//line sql.y:7289 { yyVAL.str = encodeSQLString(yyDollar[2].str) } case 1472: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL []*When -//line sql.y:7305 +//line sql.y:7295 { yyLOCAL = []*When{yyDollar[1].whenUnion()} } yyVAL.union = yyLOCAL case 1473: yyDollar = yyS[yypt-2 : yypt+1] -//line sql.y:7309 +//line sql.y:7299 { yySLICE := (*[]*When)(yyIaddr(yyVAL.union)) *yySLICE = append(*yySLICE, yyDollar[2].whenUnion()) @@ -21297,7 +21290,7 @@ yydefault: case 1474: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL *When -//line sql.y:7315 +//line sql.y:7305 { 
yyLOCAL = &When{Cond: yyDollar[2].exprUnion(), Val: yyDollar[4].exprUnion()} } @@ -21305,7 +21298,7 @@ yydefault: case 1475: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL Expr -//line sql.y:7320 +//line sql.y:7310 { yyLOCAL = nil } @@ -21313,7 +21306,7 @@ yydefault: case 1476: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL Expr -//line sql.y:7324 +//line sql.y:7314 { yyLOCAL = yyDollar[2].exprUnion() } @@ -21321,7 +21314,7 @@ yydefault: case 1477: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL *ColName -//line sql.y:7330 +//line sql.y:7320 { yyLOCAL = &ColName{Name: yyDollar[1].identifierCI} } @@ -21329,7 +21322,7 @@ yydefault: case 1478: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL *ColName -//line sql.y:7334 +//line sql.y:7324 { yyLOCAL = &ColName{Name: NewIdentifierCI(string(yyDollar[1].str))} } @@ -21337,7 +21330,7 @@ yydefault: case 1479: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *ColName -//line sql.y:7338 +//line sql.y:7328 { yyLOCAL = &ColName{Qualifier: TableName{Name: yyDollar[1].identifierCS}, Name: yyDollar[3].identifierCI} } @@ -21345,7 +21338,7 @@ yydefault: case 1480: yyDollar = yyS[yypt-5 : yypt+1] var yyLOCAL *ColName -//line sql.y:7342 +//line sql.y:7332 { yyLOCAL = &ColName{Qualifier: TableName{Qualifier: yyDollar[1].identifierCS, Name: yyDollar[3].identifierCS}, Name: yyDollar[5].identifierCI} } @@ -21353,7 +21346,7 @@ yydefault: case 1481: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL Expr -//line sql.y:7348 +//line sql.y:7338 { yyLOCAL = yyDollar[1].colNameUnion() } @@ -21361,7 +21354,7 @@ yydefault: case 1482: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL Expr -//line sql.y:7352 +//line sql.y:7342 { yyLOCAL = &Offset{V: convertStringToInt(yyDollar[1].str)} } @@ -21369,7 +21362,7 @@ yydefault: case 1483: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL Expr -//line sql.y:7358 +//line sql.y:7348 { // TODO(sougou): Deprecate this construct. 
if yyDollar[1].identifierCI.Lowered() != "value" { @@ -21382,7 +21375,7 @@ yydefault: case 1484: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL Expr -//line sql.y:7367 +//line sql.y:7357 { yyLOCAL = NewIntLiteral(yyDollar[1].str) } @@ -21390,7 +21383,7 @@ yydefault: case 1485: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL Expr -//line sql.y:7371 +//line sql.y:7361 { yyLOCAL = parseBindVariable(yylex, yyDollar[1].str[1:]) } @@ -21398,7 +21391,7 @@ yydefault: case 1486: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL Exprs -//line sql.y:7376 +//line sql.y:7366 { yyLOCAL = nil } @@ -21406,7 +21399,7 @@ yydefault: case 1487: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL Exprs -//line sql.y:7380 +//line sql.y:7370 { yyLOCAL = yyDollar[3].exprsUnion() } @@ -21414,7 +21407,7 @@ yydefault: case 1488: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL Expr -//line sql.y:7385 +//line sql.y:7375 { yyLOCAL = nil } @@ -21422,7 +21415,7 @@ yydefault: case 1489: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL Expr -//line sql.y:7389 +//line sql.y:7379 { yyLOCAL = yyDollar[2].exprUnion() } @@ -21430,7 +21423,7 @@ yydefault: case 1490: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL *NamedWindow -//line sql.y:7395 +//line sql.y:7385 { yyLOCAL = &NamedWindow{yyDollar[2].windowDefinitionsUnion()} } @@ -21438,14 +21431,14 @@ yydefault: case 1491: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL NamedWindows -//line sql.y:7401 +//line sql.y:7391 { yyLOCAL = NamedWindows{yyDollar[1].namedWindowUnion()} } yyVAL.union = yyLOCAL case 1492: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:7405 +//line sql.y:7395 { yySLICE := (*NamedWindows)(yyIaddr(yyVAL.union)) *yySLICE = append(*yySLICE, yyDollar[3].namedWindowUnion()) @@ -21453,7 +21446,7 @@ yydefault: case 1493: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL NamedWindows -//line sql.y:7410 +//line sql.y:7400 { yyLOCAL = nil } @@ -21461,7 +21454,7 @@ yydefault: case 1494: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL NamedWindows -//line sql.y:7414 +//line sql.y:7404 { 
yyLOCAL = yyDollar[1].namedWindowsUnion() } @@ -21469,7 +21462,7 @@ yydefault: case 1495: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL OrderBy -//line sql.y:7419 +//line sql.y:7409 { yyLOCAL = nil } @@ -21477,7 +21470,7 @@ yydefault: case 1496: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL OrderBy -//line sql.y:7423 +//line sql.y:7413 { yyLOCAL = yyDollar[1].orderByUnion() } @@ -21485,7 +21478,7 @@ yydefault: case 1497: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL OrderBy -//line sql.y:7429 +//line sql.y:7419 { yyLOCAL = yyDollar[3].orderByUnion() } @@ -21493,14 +21486,14 @@ yydefault: case 1498: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL OrderBy -//line sql.y:7435 +//line sql.y:7425 { yyLOCAL = OrderBy{yyDollar[1].orderUnion()} } yyVAL.union = yyLOCAL case 1499: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:7439 +//line sql.y:7429 { yySLICE := (*OrderBy)(yyIaddr(yyVAL.union)) *yySLICE = append(*yySLICE, yyDollar[3].orderUnion()) @@ -21508,7 +21501,7 @@ yydefault: case 1500: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL *Order -//line sql.y:7445 +//line sql.y:7435 { yyLOCAL = &Order{Expr: yyDollar[1].exprUnion(), Direction: yyDollar[2].orderDirectionUnion()} } @@ -21516,7 +21509,7 @@ yydefault: case 1501: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL OrderDirection -//line sql.y:7450 +//line sql.y:7440 { yyLOCAL = AscOrder } @@ -21524,7 +21517,7 @@ yydefault: case 1502: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL OrderDirection -//line sql.y:7454 +//line sql.y:7444 { yyLOCAL = AscOrder } @@ -21532,7 +21525,7 @@ yydefault: case 1503: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL OrderDirection -//line sql.y:7458 +//line sql.y:7448 { yyLOCAL = DescOrder } @@ -21540,7 +21533,7 @@ yydefault: case 1504: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL *Limit -//line sql.y:7463 +//line sql.y:7453 { yyLOCAL = nil } @@ -21548,7 +21541,7 @@ yydefault: case 1505: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL *Limit -//line sql.y:7467 +//line sql.y:7457 { yyLOCAL = yyDollar[1].limitUnion() } 
@@ -21556,7 +21549,7 @@ yydefault: case 1506: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL *Limit -//line sql.y:7473 +//line sql.y:7463 { yyLOCAL = &Limit{Rowcount: yyDollar[2].exprUnion()} } @@ -21564,7 +21557,7 @@ yydefault: case 1507: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL *Limit -//line sql.y:7477 +//line sql.y:7467 { yyLOCAL = &Limit{Offset: yyDollar[2].exprUnion(), Rowcount: yyDollar[4].exprUnion()} } @@ -21572,7 +21565,7 @@ yydefault: case 1508: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL *Limit -//line sql.y:7481 +//line sql.y:7471 { yyLOCAL = &Limit{Offset: yyDollar[4].exprUnion(), Rowcount: yyDollar[2].exprUnion()} } @@ -21580,7 +21573,7 @@ yydefault: case 1509: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL []AlterOption -//line sql.y:7486 +//line sql.y:7476 { yyLOCAL = nil } @@ -21588,7 +21581,7 @@ yydefault: case 1510: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL []AlterOption -//line sql.y:7490 +//line sql.y:7480 { yyLOCAL = []AlterOption{yyDollar[1].alterOptionUnion(), yyDollar[2].alterOptionUnion()} } @@ -21596,7 +21589,7 @@ yydefault: case 1511: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL []AlterOption -//line sql.y:7494 +//line sql.y:7484 { yyLOCAL = []AlterOption{yyDollar[1].alterOptionUnion(), yyDollar[2].alterOptionUnion()} } @@ -21604,7 +21597,7 @@ yydefault: case 1512: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL []AlterOption -//line sql.y:7498 +//line sql.y:7488 { yyLOCAL = []AlterOption{yyDollar[1].alterOptionUnion()} } @@ -21612,7 +21605,7 @@ yydefault: case 1513: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL []AlterOption -//line sql.y:7502 +//line sql.y:7492 { yyLOCAL = []AlterOption{yyDollar[1].alterOptionUnion()} } @@ -21620,7 +21613,7 @@ yydefault: case 1514: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL AlterOption -//line sql.y:7509 +//line sql.y:7499 { yyLOCAL = &LockOption{Type: DefaultType} } @@ -21628,7 +21621,7 @@ yydefault: case 1515: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL AlterOption -//line sql.y:7513 +//line sql.y:7503 { 
yyLOCAL = &LockOption{Type: NoneType} } @@ -21636,7 +21629,7 @@ yydefault: case 1516: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL AlterOption -//line sql.y:7517 +//line sql.y:7507 { yyLOCAL = &LockOption{Type: SharedType} } @@ -21644,7 +21637,7 @@ yydefault: case 1517: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL AlterOption -//line sql.y:7521 +//line sql.y:7511 { yyLOCAL = &LockOption{Type: ExclusiveType} } @@ -21652,7 +21645,7 @@ yydefault: case 1518: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL AlterOption -//line sql.y:7527 +//line sql.y:7517 { yyLOCAL = AlgorithmValue(yyDollar[3].str) } @@ -21660,7 +21653,7 @@ yydefault: case 1519: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL AlterOption -//line sql.y:7531 +//line sql.y:7521 { yyLOCAL = AlgorithmValue(yyDollar[3].str) } @@ -21668,7 +21661,7 @@ yydefault: case 1520: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL AlterOption -//line sql.y:7535 +//line sql.y:7525 { yyLOCAL = AlgorithmValue(yyDollar[3].str) } @@ -21676,93 +21669,93 @@ yydefault: case 1521: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL AlterOption -//line sql.y:7539 +//line sql.y:7529 { yyLOCAL = AlgorithmValue(yyDollar[3].str) } yyVAL.union = yyLOCAL case 1522: yyDollar = yyS[yypt-0 : yypt+1] -//line sql.y:7544 +//line sql.y:7534 { yyVAL.str = "" } case 1523: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:7548 +//line sql.y:7538 { yyVAL.str = string(yyDollar[3].str) } case 1524: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:7552 +//line sql.y:7542 { yyVAL.str = string(yyDollar[3].str) } case 1525: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:7556 +//line sql.y:7546 { yyVAL.str = string(yyDollar[3].str) } case 1526: yyDollar = yyS[yypt-0 : yypt+1] -//line sql.y:7561 +//line sql.y:7551 { yyVAL.str = "" } case 1527: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:7565 +//line sql.y:7555 { yyVAL.str = yyDollar[3].str } case 1528: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:7571 +//line sql.y:7561 { yyVAL.str = string(yyDollar[1].str) } case 1529: yyDollar = 
yyS[yypt-1 : yypt+1] -//line sql.y:7575 +//line sql.y:7565 { yyVAL.str = string(yyDollar[1].str) } case 1530: yyDollar = yyS[yypt-0 : yypt+1] -//line sql.y:7580 +//line sql.y:7570 { yyVAL.str = "" } case 1531: yyDollar = yyS[yypt-4 : yypt+1] -//line sql.y:7584 +//line sql.y:7574 { yyVAL.str = yyDollar[2].str } case 1532: yyDollar = yyS[yypt-0 : yypt+1] -//line sql.y:7589 +//line sql.y:7579 { yyVAL.str = "cascaded" } case 1533: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:7593 +//line sql.y:7583 { yyVAL.str = string(yyDollar[1].str) } case 1534: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:7597 +//line sql.y:7587 { yyVAL.str = string(yyDollar[1].str) } case 1535: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL *Definer -//line sql.y:7602 +//line sql.y:7592 { yyLOCAL = nil } @@ -21770,7 +21763,7 @@ yydefault: case 1536: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *Definer -//line sql.y:7606 +//line sql.y:7596 { yyLOCAL = yyDollar[3].definerUnion() } @@ -21778,7 +21771,7 @@ yydefault: case 1537: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL *Definer -//line sql.y:7612 +//line sql.y:7602 { yyLOCAL = &Definer{ Name: string(yyDollar[1].str), @@ -21788,7 +21781,7 @@ yydefault: case 1538: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *Definer -//line sql.y:7618 +//line sql.y:7608 { yyLOCAL = &Definer{ Name: string(yyDollar[1].str), @@ -21798,7 +21791,7 @@ yydefault: case 1539: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL *Definer -//line sql.y:7624 +//line sql.y:7614 { yyLOCAL = &Definer{ Name: yyDollar[1].str, @@ -21808,32 +21801,32 @@ yydefault: yyVAL.union = yyLOCAL case 1540: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:7633 +//line sql.y:7623 { yyVAL.str = encodeSQLString(yyDollar[1].str) } case 1541: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:7637 +//line sql.y:7627 { yyVAL.str = formatIdentifier(yyDollar[1].str) } case 1542: yyDollar = yyS[yypt-0 : yypt+1] -//line sql.y:7642 +//line sql.y:7632 { yyVAL.str = "" } case 1543: yyDollar = yyS[yypt-1 : yypt+1] -//line 
sql.y:7646 +//line sql.y:7636 { yyVAL.str = formatAddress(yyDollar[1].str) } case 1544: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL Lock -//line sql.y:7652 +//line sql.y:7642 { yyLOCAL = ForUpdateLock } @@ -21841,7 +21834,7 @@ yydefault: case 1545: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL Lock -//line sql.y:7656 +//line sql.y:7646 { yyLOCAL = ShareModeLock } @@ -21849,7 +21842,7 @@ yydefault: case 1546: yyDollar = yyS[yypt-9 : yypt+1] var yyLOCAL *SelectInto -//line sql.y:7662 +//line sql.y:7652 { yyLOCAL = &SelectInto{Type: IntoOutfileS3, FileName: encodeSQLString(yyDollar[4].str), Charset: yyDollar[5].columnCharset, FormatOption: yyDollar[6].str, ExportOption: yyDollar[7].str, Manifest: yyDollar[8].str, Overwrite: yyDollar[9].str} } @@ -21857,7 +21850,7 @@ yydefault: case 1547: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *SelectInto -//line sql.y:7666 +//line sql.y:7656 { yyLOCAL = &SelectInto{Type: IntoDumpfile, FileName: encodeSQLString(yyDollar[3].str), Charset: ColumnCharset{}, FormatOption: "", ExportOption: "", Manifest: "", Overwrite: ""} } @@ -21865,177 +21858,177 @@ yydefault: case 1548: yyDollar = yyS[yypt-5 : yypt+1] var yyLOCAL *SelectInto -//line sql.y:7670 +//line sql.y:7660 { yyLOCAL = &SelectInto{Type: IntoOutfile, FileName: encodeSQLString(yyDollar[3].str), Charset: yyDollar[4].columnCharset, FormatOption: "", ExportOption: yyDollar[5].str, Manifest: "", Overwrite: ""} } yyVAL.union = yyLOCAL case 1549: yyDollar = yyS[yypt-0 : yypt+1] -//line sql.y:7675 +//line sql.y:7665 { yyVAL.str = "" } case 1550: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:7679 +//line sql.y:7669 { yyVAL.str = " format csv" + yyDollar[3].str } case 1551: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:7683 +//line sql.y:7673 { yyVAL.str = " format text" + yyDollar[3].str } case 1552: yyDollar = yyS[yypt-0 : yypt+1] -//line sql.y:7688 +//line sql.y:7678 { yyVAL.str = "" } case 1553: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:7692 +//line sql.y:7682 { yyVAL.str = " 
header" } case 1554: yyDollar = yyS[yypt-0 : yypt+1] -//line sql.y:7697 +//line sql.y:7687 { yyVAL.str = "" } case 1555: yyDollar = yyS[yypt-2 : yypt+1] -//line sql.y:7701 +//line sql.y:7691 { yyVAL.str = " manifest on" } case 1556: yyDollar = yyS[yypt-2 : yypt+1] -//line sql.y:7705 +//line sql.y:7695 { yyVAL.str = " manifest off" } case 1557: yyDollar = yyS[yypt-0 : yypt+1] -//line sql.y:7710 +//line sql.y:7700 { yyVAL.str = "" } case 1558: yyDollar = yyS[yypt-2 : yypt+1] -//line sql.y:7714 +//line sql.y:7704 { yyVAL.str = " overwrite on" } case 1559: yyDollar = yyS[yypt-2 : yypt+1] -//line sql.y:7718 +//line sql.y:7708 { yyVAL.str = " overwrite off" } case 1560: yyDollar = yyS[yypt-2 : yypt+1] -//line sql.y:7724 +//line sql.y:7714 { yyVAL.str = yyDollar[1].str + yyDollar[2].str } case 1561: yyDollar = yyS[yypt-0 : yypt+1] -//line sql.y:7729 +//line sql.y:7719 { yyVAL.str = "" } case 1562: yyDollar = yyS[yypt-2 : yypt+1] -//line sql.y:7733 +//line sql.y:7723 { yyVAL.str = " lines" + yyDollar[2].str } case 1563: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:7739 +//line sql.y:7729 { yyVAL.str = yyDollar[1].str } case 1564: yyDollar = yyS[yypt-2 : yypt+1] -//line sql.y:7743 +//line sql.y:7733 { yyVAL.str = yyDollar[1].str + yyDollar[2].str } case 1565: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:7749 +//line sql.y:7739 { yyVAL.str = " starting by " + encodeSQLString(yyDollar[3].str) } case 1566: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:7753 +//line sql.y:7743 { yyVAL.str = " terminated by " + encodeSQLString(yyDollar[3].str) } case 1567: yyDollar = yyS[yypt-0 : yypt+1] -//line sql.y:7758 +//line sql.y:7748 { yyVAL.str = "" } case 1568: yyDollar = yyS[yypt-2 : yypt+1] -//line sql.y:7762 +//line sql.y:7752 { yyVAL.str = " " + yyDollar[1].str + yyDollar[2].str } case 1569: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:7768 +//line sql.y:7758 { yyVAL.str = yyDollar[1].str } case 1570: yyDollar = yyS[yypt-2 : yypt+1] -//line sql.y:7772 +//line sql.y:7762 { 
yyVAL.str = yyDollar[1].str + yyDollar[2].str } case 1571: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:7778 +//line sql.y:7768 { yyVAL.str = " terminated by " + encodeSQLString(yyDollar[3].str) } case 1572: yyDollar = yyS[yypt-4 : yypt+1] -//line sql.y:7782 +//line sql.y:7772 { yyVAL.str = yyDollar[1].str + " enclosed by " + encodeSQLString(yyDollar[4].str) } case 1573: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:7786 +//line sql.y:7776 { yyVAL.str = " escaped by " + encodeSQLString(yyDollar[3].str) } case 1574: yyDollar = yyS[yypt-0 : yypt+1] -//line sql.y:7791 +//line sql.y:7781 { yyVAL.str = "" } case 1575: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:7795 +//line sql.y:7785 { yyVAL.str = " optionally" } case 1576: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL *Insert -//line sql.y:7808 +//line sql.y:7798 { yyLOCAL = &Insert{Rows: yyDollar[2].valuesUnion()} } @@ -22043,7 +22036,7 @@ yydefault: case 1577: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL *Insert -//line sql.y:7812 +//line sql.y:7802 { yyLOCAL = &Insert{Rows: yyDollar[1].selStmtUnion()} } @@ -22051,7 +22044,7 @@ yydefault: case 1578: yyDollar = yyS[yypt-5 : yypt+1] var yyLOCAL *Insert -//line sql.y:7816 +//line sql.y:7806 { yyLOCAL = &Insert{Columns: yyDollar[2].columnsUnion(), Rows: yyDollar[5].valuesUnion()} } @@ -22059,7 +22052,7 @@ yydefault: case 1579: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL *Insert -//line sql.y:7820 +//line sql.y:7810 { yyLOCAL = &Insert{Columns: []IdentifierCI{}, Rows: yyDollar[4].valuesUnion()} } @@ -22067,7 +22060,7 @@ yydefault: case 1580: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL *Insert -//line sql.y:7824 +//line sql.y:7814 { yyLOCAL = &Insert{Columns: yyDollar[2].columnsUnion(), Rows: yyDollar[4].selStmtUnion()} } @@ -22075,7 +22068,7 @@ yydefault: case 1581: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL Columns -//line sql.y:7830 +//line sql.y:7820 { yyLOCAL = Columns{yyDollar[1].identifierCI} } @@ -22083,21 +22076,21 @@ yydefault: case 1582: yyDollar = yyS[yypt-3 
: yypt+1] var yyLOCAL Columns -//line sql.y:7834 +//line sql.y:7824 { yyLOCAL = Columns{yyDollar[3].identifierCI} } yyVAL.union = yyLOCAL case 1583: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:7838 +//line sql.y:7828 { yySLICE := (*Columns)(yyIaddr(yyVAL.union)) *yySLICE = append(*yySLICE, yyDollar[3].identifierCI) } case 1584: yyDollar = yyS[yypt-5 : yypt+1] -//line sql.y:7842 +//line sql.y:7832 { yySLICE := (*Columns)(yyIaddr(yyVAL.union)) *yySLICE = append(*yySLICE, yyDollar[5].identifierCI) @@ -22105,7 +22098,7 @@ yydefault: case 1585: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL UpdateExprs -//line sql.y:7847 +//line sql.y:7837 { yyLOCAL = nil } @@ -22113,7 +22106,7 @@ yydefault: case 1586: yyDollar = yyS[yypt-5 : yypt+1] var yyLOCAL UpdateExprs -//line sql.y:7851 +//line sql.y:7841 { yyLOCAL = yyDollar[5].updateExprsUnion() } @@ -22121,14 +22114,14 @@ yydefault: case 1587: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL Values -//line sql.y:7857 +//line sql.y:7847 { yyLOCAL = Values{yyDollar[1].valTupleUnion()} } yyVAL.union = yyLOCAL case 1588: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:7861 +//line sql.y:7851 { yySLICE := (*Values)(yyIaddr(yyVAL.union)) *yySLICE = append(*yySLICE, yyDollar[3].valTupleUnion()) @@ -22136,7 +22129,7 @@ yydefault: case 1589: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL ValTuple -//line sql.y:7867 +//line sql.y:7857 { yyLOCAL = yyDollar[1].valTupleUnion() } @@ -22144,7 +22137,7 @@ yydefault: case 1590: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL ValTuple -//line sql.y:7871 +//line sql.y:7861 { yyLOCAL = ValTuple{} } @@ -22152,7 +22145,7 @@ yydefault: case 1591: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL ValTuple -//line sql.y:7877 +//line sql.y:7867 { yyLOCAL = ValTuple(yyDollar[2].exprsUnion()) } @@ -22160,7 +22153,7 @@ yydefault: case 1592: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL ValTuple -//line sql.y:7881 +//line sql.y:7871 { yyLOCAL = ValTuple(yyDollar[3].exprsUnion()) } @@ -22168,7 +22161,7 @@ yydefault: case 1593: 
yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL Expr -//line sql.y:7886 +//line sql.y:7876 { if len(yyDollar[1].valTupleUnion()) == 1 { yyLOCAL = yyDollar[1].valTupleUnion()[0] @@ -22180,14 +22173,14 @@ yydefault: case 1594: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL UpdateExprs -//line sql.y:7896 +//line sql.y:7886 { yyLOCAL = UpdateExprs{yyDollar[1].updateExprUnion()} } yyVAL.union = yyLOCAL case 1595: yyDollar = yyS[yypt-3 : yypt+1] -//line sql.y:7900 +//line sql.y:7890 { yySLICE := (*UpdateExprs)(yyIaddr(yyVAL.union)) *yySLICE = append(*yySLICE, yyDollar[3].updateExprUnion()) @@ -22195,21 +22188,21 @@ yydefault: case 1596: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL *UpdateExpr -//line sql.y:7906 +//line sql.y:7896 { yyLOCAL = &UpdateExpr{Name: yyDollar[1].colNameUnion(), Expr: yyDollar[3].exprUnion()} } yyVAL.union = yyLOCAL case 1598: yyDollar = yyS[yypt-2 : yypt+1] -//line sql.y:7913 +//line sql.y:7903 { yyVAL.str = "charset" } case 1601: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL Expr -//line sql.y:7923 +//line sql.y:7913 { yyLOCAL = NewStrLiteral(yyDollar[1].identifierCI.String()) } @@ -22217,7 +22210,7 @@ yydefault: case 1602: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL Expr -//line sql.y:7927 +//line sql.y:7917 { yyLOCAL = NewStrLiteral(yyDollar[1].str) } @@ -22225,7 +22218,7 @@ yydefault: case 1603: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL Expr -//line sql.y:7931 +//line sql.y:7921 { yyLOCAL = &Default{} } @@ -22233,7 +22226,7 @@ yydefault: case 1606: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL bool -//line sql.y:7940 +//line sql.y:7930 { yyLOCAL = false } @@ -22241,7 +22234,7 @@ yydefault: case 1607: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL bool -//line sql.y:7942 +//line sql.y:7932 { yyLOCAL = true } @@ -22249,7 +22242,7 @@ yydefault: case 1608: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL bool -//line sql.y:7945 +//line sql.y:7935 { yyLOCAL = false } @@ -22257,7 +22250,7 @@ yydefault: case 1609: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL bool 
-//line sql.y:7947 +//line sql.y:7937 { yyLOCAL = true } @@ -22265,7 +22258,7 @@ yydefault: case 1610: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL bool -//line sql.y:7950 +//line sql.y:7940 { yyLOCAL = false } @@ -22273,7 +22266,7 @@ yydefault: case 1611: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL bool -//line sql.y:7952 +//line sql.y:7942 { yyLOCAL = true } @@ -22281,7 +22274,7 @@ yydefault: case 1612: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL Ignore -//line sql.y:7955 +//line sql.y:7945 { yyLOCAL = false } @@ -22289,33 +22282,33 @@ yydefault: case 1613: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL Ignore -//line sql.y:7957 +//line sql.y:7947 { yyLOCAL = true } yyVAL.union = yyLOCAL case 1614: yyDollar = yyS[yypt-0 : yypt+1] -//line sql.y:7960 +//line sql.y:7950 { yyVAL.empty = struct{}{} } case 1615: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:7962 +//line sql.y:7952 { yyVAL.empty = struct{}{} } case 1616: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:7964 +//line sql.y:7954 { yyVAL.empty = struct{}{} } case 1617: yyDollar = yyS[yypt-5 : yypt+1] var yyLOCAL Statement -//line sql.y:7968 +//line sql.y:7958 { yyLOCAL = &CallProc{Name: yyDollar[2].tableName, Params: yyDollar[4].exprsUnion()} } @@ -22323,7 +22316,7 @@ yydefault: case 1618: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL Exprs -//line sql.y:7973 +//line sql.y:7963 { yyLOCAL = nil } @@ -22331,7 +22324,7 @@ yydefault: case 1619: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL Exprs -//line sql.y:7977 +//line sql.y:7967 { yyLOCAL = yyDollar[1].exprsUnion() } @@ -22339,7 +22332,7 @@ yydefault: case 1620: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL []*IndexOption -//line sql.y:7982 +//line sql.y:7972 { yyLOCAL = nil } @@ -22347,7 +22340,7 @@ yydefault: case 1621: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL []*IndexOption -//line sql.y:7984 +//line sql.y:7974 { yyLOCAL = []*IndexOption{yyDollar[1].indexOptionUnion()} } @@ -22355,75 +22348,75 @@ yydefault: case 1622: yyDollar = yyS[yypt-2 : yypt+1] var yyLOCAL 
*IndexOption -//line sql.y:7988 +//line sql.y:7978 { yyLOCAL = &IndexOption{Name: string(yyDollar[1].str), String: string(yyDollar[2].identifierCI.String())} } yyVAL.union = yyLOCAL case 1623: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:7994 +//line sql.y:7984 { yyVAL.identifierCI = yyDollar[1].identifierCI } case 1624: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:7998 +//line sql.y:7988 { yyVAL.identifierCI = NewIdentifierCI(string(yyDollar[1].str)) } case 1626: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:8005 +//line sql.y:7995 { yyVAL.identifierCI = NewIdentifierCI(string(yyDollar[1].str)) } case 1627: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:8011 +//line sql.y:8001 { yyVAL.identifierCS = NewIdentifierCS(string(yyDollar[1].str)) } case 1628: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:8015 +//line sql.y:8005 { yyVAL.identifierCS = NewIdentifierCS(string(yyDollar[1].str)) } case 1629: yyDollar = yyS[yypt-0 : yypt+1] -//line sql.y:8021 +//line sql.y:8011 { yyVAL.identifierCS = NewIdentifierCS("") } case 1630: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:8025 +//line sql.y:8015 { yyVAL.identifierCS = yyDollar[1].identifierCS } case 1632: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:8032 +//line sql.y:8022 { yyVAL.identifierCS = NewIdentifierCS(string(yyDollar[1].str)) } case 1633: yyDollar = yyS[yypt-0 : yypt+1] -//line sql.y:8037 +//line sql.y:8027 { yyVAL.str = "" } case 1634: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:8041 +//line sql.y:8031 { yyVAL.str = string(yyDollar[1].str) } case 1635: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL Columns -//line sql.y:8046 +//line sql.y:8036 { yyLOCAL = nil } @@ -22431,7 +22424,7 @@ yydefault: case 1636: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL Columns -//line sql.y:8050 +//line sql.y:8040 { yyLOCAL = yyDollar[2].columnsUnion() } @@ -22439,7 +22432,7 @@ yydefault: case 1637: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL interface{} -//line sql.y:8055 +//line sql.y:8045 { escape := "\\" yyLOCAL = 
&FieldsClause{ @@ -22451,7 +22444,7 @@ yydefault: case 1638: yyDollar = yyS[yypt-4 : yypt+1] var yyLOCAL interface{} -//line sql.y:8063 +//line sql.y:8053 { escape := yyDollar[4].itemUnion().(string) if escape != "\\" && len(escape) > 1 { @@ -22476,7 +22469,7 @@ yydefault: case 1639: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL interface{} -//line sql.y:8085 +//line sql.y:8075 { yyLOCAL = &LinesClause{Terminated: "\n"} } @@ -22484,7 +22477,7 @@ yydefault: case 1640: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL interface{} -//line sql.y:8089 +//line sql.y:8079 { yyLOCAL = &LinesClause{Starting: yyDollar[2].itemUnion().(string), Terminated: yyDollar[3].itemUnion().(string)} } @@ -22492,7 +22485,7 @@ yydefault: case 1641: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL interface{} -//line sql.y:8094 +//line sql.y:8084 { yyLOCAL = "\t" } @@ -22500,7 +22493,7 @@ yydefault: case 1642: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL interface{} -//line sql.y:8098 +//line sql.y:8088 { yyLOCAL = string(yyDollar[3].str) } @@ -22508,7 +22501,7 @@ yydefault: case 1643: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL interface{} -//line sql.y:8103 +//line sql.y:8093 { yyLOCAL = "" } @@ -22516,7 +22509,7 @@ yydefault: case 1644: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL interface{} -//line sql.y:8107 +//line sql.y:8097 { yyLOCAL = string(yyDollar[3].str) } @@ -22524,7 +22517,7 @@ yydefault: case 1645: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL interface{} -//line sql.y:8112 +//line sql.y:8102 { yyLOCAL = "\\" } @@ -22532,7 +22525,7 @@ yydefault: case 1646: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL interface{} -//line sql.y:8116 +//line sql.y:8106 { yyLOCAL = string(yyDollar[3].str) } @@ -22540,7 +22533,7 @@ yydefault: case 1647: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL interface{} -//line sql.y:8121 +//line sql.y:8111 { yyLOCAL = "" } @@ -22548,7 +22541,7 @@ yydefault: case 1648: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL interface{} -//line sql.y:8125 +//line sql.y:8115 { yyLOCAL = 
string(yyDollar[3].str) @@ -22557,7 +22550,7 @@ yydefault: case 1649: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL interface{} -//line sql.y:8131 +//line sql.y:8121 { yyLOCAL = "\n" } @@ -22565,7 +22558,7 @@ yydefault: case 1650: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL interface{} -//line sql.y:8135 +//line sql.y:8125 { yyLOCAL = string(yyDollar[3].str) } @@ -22573,7 +22566,7 @@ yydefault: case 1653: yyDollar = yyS[yypt-11 : yypt+1] var yyLOCAL Statement -//line sql.y:8144 +//line sql.y:8134 { x := &LoadDataStmt{ Action: string(yyDollar[1].str), @@ -22596,7 +22589,7 @@ yydefault: case 1654: yyDollar = yyS[yypt-3 : yypt+1] var yyLOCAL Statement -//line sql.y:8165 +//line sql.y:8155 { yyLOCAL = &Kill{Type: yyDollar[2].killTypeUnion(), ProcesslistID: convertStringToUInt64(yyDollar[3].str)} } @@ -22604,7 +22597,7 @@ yydefault: case 1655: yyDollar = yyS[yypt-0 : yypt+1] var yyLOCAL KillType -//line sql.y:8171 +//line sql.y:8161 { yyLOCAL = ConnectionType } @@ -22612,7 +22605,7 @@ yydefault: case 1656: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL KillType -//line sql.y:8175 +//line sql.y:8165 { yyLOCAL = ConnectionType } @@ -22620,42 +22613,42 @@ yydefault: case 1657: yyDollar = yyS[yypt-1 : yypt+1] var yyLOCAL KillType -//line sql.y:8179 +//line sql.y:8169 { yyLOCAL = QueryType } yyVAL.union = yyLOCAL case 2277: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:8827 +//line sql.y:8817 { } case 2278: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:8832 +//line sql.y:8822 { } case 2279: yyDollar = yyS[yypt-0 : yypt+1] -//line sql.y:8836 +//line sql.y:8826 { skipToEnd(yylex) } case 2280: yyDollar = yyS[yypt-0 : yypt+1] -//line sql.y:8841 +//line sql.y:8831 { skipToEnd(yylex) } case 2281: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:8845 +//line sql.y:8835 { skipToEnd(yylex) } case 2282: yyDollar = yyS[yypt-1 : yypt+1] -//line sql.y:8849 +//line sql.y:8839 { skipToEnd(yylex) } diff --git a/go/vt/sqlparser/sql.y b/go/vt/sqlparser/sql.y index f0f4376e918..8b796f7d8ba 100644 --- 
a/go/vt/sqlparser/sql.y +++ b/go/vt/sqlparser/sql.y @@ -16,6 +16,8 @@ limitations under the License. %{ package sqlparser +import "vitess.io/vitess/go/ptr" + func setParseTree(yylex yyLexer, stmt Statement) { yylex.(*Tokenizer).ParseTree = stmt } @@ -192,6 +194,7 @@ func markBindVariable(yylex yyLexer, bvar string) { partitionByType PartitionByType definer *Definer integer int + intPtr *int JSONTableExpr *JSONTableExpr jtColumnDefinition *JtColumnDefinition @@ -543,7 +546,8 @@ func markBindVariable(yylex yyLexer, bvar string) { %type array_opt %type column_type %type int_type decimal_type numeric_type time_type char_type spatial_type -%type length_opt partition_comment partition_data_directory partition_index_directory +%type partition_comment partition_data_directory partition_index_directory +%type length_opt %type func_datetime_precision %type charset_opt %type collate_opt @@ -1482,14 +1486,12 @@ column_attribute_list_opt: } | column_attribute_list_opt NULL { - val := true - $1.Null = &val + $1.Null = ptr.Of(true) $$ = $1 } | column_attribute_list_opt NOT NULL { - val := false - $1.Null = &val + $1.Null = ptr.Of(false) $$ = $1 } | column_attribute_list_opt DEFAULT openb expression closeb @@ -1543,14 +1545,12 @@ column_attribute_list_opt: } | column_attribute_list_opt VISIBLE { - val := false - $1.Invisible = &val + $1.Invisible = ptr.Of(false) $$ = $1 } | column_attribute_list_opt INVISIBLE { - val := true - $1.Invisible = &val + $1.Invisible = ptr.Of(true) $$ = $1 } | column_attribute_list_opt ENGINE_ATTRIBUTE equal_opt STRING @@ -1597,14 +1597,12 @@ generated_column_attribute_list_opt: } | generated_column_attribute_list_opt NULL { - val := true - $1.Null = &val + $1.Null = ptr.Of(true) $$ = $1 } | generated_column_attribute_list_opt NOT NULL { - val := false - $1.Null = &val + $1.Null = ptr.Of(false) $$ = $1 } | generated_column_attribute_list_opt COMMENT_KEYWORD STRING @@ -1619,14 +1617,12 @@ generated_column_attribute_list_opt: } | 
generated_column_attribute_list_opt VISIBLE { - val := false - $1.Invisible = &val + $1.Invisible = ptr.Of(false) $$ = $1 } | generated_column_attribute_list_opt INVISIBLE { - val := true - $1.Invisible = &val + $1.Invisible = ptr.Of(true) $$ = $1 } @@ -2231,7 +2227,7 @@ length_opt: } | '(' INTEGRAL ')' { - $$ = NewIntLiteral($2) + $$ = ptr.Of(convertStringToInt($2)) } double_length_opt: @@ -2241,8 +2237,8 @@ double_length_opt: | '(' INTEGRAL ',' INTEGRAL ')' { $$ = LengthScaleOption{ - Length: NewIntLiteral($2), - Scale: NewIntLiteral($4), + Length: ptr.Of(convertStringToInt($2)), + Scale: ptr.Of(convertStringToInt($4)), } } @@ -2254,7 +2250,7 @@ double_length_opt | '(' INTEGRAL ')' { $$ = LengthScaleOption{ - Length: NewIntLiteral($2), + Length: ptr.Of(convertStringToInt($2)), } } @@ -2265,14 +2261,14 @@ decimal_length_opt: | '(' INTEGRAL ')' { $$ = LengthScaleOption{ - Length: NewIntLiteral($2), + Length: ptr.Of(convertStringToInt($2)), } } | '(' INTEGRAL ',' INTEGRAL ')' { $$ = LengthScaleOption{ - Length: NewIntLiteral($2), - Scale: NewIntLiteral($4), + Length: ptr.Of(convertStringToInt($2)), + Scale: ptr.Of(convertStringToInt($4)), } } @@ -3050,13 +3046,11 @@ alter_option: } | ALTER column_opt column_name SET VISIBLE { - val := false - $$ = &AlterColumn{Column: $3, Invisible:&val} + $$ = &AlterColumn{Column: $3, Invisible: ptr.Of(false)} } | ALTER column_opt column_name SET INVISIBLE { - val := true - $$ = &AlterColumn{Column: $3, Invisible:&val} + $$ = &AlterColumn{Column: $3, Invisible: ptr.Of(true)} } | ALTER CHECK ci_identifier enforced { @@ -3885,14 +3879,12 @@ partition_definition_attribute_list_opt: } | partition_definition_attribute_list_opt partition_max_rows { - val := $2 - $1.MaxRows = &val + $1.MaxRows = ptr.Of($2) $$ = $1 } | partition_definition_attribute_list_opt partition_min_rows { - val := $2 - $1.MinRows = &val + $1.MinRows = ptr.Of($2) $$ = $1 } | partition_definition_attribute_list_opt partition_tablespace_name @@ -3953,14 +3945,12 @@ 
subpartition_definition_attribute_list_opt: } | subpartition_definition_attribute_list_opt partition_max_rows { - val := $2 - $1.MaxRows = &val + $1.MaxRows = ptr.Of($2) $$ = $1 } | subpartition_definition_attribute_list_opt partition_min_rows { - val := $2 - $1.MinRows = &val + $1.MinRows = ptr.Of($2) $$ = $1 } | subpartition_definition_attribute_list_opt partition_tablespace_name @@ -7201,11 +7191,11 @@ convert_type_weight_string: } | AS BINARY '(' INTEGRAL ')' { - $$ = &ConvertType{Type: string($2), Length: NewIntLiteral($4)} + $$ = &ConvertType{Type: string($2), Length: ptr.Of(convertStringToInt($4))} } | AS CHAR '(' INTEGRAL ')' { - $$ = &ConvertType{Type: string($2), Length: NewIntLiteral($4)} + $$ = &ConvertType{Type: string($2), Length: ptr.Of(convertStringToInt($4))} } convert_type: diff --git a/go/vt/sqlparser/utils.go b/go/vt/sqlparser/utils.go index 0f3c66f2ea3..e78651a53db 100644 --- a/go/vt/sqlparser/utils.go +++ b/go/vt/sqlparser/utils.go @@ -19,6 +19,7 @@ package sqlparser import ( "fmt" "sort" + "strings" querypb "vitess.io/vitess/go/vt/proto/query" ) @@ -160,3 +161,22 @@ func ReplaceTableQualifiers(query, olddb, newdb string) (string, error) { } return query, nil } + +// ReplaceTableQualifiersMultiQuery accepts a multi-query string and modifies it +// via ReplaceTableQualifiers, one query at a time. +func ReplaceTableQualifiersMultiQuery(multiQuery, olddb, newdb string) (string, error) { + queries, err := SplitStatementToPieces(multiQuery) + if err != nil { + return multiQuery, err + } + var modifiedQueries []string + for _, query := range queries { + // Replace any provided sidecar database qualifiers with the correct one. 
+ query, err := ReplaceTableQualifiers(query, olddb, newdb) + if err != nil { + return query, err + } + modifiedQueries = append(modifiedQueries, query) + } + return strings.Join(modifiedQueries, ";"), nil +} diff --git a/go/vt/sqlparser/utils_test.go b/go/vt/sqlparser/utils_test.go index 63c9b10ba43..8a288dfbbcd 100644 --- a/go/vt/sqlparser/utils_test.go +++ b/go/vt/sqlparser/utils_test.go @@ -275,3 +275,71 @@ func TestReplaceTableQualifiers(t *testing.T) { }) } } + +func TestReplaceTableQualifiersMultiQuery(t *testing.T) { + origDB := "_vt" + tests := []struct { + name string + in string + newdb string + out string + wantErr bool + }{ + { + name: "invalid select", + in: "select frog bar person", + out: "", + wantErr: true, + }, + { + name: "simple select", + in: "select * from _vt.foo", + out: "select * from foo", + }, + { + name: "simple select with new db", + in: "select * from _vt.foo", + newdb: "_vt_test", + out: "select * from _vt_test.foo", + }, + { + name: "simple select with new db same", + in: "select * from _vt.foo where id=1", // should be unchanged + newdb: "_vt", + out: "select * from _vt.foo where id=1", + }, + { + name: "simple select with new db needing escaping", + in: "select * from _vt.foo", + newdb: "1_vt-test", + out: "select * from `1_vt-test`.foo", + }, + { + name: "multi query", + in: "select * from _vt.foo ; select * from _vt.bar", + out: "select * from foo;select * from bar", + }, + { + name: "multi query with new db", + in: "select * from _vt.foo ; select * from _vt.bar", + newdb: "_vt_test", + out: "select * from _vt_test.foo;select * from _vt_test.bar", + }, + { + name: "multi query with error", + in: "select * from _vt.foo ; select * from _vt.bar ; sel ect fr om wh at", + wantErr: true, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := ReplaceTableQualifiersMultiQuery(tt.in, origDB, tt.newdb) + if tt.wantErr { + require.Error(t, err) + } else { + require.NoError(t, err) + } + require.Equal(t, 
tt.out, got, "RemoveTableQualifiers(); in: %s, out: %s", tt.in, got) + }) + } +} diff --git a/go/vt/srvtopo/watch.go b/go/vt/srvtopo/watch.go index 36d8fd428bd..4a0ccda2d59 100644 --- a/go/vt/srvtopo/watch.go +++ b/go/vt/srvtopo/watch.go @@ -23,6 +23,7 @@ import ( "time" "vitess.io/vitess/go/stats" + "vitess.io/vitess/go/timer" "vitess.io/vitess/go/vt/log" "vitess.io/vitess/go/vt/topo" ) @@ -204,8 +205,11 @@ func (entry *watchEntry) onErrorLocked(ctx context.Context, err error, init bool entry.value = nil } } else { - entry.lastError = fmt.Errorf("ResilientWatch stream failed for %v: %w", entry.key, err) - log.Errorf("%v", entry.lastError) + if !topo.IsErrType(err, topo.Interrupted) { + // No need to log if we're explicitly interrupted. + entry.lastError = fmt.Errorf("ResilientWatch stream failed for %v: %w", entry.key, err) + log.Errorf("%v", entry.lastError) + } // Even though we didn't get a new value, update the lastValueTime // here since the watch was successfully running before and we want @@ -224,8 +228,7 @@ func (entry *watchEntry) onErrorLocked(ctx context.Context, err error, init bool if len(entry.listeners) > 0 && !topo.IsErrType(err, topo.Interrupted) { go func() { - time.Sleep(entry.rw.cacheRefreshInterval) - + _ = timer.SleepContext(ctx, entry.rw.cacheRefreshInterval) entry.mutex.Lock() entry.ensureWatchingLocked(ctx) entry.mutex.Unlock() diff --git a/go/vt/srvtopo/watch_srvkeyspace.go b/go/vt/srvtopo/watch_srvkeyspace.go index cefe95c6951..ac2d8c0bac1 100644 --- a/go/vt/srvtopo/watch_srvkeyspace.go +++ b/go/vt/srvtopo/watch_srvkeyspace.go @@ -40,7 +40,7 @@ func (k *srvKeyspaceKey) String() string { func NewSrvKeyspaceWatcher(ctx context.Context, topoServer *topo.Server, counts *stats.CountersWithSingleLabel, cacheRefresh, cacheTTL time.Duration) *SrvKeyspaceWatcher { watch := func(entry *watchEntry) { key := entry.key.(*srvKeyspaceKey) - requestCtx, requestCancel := context.WithCancel(context.Background()) + requestCtx, requestCancel := 
context.WithCancel(ctx) defer requestCancel() current, changes, err := topoServer.WatchSrvKeyspace(requestCtx, key.cell, key.keyspace) diff --git a/go/vt/throttler/throttler.go b/go/vt/throttler/throttler.go index 83a1c52225e..a83e8def880 100644 --- a/go/vt/throttler/throttler.go +++ b/go/vt/throttler/throttler.go @@ -33,6 +33,8 @@ import ( "sync" "time" + "vitess.io/vitess/go/vt/proto/topodata" + "vitess.io/vitess/go/vt/discovery" "vitess.io/vitess/go/vt/log" @@ -224,6 +226,28 @@ func (t *Throttler) Throttle(threadID int) time.Duration { return t.threadThrottlers[threadID].throttle(t.nowFunc()) } +// MaxLag returns the max of all the last replication lag values seen across all tablets of +// the provided type, excluding ignored tablets. +func (t *Throttler) MaxLag(tabletType topodata.TabletType) uint32 { + cache := t.maxReplicationLagModule.lagCacheByType(tabletType) + + var maxLag uint32 + cacheEntries := cache.entries + + for key := range cacheEntries { + if cache.isIgnored(key) { + continue + } + + lag := cache.latest(key).Stats.ReplicationLagSeconds + if lag > maxLag { + maxLag = lag + } + } + + return maxLag +} + // ThreadFinished marks threadID as finished and redistributes the thread's // rate allotment across the other threads. // After ThreadFinished() is called, Throttle() must not be called anymore. diff --git a/go/vt/topo/memorytopo/election.go b/go/vt/topo/memorytopo/election.go index 868a2c53287..ad173695099 100644 --- a/go/vt/topo/memorytopo/election.go +++ b/go/vt/topo/memorytopo/election.go @@ -26,7 +26,7 @@ import ( // NewLeaderParticipation is part of the topo.Server interface func (c *Conn) NewLeaderParticipation(name, id string) (topo.LeaderParticipation, error) { - if c.closed { + if c.closed.Load() { return nil, ErrConnectionClosed } @@ -72,7 +72,7 @@ type cLeaderParticipation struct { // WaitForLeadership is part of the topo.LeaderParticipation interface. 
func (mp *cLeaderParticipation) WaitForLeadership() (context.Context, error) { - if mp.c.closed { + if mp.c.closed.Load() { return nil, ErrConnectionClosed } @@ -120,7 +120,7 @@ func (mp *cLeaderParticipation) Stop() { // GetCurrentLeaderID is part of the topo.LeaderParticipation interface func (mp *cLeaderParticipation) GetCurrentLeaderID(ctx context.Context) (string, error) { - if mp.c.closed { + if mp.c.closed.Load() { return "", ErrConnectionClosed } @@ -139,7 +139,7 @@ func (mp *cLeaderParticipation) GetCurrentLeaderID(ctx context.Context) (string, // WaitForNewLeader is part of the topo.LeaderParticipation interface func (mp *cLeaderParticipation) WaitForNewLeader(ctx context.Context) (<-chan string, error) { - if mp.c.closed { + if mp.c.closed.Load() { return nil, ErrConnectionClosed } diff --git a/go/vt/topo/memorytopo/lock.go b/go/vt/topo/memorytopo/lock.go index c15fb9099bb..5c2a2462495 100644 --- a/go/vt/topo/memorytopo/lock.go +++ b/go/vt/topo/memorytopo/lock.go @@ -112,7 +112,7 @@ func (ld *memoryTopoLockDescriptor) Unlock(ctx context.Context) error { } func (c *Conn) unlock(ctx context.Context, dirPath string) error { - if c.closed { + if c.closed.Load() { return ErrConnectionClosed } diff --git a/go/vt/topo/memorytopo/memorytopo.go b/go/vt/topo/memorytopo/memorytopo.go index ae33bb73edd..12dcf4a98cb 100644 --- a/go/vt/topo/memorytopo/memorytopo.go +++ b/go/vt/topo/memorytopo/memorytopo.go @@ -25,6 +25,7 @@ import ( "math/rand" "strings" "sync" + "sync/atomic" "vitess.io/vitess/go/vt/log" "vitess.io/vitess/go/vt/topo" @@ -126,13 +127,13 @@ type Conn struct { factory *Factory cell string serverAddr string - closed bool + closed atomic.Bool } // dial returns immediately, unless the Conn points to the sentinel // UnreachableServerAddr, in which case it will block until the context expires. 
func (c *Conn) dial(ctx context.Context) error { - if c.closed { + if c.closed.Load() { return ErrConnectionClosed } if c.serverAddr == UnreachableServerAddr { @@ -144,7 +145,7 @@ func (c *Conn) dial(ctx context.Context) error { // Close is part of the topo.Conn interface. func (c *Conn) Close() { - c.closed = true + c.closed.Store(true) } type watch struct { diff --git a/go/vt/topo/memorytopo/watch.go b/go/vt/topo/memorytopo/watch.go index 73b2d248434..0f245c95b5f 100644 --- a/go/vt/topo/memorytopo/watch.go +++ b/go/vt/topo/memorytopo/watch.go @@ -25,7 +25,7 @@ import ( // Watch is part of the topo.Conn interface. func (c *Conn) Watch(ctx context.Context, filePath string) (*topo.WatchData, <-chan *topo.WatchData, error) { - if c.closed { + if c.closed.Load() { return nil, nil, ErrConnectionClosed } @@ -75,7 +75,7 @@ func (c *Conn) Watch(ctx context.Context, filePath string) (*topo.WatchData, <-c // WatchRecursive is part of the topo.Conn interface. func (c *Conn) WatchRecursive(ctx context.Context, dirpath string) ([]*topo.WatchDataRecursive, <-chan *topo.WatchDataRecursive, error) { - if c.closed { + if c.closed.Load() { return nil, nil, ErrConnectionClosed } diff --git a/go/vt/vtadmin/api.go b/go/vt/vtadmin/api.go index 92d11ba18ea..d12081d4073 100644 --- a/go/vt/vtadmin/api.go +++ b/go/vt/vtadmin/api.go @@ -37,6 +37,7 @@ import ( "vitess.io/vitess/go/vt/concurrency" "vitess.io/vitess/go/vt/log" "vitess.io/vitess/go/vt/topo" + "vitess.io/vitess/go/vt/topo/memorytopo" "vitess.io/vitess/go/vt/topo/topoproto" "vitess.io/vitess/go/vt/vtadmin/cluster" "vitess.io/vitess/go/vt/vtadmin/cluster/dynamic" @@ -2148,7 +2149,8 @@ func (api *API) VTExplain(ctx context.Context, req *vtadminpb.VTExplainRequest) return nil, er.Error() } - vte, err := vtexplain.Init(ctx, srvVSchema, schema, shardMap, &vtexplain.Options{ReplicationMode: "ROW"}) + ts := memorytopo.NewServer(ctx, vtexplain.Cell) + vte, err := vtexplain.Init(ctx, ts, srvVSchema, schema, shardMap, 
&vtexplain.Options{ReplicationMode: "ROW"}) if err != nil { return nil, fmt.Errorf("error initilaizing vtexplain: %w", err) } diff --git a/go/vt/vtctl/workflow/server.go b/go/vt/vtctl/workflow/server.go index 7182020e36b..0945331d907 100644 --- a/go/vt/vtctl/workflow/server.go +++ b/go/vt/vtctl/workflow/server.go @@ -3182,6 +3182,20 @@ func (s *Server) switchWrites(ctx context.Context, req *vtctldatapb.WorkflowSwit sw.cancelMigration(ctx, sm) return handleError("failed to create the reverse vreplication streams", err) } + + // Initialize any target sequences, if there are any, before allowing new writes. + if req.InitializeTargetSequences && len(sequenceMetadata) > 0 { + ts.Logger().Infof("Initializing target sequences") + // Writes are blocked so we can safely initialize the sequence tables but + // we also want to use a shorter timeout than the parent context. + // We use at most half of the overall timeout. + initSeqCtx, cancel := context.WithTimeout(ctx, timeout/2) + defer cancel() + if err := sw.initializeTargetSequences(initSeqCtx, sequenceMetadata); err != nil { + sw.cancelMigration(ctx, sm) + return handleError(fmt.Sprintf("failed to initialize the sequences used in the %s keyspace", ts.TargetKeyspaceName()), err) + } + } } else { if cancel { return handleError("invalid cancel", vterrors.Errorf(vtrpcpb.Code_FAILED_PRECONDITION, "traffic switching has reached the point of no return, cannot cancel")) @@ -3198,17 +3212,6 @@ func (s *Server) switchWrites(ctx context.Context, req *vtctldatapb.WorkflowSwit if err := sw.createJournals(ctx, sourceWorkflows); err != nil { return handleError("failed to create the journal", err) } - // Initialize any target sequences, if there are any, before allowing new writes. - if req.InitializeTargetSequences && len(sequenceMetadata) > 0 { - // Writes are blocked so we can safely initialize the sequence tables but - // we also want to use a shorter timeout than the parent context. 
- // We use up at most half of the overall timeout. - initSeqCtx, cancel := context.WithTimeout(ctx, timeout/2) - defer cancel() - if err := sw.initializeTargetSequences(initSeqCtx, sequenceMetadata); err != nil { - return handleError(fmt.Sprintf("failed to initialize the sequences used in the %s keyspace", ts.TargetKeyspaceName()), err) - } - } if err := sw.allowTargetWrites(ctx); err != nil { return handleError(fmt.Sprintf("failed to allow writes in the %s keyspace", ts.TargetKeyspaceName()), err) } diff --git a/go/vt/vtctl/workflow/traffic_switcher.go b/go/vt/vtctl/workflow/traffic_switcher.go index db932d2d4e4..ce18e55fc51 100644 --- a/go/vt/vtctl/workflow/traffic_switcher.go +++ b/go/vt/vtctl/workflow/traffic_switcher.go @@ -38,6 +38,7 @@ import ( "vitess.io/vitess/go/vt/logutil" "vitess.io/vitess/go/vt/sqlparser" "vitess.io/vitess/go/vt/topo" + "vitess.io/vitess/go/vt/topo/topoproto" "vitess.io/vitess/go/vt/topotools" "vitess.io/vitess/go/vt/vterrors" "vitess.io/vitess/go/vt/vtgate/vindexes" @@ -1423,13 +1424,17 @@ func (ts *trafficSwitcher) initializeTargetSequences(ctx context.Context, sequen MaxRows: 1, }) if terr != nil || len(qr.Rows) != 1 { - return vterrors.Errorf(vtrpcpb.Code_INTERNAL, "failed to get the max used sequence value for target table %s.%s in order to initialize the backing sequence table: %v", - ts.targetKeyspace, sequenceMetadata.usingTableName, terr) + return vterrors.Errorf(vtrpcpb.Code_INTERNAL, "failed to get the max used sequence value for target table %s.%s on tablet %s in order to initialize the backing sequence table: %v", + ts.targetKeyspace, sequenceMetadata.usingTableName, topoproto.TabletAliasString(primary.Alias), terr) } - maxID, terr := sqltypes.Proto3ToResult(qr).Rows[0][0].ToInt64() - if terr != nil { - return vterrors.Errorf(vtrpcpb.Code_INTERNAL, "failed to get the max used sequence value for target table %s.%s in order to initialize the backing sequence table: %v", - ts.targetKeyspace, sequenceMetadata.usingTableName, 
terr) + rawVal := sqltypes.Proto3ToResult(qr).Rows[0][0] + maxID := int64(0) + if !rawVal.IsNull() { // If it's NULL then there are no rows and 0 remains the max + maxID, terr = rawVal.ToInt64() + if terr != nil { + return vterrors.Errorf(vtrpcpb.Code_INTERNAL, "failed to get the max used sequence value for target table %s.%s on tablet %s in order to initialize the backing sequence table: %v", + ts.targetKeyspace, sequenceMetadata.usingTableName, topoproto.TabletAliasString(primary.Alias), terr) + } } srMu.Lock() defer srMu.Unlock() diff --git a/go/vt/vterrors/state.go b/go/vt/vterrors/state.go index 5e3dcf22dfb..a1c6ebef3c9 100644 --- a/go/vt/vterrors/state.go +++ b/go/vt/vterrors/state.go @@ -47,6 +47,7 @@ const ( WrongValueCountOnRow WrongValue WrongArguments + InvalidGroupFuncUse // failed precondition NoDB diff --git a/go/vt/vtexplain/vtexplain.go b/go/vt/vtexplain/vtexplain.go index 55e76606e08..43ad99e9d33 100644 --- a/go/vt/vtexplain/vtexplain.go +++ b/go/vt/vtexplain/vtexplain.go @@ -31,6 +31,7 @@ import ( "vitess.io/vitess/go/vt/discovery" "vitess.io/vitess/go/vt/servenv" + "vitess.io/vitess/go/vt/topo" "vitess.io/vitess/go/vt/vtgate" "vitess.io/vitess/go/jsonutil" @@ -54,7 +55,7 @@ func init() { } const ( - vtexplainCell = "explainCell" + Cell = "explainCell" // ModeMulti is the default mode with autocommit implemented at vtgate ModeMulti = "multi" @@ -181,7 +182,7 @@ type TabletActions struct { } // Init sets up the fake execution environment -func Init(ctx context.Context, vSchemaStr, sqlSchema, ksShardMapStr string, opts *Options) (*VTExplain, error) { +func Init(ctx context.Context, ts *topo.Server, vSchemaStr, sqlSchema, ksShardMapStr string, opts *Options) (*VTExplain, error) { // Verify options if opts.ReplicationMode != "ROW" && opts.ReplicationMode != "STATEMENT" { return nil, fmt.Errorf("invalid replication mode \"%s\"", opts.ReplicationMode) @@ -201,7 +202,7 @@ func Init(ctx context.Context, vSchemaStr, sqlSchema, ksShardMapStr string, opts 
Autocommit: true, }} vte.setGlobalTabletEnv(tabletEnv) - err = vte.initVtgateExecutor(ctx, vSchemaStr, ksShardMapStr, opts) + err = vte.initVtgateExecutor(ctx, ts, vSchemaStr, ksShardMapStr, opts) if err != nil { return nil, fmt.Errorf("initVtgateExecutor: %v", err.Error()) } diff --git a/go/vt/vtexplain/vtexplain_test.go b/go/vt/vtexplain/vtexplain_test.go index 54f1efbc522..b1e93855081 100644 --- a/go/vt/vtexplain/vtexplain_test.go +++ b/go/vt/vtexplain/vtexplain_test.go @@ -29,6 +29,7 @@ import ( "github.com/stretchr/testify/require" "vitess.io/vitess/go/test/utils" + "vitess.io/vitess/go/vt/topo/memorytopo" "vitess.io/vitess/go/vt/key" "vitess.io/vitess/go/vt/proto/topodata" @@ -49,7 +50,7 @@ type testopts struct { shardmap map[string]map[string]*topo.ShardInfo } -func initTest(ctx context.Context, mode string, opts *Options, topts *testopts, t *testing.T) *VTExplain { +func initTest(ctx context.Context, ts *topo.Server, mode string, opts *Options, topts *testopts, t *testing.T) *VTExplain { schema, err := os.ReadFile("testdata/test-schema.sql") require.NoError(t, err) @@ -65,7 +66,7 @@ func initTest(ctx context.Context, mode string, opts *Options, topts *testopts, } opts.ExecutionMode = mode - vte, err := Init(ctx, string(vSchema), string(schema), shardmap, opts) + vte, err := Init(ctx, ts, string(vSchema), string(schema), shardmap, opts) require.NoError(t, err, "vtexplain Init error\n%s", string(schema)) return vte } @@ -88,7 +89,8 @@ func runTestCase(testcase, mode string, opts *Options, topts *testopts, t *testi t.Run(testcase, func(t *testing.T) { ctx := utils.LeakCheckContext(t) - vte := initTest(ctx, mode, opts, topts, t) + ts := memorytopo.NewServer(ctx, Cell) + vte := initTest(ctx, ts, mode, opts, topts, t) defer vte.Stop() sqlFile := fmt.Sprintf("testdata/%s-queries.sql", testcase) @@ -154,8 +156,8 @@ func TestExplain(t *testing.T) { func TestErrors(t *testing.T) { ctx := utils.LeakCheckContext(t) - - vte := initTest(ctx, ModeMulti, defaultTestOpts(), 
&testopts{}, t) + ts := memorytopo.NewServer(ctx, Cell) + vte := initTest(ctx, ts, ModeMulti, defaultTestOpts(), &testopts{}, t) defer vte.Stop() tests := []struct { @@ -194,8 +196,8 @@ func TestErrors(t *testing.T) { func TestJSONOutput(t *testing.T) { ctx := utils.LeakCheckContext(t) - - vte := initTest(ctx, ModeMulti, defaultTestOpts(), &testopts{}, t) + ts := memorytopo.NewServer(ctx, Cell) + vte := initTest(ctx, ts, ModeMulti, defaultTestOpts(), &testopts{}, t) defer vte.Stop() sql := "select 1 from user where id = 1" explains, err := vte.Run(sql) @@ -344,7 +346,8 @@ func TestInit(t *testing.T) { } }` schema := "create table table_missing_primary_vindex (id int primary key)" - _, err := Init(ctx, vschema, schema, "", defaultTestOpts()) + ts := memorytopo.NewServer(ctx, Cell) + _, err := Init(ctx, ts, vschema, schema, "", defaultTestOpts()) require.Error(t, err) require.Contains(t, err.Error(), "missing primary col vindex") } diff --git a/go/vt/vtexplain/vtexplain_vtgate.go b/go/vt/vtexplain/vtexplain_vtgate.go index aa219fdb1eb..107e86dac33 100644 --- a/go/vt/vtexplain/vtexplain_vtgate.go +++ b/go/vt/vtexplain/vtexplain_vtgate.go @@ -22,26 +22,23 @@ package vtexplain import ( "context" "fmt" + "path" "sort" "strings" "vitess.io/vitess/go/cache/theine" - "vitess.io/vitess/go/vt/vtgate/logstats" - "vitess.io/vitess/go/vt/vtgate/vindexes" - - "vitess.io/vitess/go/vt/topo" - "vitess.io/vitess/go/vt/topo/memorytopo" - - "vitess.io/vitess/go/vt/vterrors" - "vitess.io/vitess/go/json2" "vitess.io/vitess/go/streamlog" "vitess.io/vitess/go/vt/discovery" "vitess.io/vitess/go/vt/key" "vitess.io/vitess/go/vt/log" "vitess.io/vitess/go/vt/srvtopo" + "vitess.io/vitess/go/vt/topo" + "vitess.io/vitess/go/vt/vterrors" "vitess.io/vitess/go/vt/vtgate" "vitess.io/vitess/go/vt/vtgate/engine" + "vitess.io/vitess/go/vt/vtgate/logstats" + "vitess.io/vitess/go/vt/vtgate/vindexes" "vitess.io/vitess/go/vt/vttablet/queryservice" querypb "vitess.io/vitess/go/vt/proto/query" @@ -50,14 +47,14 
@@ import ( vtgatepb "vitess.io/vitess/go/vt/proto/vtgate" ) -func (vte *VTExplain) initVtgateExecutor(ctx context.Context, vSchemaStr, ksShardMapStr string, opts *Options) error { +func (vte *VTExplain) initVtgateExecutor(ctx context.Context, ts *topo.Server, vSchemaStr, ksShardMapStr string, opts *Options) error { vte.explainTopo = &ExplainTopo{NumShards: opts.NumShards} - vte.explainTopo.TopoServer = memorytopo.NewServer(ctx, vtexplainCell) + vte.explainTopo.TopoServer = ts vte.healthCheck = discovery.NewFakeHealthCheck(nil) - resolver := vte.newFakeResolver(ctx, opts, vte.explainTopo, vtexplainCell) + resolver := vte.newFakeResolver(ctx, opts, vte.explainTopo, Cell) - err := vte.buildTopology(ctx, opts, vSchemaStr, ksShardMapStr, opts.NumShards) + err := vte.buildTopology(ctx, ts, opts, vSchemaStr, ksShardMapStr, opts.NumShards) if err != nil { return err } @@ -75,7 +72,7 @@ func (vte *VTExplain) initVtgateExecutor(ctx context.Context, vSchemaStr, ksShar var schemaTracker vtgate.SchemaInfo // no schema tracker for these tests queryLogBufferSize := 10 plans := theine.NewStore[vtgate.PlanCacheKey, *engine.Plan](4*1024*1024, false) - vte.vtgateExecutor = vtgate.NewExecutor(ctx, vte.explainTopo, vtexplainCell, resolver, opts.Normalize, false, streamSize, plans, schemaTracker, false, opts.PlannerVersion) + vte.vtgateExecutor = vtgate.NewExecutor(ctx, vte.explainTopo, Cell, resolver, opts.Normalize, false, streamSize, plans, schemaTracker, false, opts.PlannerVersion) vte.vtgateExecutor.SetQueryLogger(streamlog.New[*logstats.LogStats]("VTGate", queryLogBufferSize)) return nil @@ -95,7 +92,7 @@ func (vte *VTExplain) newFakeResolver(ctx context.Context, opts *Options, serv s return vtgate.NewResolver(srvResolver, serv, cell, sc) } -func (vte *VTExplain) buildTopology(ctx context.Context, opts *Options, vschemaStr string, ksShardMapStr string, numShardsPerKeyspace int) error { +func (vte *VTExplain) buildTopology(ctx context.Context, ts *topo.Server, opts *Options, 
vschemaStr string, ksShardMapStr string, numShardsPerKeyspace int) error { vte.explainTopo.Lock.Lock() defer vte.explainTopo.Lock.Unlock() @@ -120,6 +117,10 @@ func (vte *VTExplain) buildTopology(ctx context.Context, opts *Options, vschemaS return err } + conn, err := ts.ConnForCell(ctx, Cell) + if err != nil { + return err + } vte.explainTopo.TabletConns = make(map[string]*explainTablet) vte.explainTopo.KeyspaceShards = make(map[string]map[string]*topodatapb.ShardReference) for ks, vschema := range vte.explainTopo.Keyspaces { @@ -130,6 +131,32 @@ func (vte *VTExplain) buildTopology(ctx context.Context, opts *Options, vschemaS vte.explainTopo.KeyspaceShards[ks] = make(map[string]*topodatapb.ShardReference) + srvPath := path.Join(topo.KeyspacesPath, ks, topo.SrvKeyspaceFile) + srvKeyspace := &topodatapb.SrvKeyspace{ + Partitions: []*topodatapb.SrvKeyspace_KeyspacePartition{ + { + ServedType: topodatapb.TabletType_PRIMARY, + ShardReferences: shards, + }, + { + ServedType: topodatapb.TabletType_REPLICA, + ShardReferences: shards, + }, + { + ServedType: topodatapb.TabletType_RDONLY, + ShardReferences: shards, + }, + }, + } + data, err := srvKeyspace.MarshalVT() + if err != nil { + return err + } + _, err = conn.Update(ctx, srvPath, data, nil) + if err != nil { + return err + } + for _, shard := range shards { // If the topology is in the middle of a reshard, there can be two shards covering the same key range (e.g. // both source shard 80- and target shard 80-c0 cover the keyrange 80-c0). 
For the purposes of explain, we @@ -142,14 +169,13 @@ func (vte *VTExplain) buildTopology(ctx context.Context, opts *Options, vschemaS hostname := fmt.Sprintf("%s/%s", ks, shard.Name) log.Infof("registering test tablet %s for keyspace %s shard %s", hostname, ks, shard.Name) - tablet := vte.healthCheck.AddFakeTablet(vtexplainCell, hostname, 1, ks, shard.Name, topodatapb.TabletType_PRIMARY, true, 1, nil, func(t *topodatapb.Tablet) queryservice.QueryService { - return vte.newTablet(ctx, opts, t) + tablet := vte.healthCheck.AddFakeTablet(Cell, hostname, 1, ks, shard.Name, topodatapb.TabletType_PRIMARY, true, 1, nil, func(t *topodatapb.Tablet) queryservice.QueryService { + return vte.newTablet(ctx, opts, t, ts) }) vte.explainTopo.TabletConns[hostname] = tablet.(*explainTablet) vte.explainTopo.KeyspaceShards[ks][shard.Name] = shard } } - return err } diff --git a/go/vt/vtexplain/vtexplain_vttablet.go b/go/vt/vtexplain/vtexplain_vttablet.go index ee222f75926..9eca051ecf7 100644 --- a/go/vt/vtexplain/vtexplain_vttablet.go +++ b/go/vt/vtexplain/vtexplain_vttablet.go @@ -24,6 +24,7 @@ import ( "sync" "vitess.io/vitess/go/vt/sidecardb" + "vitess.io/vitess/go/vt/topo" "vitess.io/vitess/go/mysql" "vitess.io/vitess/go/mysql/collations" @@ -34,7 +35,6 @@ import ( "vitess.io/vitess/go/vt/log" "vitess.io/vitess/go/vt/mysqlctl" "vitess.io/vitess/go/vt/sqlparser" - "vitess.io/vitess/go/vt/topo/memorytopo" "vitess.io/vitess/go/vt/topo/topoproto" "vitess.io/vitess/go/vt/vtgate/evalengine" @@ -102,7 +102,7 @@ type explainTablet struct { var _ queryservice.QueryService = (*explainTablet)(nil) -func (vte *VTExplain) newTablet(ctx context.Context, opts *Options, t *topodatapb.Tablet) *explainTablet { +func (vte *VTExplain) newTablet(ctx context.Context, opts *Options, t *topodatapb.Tablet, ts *topo.Server) *explainTablet { db := fakesqldb.New(nil) sidecardb.AddSchemaInitQueries(db, true) @@ -117,7 +117,7 @@ func (vte *VTExplain) newTablet(ctx context.Context, opts *Options, t *topodatap 
config.EnableTableGC = false // XXX much of this is cloned from the tabletserver tests - tsv := tabletserver.NewTabletServer(ctx, topoproto.TabletAliasString(t.Alias), config, memorytopo.NewServer(ctx, ""), t.Alias) + tsv := tabletserver.NewTabletServer(ctx, topoproto.TabletAliasString(t.Alias), config, ts, t.Alias) tablet := explainTablet{db: db, tsv: tsv, vte: vte} db.Handler = &tablet @@ -454,8 +454,8 @@ func newTabletEnvironment(ddls []sqlparser.DDLStatement, opts *Options) (*tablet } tEnv.addResult(query, tEnv.getResult(likeQuery)) - likeQuery = fmt.Sprintf(mysqlctl.GetColumnNamesQuery, "database()", sqlescape.UnescapeID(likeTable)) - query = fmt.Sprintf(mysqlctl.GetColumnNamesQuery, "database()", sqlescape.UnescapeID(table)) + likeQuery = fmt.Sprintf(mysqlctl.GetColumnNamesQuery, "database()", sqltypes.EncodeStringSQL(sqlescape.UnescapeID(likeTable))) + query = fmt.Sprintf(mysqlctl.GetColumnNamesQuery, "database()", sqltypes.EncodeStringSQL(sqlescape.UnescapeID(table))) if tEnv.getResult(likeQuery) == nil { return nil, fmt.Errorf("check your schema, table[%s] doesn't exist", likeTable) } @@ -496,7 +496,7 @@ func newTabletEnvironment(ddls []sqlparser.DDLStatement, opts *Options) (*tablet tEnv.addResult("SELECT * FROM "+backtickedTable+" WHERE 1 != 1", &sqltypes.Result{ Fields: rowTypes, }) - query := fmt.Sprintf(mysqlctl.GetColumnNamesQuery, "database()", sqlescape.UnescapeID(table)) + query := fmt.Sprintf(mysqlctl.GetColumnNamesQuery, "database()", sqltypes.EncodeStringSQL(sqlescape.UnescapeID(table))) tEnv.addResult(query, &sqltypes.Result{ Fields: colTypes, Rows: colValues, @@ -598,7 +598,7 @@ func (t *explainTablet) handleSelect(query string) (*sqltypes.Result, error) { // Gen4 supports more complex queries so we now need to // handle multiple FROM clauses - tables := make([]*sqlparser.AliasedTableExpr, len(selStmt.From)) + tables := make([]*sqlparser.AliasedTableExpr, 0, len(selStmt.From)) for _, from := range selStmt.From { tables = append(tables, 
getTables(from)...) } diff --git a/go/vt/vtexplain/vtexplain_vttablet_test.go b/go/vt/vtexplain/vtexplain_vttablet_test.go index 614ad186224..da17a61978a 100644 --- a/go/vt/vtexplain/vtexplain_vttablet_test.go +++ b/go/vt/vtexplain/vtexplain_vttablet_test.go @@ -20,10 +20,12 @@ import ( "context" "encoding/json" "testing" + "time" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + "vitess.io/vitess/go/vt/topo/memorytopo" "vitess.io/vitess/go/vt/vttablet/tabletserver/schema" topodatapb "vitess.io/vitess/go/vt/proto/topodata" @@ -70,10 +72,15 @@ create table t2 ( ctx, cancel := context.WithCancel(context.Background()) defer cancel() - vte, err := Init(ctx, testVSchema, testSchema, "", opts) + ts := memorytopo.NewServer(ctx, Cell) + vte, err := Init(ctx, ts, testVSchema, testSchema, "", opts) require.NoError(t, err) defer vte.Stop() + // Check if the correct schema query is registered. + _, found := vte.globalTabletEnv.schemaQueries["SELECT COLUMN_NAME as column_name\n\t\tFROM INFORMATION_SCHEMA.COLUMNS\n\t\tWHERE TABLE_SCHEMA = database() AND TABLE_NAME = 't1'\n\t\tORDER BY ORDINAL_POSITION"] + assert.True(t, found) + sql := "SELECT * FROM t1 INNER JOIN t2 ON t1.id = t2.id" _, err = vte.Run(sql) @@ -125,17 +132,22 @@ create table test_partitioned ( ctx, cancel := context.WithCancel(context.Background()) defer cancel() - vte := initTest(ctx, ModeMulti, defaultTestOpts(), &testopts{}, t) + ts := memorytopo.NewServer(ctx, Cell) + vte := initTest(ctx, ts, ModeMulti, defaultTestOpts(), &testopts{}, t) defer vte.Stop() tabletEnv, _ := newTabletEnvironment(ddls, defaultTestOpts()) vte.setGlobalTabletEnv(tabletEnv) tablet := vte.newTablet(ctx, defaultTestOpts(), &topodatapb.Tablet{ - Keyspace: "test_keyspace", + Keyspace: "ks_sharded", Shard: "-80", - Alias: &topodatapb.TabletAlias{}, - }) + Alias: &topodatapb.TabletAlias{ + Cell: Cell, + }, + }, ts) + + time.Sleep(10 * time.Millisecond) se := tablet.tsv.SchemaEngine() tables := se.GetSchema() diff 
--git a/go/vt/vtgate/engine/aggregations.go b/go/vt/vtgate/engine/aggregations.go index 8037dda37a9..63634adb87c 100644 --- a/go/vt/vtgate/engine/aggregations.go +++ b/go/vt/vtgate/engine/aggregations.go @@ -58,7 +58,6 @@ func NewAggregateParam(opcode AggregateOpcode, col int, alias string) *Aggregate Col: col, Alias: alias, WCol: -1, - Type: sqltypes.Unknown, } if opcode.NeedsComparableValues() { out.KeyCol = col @@ -75,7 +74,7 @@ func (ap *AggregateParams) String() string { if ap.WAssigned() { keyCol = fmt.Sprintf("%s|%d", keyCol, ap.WCol) } - if sqltypes.IsText(ap.Type) && ap.CollationID != collations.Unknown { + if sqltypes.IsText(ap.Type) && collations.Local().IsSupported(ap.CollationID) { keyCol += " COLLATE " + collations.Local().LookupName(ap.CollationID) } dispOrigOp := "" diff --git a/go/vt/vtgate/engine/cached_size.go b/go/vt/vtgate/engine/cached_size.go index 09d66e32453..36a9d44975a 100644 --- a/go/vt/vtgate/engine/cached_size.go +++ b/go/vt/vtgate/engine/cached_size.go @@ -53,7 +53,7 @@ func (cached *AlterVSchema) CachedSize(alloc bool) int64 { } size := int64(0) if alloc { - size += int64(16) + size += int64(24) } // field Keyspace *vitess.io/vitess/go/vt/vtgate/vindexes.Keyspace size += cached.Keyspace.CachedSize(true) @@ -111,7 +111,7 @@ func (cached *DBDDL) CachedSize(alloc bool) int64 { } size := int64(0) if alloc { - size += int64(32) + size += int64(48) } // field name string size += hack.RuntimeAllocSize(int64(len(cached.name))) @@ -145,7 +145,7 @@ func (cached *DML) CachedSize(alloc bool) int64 { } size := int64(0) if alloc { - size += int64(144) + size += int64(160) } // field Query string size += hack.RuntimeAllocSize(int64(len(cached.Query))) @@ -183,7 +183,7 @@ func (cached *Delete) CachedSize(alloc bool) int64 { } size := int64(0) if alloc { - size += int64(8) + size += int64(16) } // field DML *vitess.io/vitess/go/vt/vtgate/engine.DML size += cached.DML.CachedSize(true) @@ -203,7 +203,7 @@ func (cached *Distinct) CachedSize(alloc bool) 
int64 { } // field CheckCols []vitess.io/vitess/go/vt/vtgate/engine.CheckCol { - size += hack.RuntimeAllocSize(int64(cap(cached.CheckCols)) * int64(22)) + size += hack.RuntimeAllocSize(int64(cap(cached.CheckCols)) * int64(24)) for _, elem := range cached.CheckCols { size += elem.CachedSize(false) } @@ -555,7 +555,7 @@ func (cached *Lock) CachedSize(alloc bool) int64 { } size := int64(0) if alloc { - size += int64(64) + size += int64(80) } // field Keyspace *vitess.io/vitess/go/vt/vtgate/vindexes.Keyspace size += cached.Keyspace.CachedSize(true) @@ -622,7 +622,7 @@ func (cached *MemorySort) CachedSize(alloc bool) int64 { } // field OrderBy []vitess.io/vitess/go/vt/vtgate/engine.OrderByParams { - size += hack.RuntimeAllocSize(int64(cap(cached.OrderBy)) * int64(38)) + size += hack.RuntimeAllocSize(int64(cap(cached.OrderBy)) * int64(40)) } // field Input vitess.io/vitess/go/vt/vtgate/engine.Primitive if cc, ok := cached.Input.(cachedObject); ok { @@ -649,7 +649,7 @@ func (cached *MergeSort) CachedSize(alloc bool) int64 { } // field OrderBy []vitess.io/vitess/go/vt/vtgate/engine.OrderByParams { - size += hack.RuntimeAllocSize(int64(cap(cached.OrderBy)) * int64(38)) + size += hack.RuntimeAllocSize(int64(cap(cached.OrderBy)) * int64(40)) } return size } @@ -659,7 +659,7 @@ func (cached *OnlineDDL) CachedSize(alloc bool) int64 { } size := int64(0) if alloc { - size += int64(64) + size += int64(80) } // field Keyspace *vitess.io/vitess/go/vt/vtgate/vindexes.Keyspace size += cached.Keyspace.CachedSize(true) @@ -752,7 +752,7 @@ func (cached *Projection) CachedSize(alloc bool) int64 { } size := int64(0) if alloc { - size += int64(64) + size += int64(80) } // field Cols []string { @@ -782,7 +782,7 @@ func (cached *RenameFields) CachedSize(alloc bool) int64 { } size := int64(0) if alloc { - size += int64(64) + size += int64(80) } // field Cols []string { @@ -807,7 +807,7 @@ func (cached *ReplaceVariables) CachedSize(alloc bool) int64 { } size := int64(0) if alloc { - size += 
int64(16) + size += int64(24) } // field Input vitess.io/vitess/go/vt/vtgate/engine.Primitive if cc, ok := cached.Input.(cachedObject); ok { @@ -821,7 +821,7 @@ func (cached *RevertMigration) CachedSize(alloc bool) int64 { } size := int64(0) if alloc { - size += int64(48) + size += int64(64) } // field Keyspace *vitess.io/vitess/go/vt/vtgate/vindexes.Keyspace size += cached.Keyspace.CachedSize(true) @@ -891,7 +891,7 @@ func (cached *Route) CachedSize(alloc bool) int64 { size += hack.RuntimeAllocSize(int64(len(cached.FieldQuery))) // field OrderBy []vitess.io/vitess/go/vt/vtgate/engine.OrderByParams { - size += hack.RuntimeAllocSize(int64(cap(cached.OrderBy)) * int64(38)) + size += hack.RuntimeAllocSize(int64(cap(cached.OrderBy)) * int64(40)) } // field RoutingParameters *vitess.io/vitess/go/vt/vtgate/engine.RoutingParameters size += cached.RoutingParameters.CachedSize(true) @@ -960,7 +960,7 @@ func (cached *Rows) CachedSize(alloc bool) int64 { } size := int64(0) if alloc { - size += int64(48) + size += int64(64) } // field rows [][]vitess.io/vitess/go/sqltypes.Value { @@ -1084,7 +1084,7 @@ func (cached *SessionPrimitive) CachedSize(alloc bool) int64 { } size := int64(0) if alloc { - size += int64(24) + size += int64(32) } // field name string size += hack.RuntimeAllocSize(int64(len(cached.name))) @@ -1119,7 +1119,7 @@ func (cached *ShowExec) CachedSize(alloc bool) int64 { } size := int64(0) if alloc { - size += int64(16) + size += int64(24) } // field ShowFilter *vitess.io/vitess/go/vt/sqlparser.ShowFilter size += cached.ShowFilter.CachedSize(true) @@ -1317,7 +1317,7 @@ func (cached *TableDML) CachedSize(alloc bool) int64 { } size := int64(0) if alloc { - size += int64(144) + size += int64(160) } // field Queries []*vitess.io/vitess/go/vt/proto/query.BoundQuery { @@ -1360,7 +1360,7 @@ func (cached *TableDelete) CachedSize(alloc bool) int64 { } size := int64(0) if alloc { - size += int64(8) + size += int64(16) } // field TableDML 
*vitess.io/vitess/go/vt/vtgate/engine.TableDML size += cached.TableDML.CachedSize(true) @@ -1372,7 +1372,7 @@ func (cached *TableRoute) CachedSize(alloc bool) int64 { } size := int64(0) if alloc { - size += int64(112) + size += int64(128) } // field TableNames []string { @@ -1393,7 +1393,7 @@ func (cached *TableRoute) CachedSize(alloc bool) int64 { size += hack.RuntimeAllocSize(int64(len(cached.FieldQuery))) // field OrderBy []vitess.io/vitess/go/vt/vtgate/engine.OrderByParams { - size += hack.RuntimeAllocSize(int64(cap(cached.OrderBy)) * int64(38)) + size += hack.RuntimeAllocSize(int64(cap(cached.OrderBy)) * int64(40)) } return size } @@ -1489,7 +1489,7 @@ func (cached *TableUpdate) CachedSize(alloc bool) int64 { } size := int64(0) if alloc { - size += int64(8) + size += int64(16) } // field TableDML *vitess.io/vitess/go/vt/vtgate/engine.TableDML size += cached.TableDML.CachedSize(true) @@ -1501,7 +1501,7 @@ func (cached *ThrottleApp) CachedSize(alloc bool) int64 { } size := int64(0) if alloc { - size += int64(16) + size += int64(24) } // field Keyspace *vitess.io/vitess/go/vt/vtgate/vindexes.Keyspace size += cached.Keyspace.CachedSize(true) @@ -1539,7 +1539,7 @@ func (cached *Update) CachedSize(alloc bool) int64 { } size := int64(0) if alloc { - size += int64(16) + size += int64(24) } // field DML *vitess.io/vitess/go/vt/vtgate/engine.DML size += cached.DML.CachedSize(true) @@ -1566,7 +1566,7 @@ func (cached *UpdateTarget) CachedSize(alloc bool) int64 { } size := int64(0) if alloc { - size += int64(16) + size += int64(24) } // field Target string size += hack.RuntimeAllocSize(int64(len(cached.Target))) @@ -1608,7 +1608,7 @@ func (cached *VStream) CachedSize(alloc bool) int64 { } size := int64(0) if alloc { - size += int64(64) + size += int64(80) } // field Keyspace *vitess.io/vitess/go/vt/vtgate/vindexes.Keyspace size += cached.Keyspace.CachedSize(true) diff --git a/go/vt/vtgate/engine/table_route.go b/go/vt/vtgate/engine/table_route.go index 
7ba56a93279..1da448b8d3c 100644 --- a/go/vt/vtgate/engine/table_route.go +++ b/go/vt/vtgate/engine/table_route.go @@ -6,6 +6,7 @@ import ( "sort" "strings" "time" + "vitess.io/vitess/go/mysql/sqlerror" "vitess.io/vitess/go/sqltypes" diff --git a/go/vt/vtgate/evalengine/cached_size.go b/go/vt/vtgate/evalengine/cached_size.go index 69c39249fb9..91898fe770e 100644 --- a/go/vt/vtgate/evalengine/cached_size.go +++ b/go/vt/vtgate/evalengine/cached_size.go @@ -207,6 +207,10 @@ func (cached *ConvertExpr) CachedSize(alloc bool) int64 { size += cached.UnaryExpr.CachedSize(false) // field Type string size += hack.RuntimeAllocSize(int64(len(cached.Type))) + // field Length *int + size += hack.RuntimeAllocSize(int64(8)) + // field Scale *int + size += hack.RuntimeAllocSize(int64(8)) return size } func (cached *ConvertUsingExpr) CachedSize(alloc bool) int64 { @@ -1631,6 +1635,8 @@ func (cached *builtinWeightString) CachedSize(alloc bool) int64 { } // field Cast string size += hack.RuntimeAllocSize(int64(len(cached.Cast))) + // field Len *int + size += hack.RuntimeAllocSize(int64(8)) return size } func (cached *builtinYear) CachedSize(alloc bool) int64 { @@ -1677,7 +1683,7 @@ func (cached *evalDecimal) CachedSize(alloc bool) int64 { } size := int64(0) if alloc { - size += int64(16) + size += int64(24) } // field dec vitess.io/vitess/go/mysql/decimal.Decimal size += cached.dec.CachedSize(false) diff --git a/go/vt/vtgate/evalengine/compiler.go b/go/vt/vtgate/evalengine/compiler.go index 23f7a9f10aa..a065825166c 100644 --- a/go/vt/vtgate/evalengine/compiler.go +++ b/go/vt/vtgate/evalengine/compiler.go @@ -449,7 +449,7 @@ func (c *compiler) compileToJSONKey(key ctype) error { if key.Type == sqltypes.VarBinary { return nil } - c.asm.Convert_xc(1, sqltypes.VarChar, c.cfg.Collation, 0, false) + c.asm.Convert_xc(1, sqltypes.VarChar, c.cfg.Collation, nil) return nil } diff --git a/go/vt/vtgate/evalengine/compiler_asm.go b/go/vt/vtgate/evalengine/compiler_asm.go index 
99d81475c96..663512b32eb 100644 --- a/go/vt/vtgate/evalengine/compiler_asm.go +++ b/go/vt/vtgate/evalengine/compiler_asm.go @@ -1025,15 +1025,16 @@ func (asm *assembler) Convert_ui(offset int) { }, "CONV UINT64(SP-%d), INT64", offset) } -func (asm *assembler) Convert_xb(offset int, t sqltypes.Type, length int, hasLength bool) { - if hasLength { +func (asm *assembler) Convert_xb(offset int, t sqltypes.Type, length *int) { + if length != nil { + l := *length asm.emit(func(env *ExpressionEnv) int { arg := evalToBinary(env.vm.stack[env.vm.sp-offset]) - arg.truncateInPlace(length) + arg.truncateInPlace(l) arg.tt = int16(t) env.vm.stack[env.vm.sp-offset] = arg return 1 - }, "CONV (SP-%d), VARBINARY[%d]", offset, length) + }, "CONV (SP-%d), VARBINARY[%d]", offset, l) } else { asm.emit(func(env *ExpressionEnv) int { arg := evalToBinary(env.vm.stack[env.vm.sp-offset]) @@ -1044,19 +1045,20 @@ func (asm *assembler) Convert_xb(offset int, t sqltypes.Type, length int, hasLen } } -func (asm *assembler) Convert_xc(offset int, t sqltypes.Type, collation collations.ID, length int, hasLength bool) { - if hasLength { +func (asm *assembler) Convert_xc(offset int, t sqltypes.Type, collation collations.ID, length *int) { + if length != nil { + l := *length asm.emit(func(env *ExpressionEnv) int { arg, err := evalToVarchar(env.vm.stack[env.vm.sp-offset], collation, true) if err != nil { env.vm.stack[env.vm.sp-offset] = nil } else { - arg.truncateInPlace(length) + arg.truncateInPlace(l) arg.tt = int16(t) env.vm.stack[env.vm.sp-offset] = arg } return 1 - }, "CONV (SP-%d), VARCHAR[%d]", offset, length) + }, "CONV (SP-%d), VARCHAR[%d]", offset, l) } else { asm.emit(func(env *ExpressionEnv) int { arg, err := evalToVarchar(env.vm.stack[env.vm.sp-offset], collation, true) @@ -3332,31 +3334,32 @@ func cmpnum[N interface{ int64 | uint64 | float64 }](a, b N) int { } } -func (asm *assembler) Fn_Now(t querypb.Type, format *datetime.Strftime, prec uint8, utc bool) { +func (asm *assembler) Fn_Now(prec 
uint8, utc bool) { asm.adjustStack(1) asm.emit(func(env *ExpressionEnv) int { - val := env.vm.arena.newEvalBytesEmpty() - val.tt = int16(t) - val.bytes = format.Format(env.time(utc), prec) - val.col = collationBinary - env.vm.stack[env.vm.sp] = val + env.vm.stack[env.vm.sp] = env.vm.arena.newEvalDateTime(env.time(utc), int(prec)) env.vm.sp++ return 1 - }, "FN NOW") + }, "FN NOW(DATETIME)") +} + +func (asm *assembler) Fn_NowTime(prec uint8, utc bool) { + asm.adjustStack(1) + asm.emit(func(env *ExpressionEnv) int { + env.vm.stack[env.vm.sp] = env.vm.arena.newEvalTime(env.time(utc).Time, int(prec)) + env.vm.sp++ + return 1 + }, "FN NOW(TIME)") } func (asm *assembler) Fn_Sysdate(prec uint8) { asm.adjustStack(1) asm.emit(func(env *ExpressionEnv) int { - val := env.vm.arena.newEvalBytesEmpty() - val.tt = int16(sqltypes.Datetime) now := SystemTime() if tz := env.currentTimezone(); tz != nil { now = now.In(tz) } - val.bytes = datetime.NewDateTimeFromStd(now).Format(prec) - val.col = collationBinary - env.vm.stack[env.vm.sp] = val + env.vm.stack[env.vm.sp] = env.vm.arena.newEvalDateTime(datetime.NewDateTimeFromStd(now), int(prec)) env.vm.sp++ return 1 }, "FN SYSDATE") @@ -3365,11 +3368,7 @@ func (asm *assembler) Fn_Sysdate(prec uint8) { func (asm *assembler) Fn_Curdate() { asm.adjustStack(1) asm.emit(func(env *ExpressionEnv) int { - val := env.vm.arena.newEvalBytesEmpty() - val.tt = int16(sqltypes.Date) - val.bytes = datetime.Date_YYYY_MM_DD.Format(env.time(false), 0) - val.col = collationBinary - env.vm.stack[env.vm.sp] = val + env.vm.stack[env.vm.sp] = env.vm.arena.newEvalDate(env.time(false).Date) env.vm.sp++ return 1 }, "FN CURDATE") @@ -3378,11 +3377,7 @@ func (asm *assembler) Fn_Curdate() { func (asm *assembler) Fn_UtcDate() { asm.adjustStack(1) asm.emit(func(env *ExpressionEnv) int { - val := env.vm.arena.newEvalBytesEmpty() - val.tt = int16(sqltypes.Date) - val.bytes = datetime.Date_YYYY_MM_DD.Format(env.time(true), 0) - val.col = collationBinary - 
env.vm.stack[env.vm.sp] = val + env.vm.stack[env.vm.sp] = env.vm.arena.newEvalDate(env.time(true).Date) env.vm.sp++ return 1 }, "FN UTC_DATE") diff --git a/go/vt/vtgate/evalengine/compiler_fn.go b/go/vt/vtgate/evalengine/compiler_fn.go index b17fee7fedc..88dbd983287 100644 --- a/go/vt/vtgate/evalengine/compiler_fn.go +++ b/go/vt/vtgate/evalengine/compiler_fn.go @@ -76,7 +76,7 @@ func (c *compiler) compileFn_length(arg Expr, asm_ins func()) (ctype, error) { switch { case str.isTextual(): default: - c.asm.Convert_xc(1, sqltypes.VarChar, c.cfg.Collation, 0, false) + c.asm.Convert_xc(1, sqltypes.VarChar, c.cfg.Collation, nil) } asm_ins() diff --git a/go/vt/vtgate/evalengine/eval_temporal.go b/go/vt/vtgate/evalengine/eval_temporal.go index 13acc5bd290..e23deeb8923 100644 --- a/go/vt/vtgate/evalengine/eval_temporal.go +++ b/go/vt/vtgate/evalengine/eval_temporal.go @@ -151,7 +151,7 @@ func (e *evalTemporal) addInterval(interval *datetime.Interval, strcoll collatio tmp.dt.Time, tmp.prec, ok = e.dt.Time.AddInterval(interval, strcoll.Valid()) case tt == sqltypes.Datetime || tt == sqltypes.Timestamp || (tt == sqltypes.Date && interval.Unit().HasTimeParts()) || (tt == sqltypes.Time && interval.Unit().HasDateParts()): tmp = e.toDateTime(int(e.prec)) - tmp.dt, tmp.prec, ok = e.dt.AddInterval(interval, strcoll.Valid()) + tmp.dt, tmp.prec, ok = e.dt.AddInterval(interval, tmp.prec, strcoll.Valid()) } if !ok { return nil diff --git a/go/vt/vtgate/evalengine/expr_compare.go b/go/vt/vtgate/evalengine/expr_compare.go index c98802818a8..b723609160c 100644 --- a/go/vt/vtgate/evalengine/expr_compare.go +++ b/go/vt/vtgate/evalengine/expr_compare.go @@ -628,7 +628,7 @@ func (expr *LikeExpr) compile(c *compiler) (ctype, error) { skip := c.compileNullCheck2(lt, rt) if !lt.isTextual() { - c.asm.Convert_xc(2, sqltypes.VarChar, c.cfg.Collation, 0, false) + c.asm.Convert_xc(2, sqltypes.VarChar, c.cfg.Collation, nil) lt.Col = collations.TypedCollation{ Collation: c.cfg.Collation, Coercibility: 
collations.CoerceCoercible, @@ -637,7 +637,7 @@ func (expr *LikeExpr) compile(c *compiler) (ctype, error) { } if !rt.isTextual() { - c.asm.Convert_xc(1, sqltypes.VarChar, c.cfg.Collation, 0, false) + c.asm.Convert_xc(1, sqltypes.VarChar, c.cfg.Collation, nil) rt.Col = collations.TypedCollation{ Collation: c.cfg.Collation, Coercibility: collations.CoerceCoercible, diff --git a/go/vt/vtgate/evalengine/expr_convert.go b/go/vt/vtgate/evalengine/expr_convert.go index 6531cdd6fae..550324f62df 100644 --- a/go/vt/vtgate/evalengine/expr_convert.go +++ b/go/vt/vtgate/evalengine/expr_convert.go @@ -19,6 +19,7 @@ package evalengine import ( "vitess.io/vitess/go/mysql/collations" "vitess.io/vitess/go/mysql/collations/colldata" + "vitess.io/vitess/go/ptr" "vitess.io/vitess/go/sqltypes" querypb "vitess.io/vitess/go/vt/proto/query" vtrpcpb "vitess.io/vitess/go/vt/proto/vtrpc" @@ -28,10 +29,9 @@ import ( type ( ConvertExpr struct { UnaryExpr - Type string - Length, Scale int - HasLength, HasScale bool - Collation collations.ID + Type string + Length, Scale *int + Collation collations.ID } ConvertUsingExpr struct { @@ -46,10 +46,10 @@ var _ Expr = (*ConvertUsingExpr)(nil) func (c *ConvertExpr) returnUnsupportedError() error { var err error switch { - case c.HasLength && c.HasScale: - err = vterrors.Errorf(vtrpcpb.Code_UNIMPLEMENTED, "Unsupported type conversion: %s(%d,%d)", c.Type, c.Length, c.Scale) - case c.HasLength: - err = vterrors.Errorf(vtrpcpb.Code_UNIMPLEMENTED, "Unsupported type conversion: %s(%d)", c.Type, c.Length) + case c.Length != nil && c.Scale != nil: + err = vterrors.Errorf(vtrpcpb.Code_UNIMPLEMENTED, "Unsupported type conversion: %s(%d,%d)", c.Type, *c.Length, *c.Scale) + case c.Length != nil: + err = vterrors.Errorf(vtrpcpb.Code_UNIMPLEMENTED, "Unsupported type conversion: %s(%d)", c.Type, *c.Length) default: err = vterrors.Errorf(vtrpcpb.Code_UNIMPLEMENTED, "Unsupported type conversion: %s", c.Type) } @@ -59,11 +59,11 @@ func (c *ConvertExpr) 
returnUnsupportedError() error { func (c *ConvertExpr) decimalPrecision() (int32, int32) { m := 10 d := 0 - if c.HasLength { - m = c.Length + if c.Length != nil { + m = *c.Length } - if c.HasScale { - d = c.Scale + if c.Scale != nil { + d = *c.Scale } if m == 0 && d == 0 { m = 10 @@ -83,8 +83,8 @@ func (c *ConvertExpr) eval(env *ExpressionEnv) (eval, error) { switch c.Type { case "BINARY": b := evalToBinary(e) - if c.HasLength { - b.truncateInPlace(c.Length) + if c.Length != nil { + b.truncateInPlace(*c.Length) } b.tt = int16(c.convertToBinaryType(e.SQLType())) return b, nil @@ -95,8 +95,8 @@ func (c *ConvertExpr) eval(env *ExpressionEnv) (eval, error) { // return NULL on error return nil, nil } - if c.HasLength { - t.truncateInPlace(c.Length) + if c.Length != nil { + t.truncateInPlace(*c.Length) } t.tt = int16(c.convertToCharType(e.SQLType())) return t, nil @@ -107,8 +107,8 @@ func (c *ConvertExpr) eval(env *ExpressionEnv) (eval, error) { f, _ := evalToFloat(e) return f, nil case "FLOAT": - if c.HasLength { - switch p := c.Length; { + if c.Length != nil { + switch p := *c.Length; { case p > 53: return nil, vterrors.Errorf(vtrpcpb.Code_INVALID_ARGUMENT, "Too-big precision %d specified for 'CONVERT'. Maximum is 53.", p) } @@ -121,11 +121,11 @@ func (c *ConvertExpr) eval(env *ExpressionEnv) (eval, error) { case "JSON": return evalToJSON(e) case "DATETIME": - switch p := c.Length; { - case p > 6: + p := ptr.Unwrap(c.Length, 0) + if p > 6 { return nil, vterrors.Errorf(vtrpcpb.Code_INVALID_ARGUMENT, "Too-big precision %d specified for 'CONVERT'. 
Maximum is 6.", p) } - if dt := evalToDateTime(e, c.Length); dt != nil { + if dt := evalToDateTime(e, p); dt != nil { return dt, nil } return nil, nil @@ -135,11 +135,11 @@ func (c *ConvertExpr) eval(env *ExpressionEnv) (eval, error) { } return nil, nil case "TIME": - switch p := c.Length; { - case p > 6: + p := ptr.Unwrap(c.Length, 0) + if p > 6 { return nil, vterrors.Errorf(vtrpcpb.Code_INVALID_ARGUMENT, "Too-big precision %d specified for 'CONVERT'. Maximum is 6.", p) } - if t := evalToTime(e, c.Length); t != nil { + if t := evalToTime(e, p); t != nil { return t, nil } return nil, nil @@ -184,8 +184,8 @@ func (c *ConvertExpr) typeof(env *ExpressionEnv, fields []*querypb.Field) (sqlty } func (c *ConvertExpr) convertToBinaryType(tt sqltypes.Type) sqltypes.Type { - if c.HasLength { - if c.Length > 64*1024 { + if c.Length != nil { + if *c.Length > 64*1024 { return sqltypes.Blob } } else if tt == sqltypes.Blob || tt == sqltypes.TypeJSON { @@ -195,9 +195,9 @@ func (c *ConvertExpr) convertToBinaryType(tt sqltypes.Type) sqltypes.Type { } func (c *ConvertExpr) convertToCharType(tt sqltypes.Type) sqltypes.Type { - if c.HasLength { + if c.Length != nil { col := colldata.Lookup(c.Collation) - length := c.Length * col.Charset().MaxWidth() + length := *c.Length * col.Charset().MaxWidth() if length > 64*1024 { return sqltypes.Text } @@ -219,18 +219,18 @@ func (conv *ConvertExpr) compile(c *compiler) (ctype, error) { switch conv.Type { case "BINARY": convt = ctype{Type: conv.convertToBinaryType(arg.Type), Col: collationBinary} - c.asm.Convert_xb(1, convt.Type, conv.Length, conv.HasLength) + c.asm.Convert_xb(1, convt.Type, conv.Length) case "CHAR", "NCHAR": convt = ctype{ Type: conv.convertToCharType(arg.Type), Col: collations.TypedCollation{Collation: conv.Collation}, } - c.asm.Convert_xc(1, convt.Type, convt.Col.Collation, conv.Length, conv.HasLength) + c.asm.Convert_xc(1, convt.Type, convt.Col.Collation, conv.Length) case "DECIMAL": - convt = ctype{Type: sqltypes.Decimal, 
Col: collationNumeric} m, d := conv.decimalPrecision() + convt = ctype{Type: sqltypes.Decimal, Col: collationNumeric} c.asm.Convert_xd(1, m, d) case "DOUBLE", "REAL": @@ -256,18 +256,18 @@ func (conv *ConvertExpr) compile(c *compiler) (ctype, error) { convt = c.compileToDate(arg, 1) case "DATETIME": - switch p := conv.Length; { - case p > 6: + p := ptr.Unwrap(conv.Length, 0) + if p > 6 { return ctype{}, c.unsupported(conv) } - convt = c.compileToDateTime(arg, 1, conv.Length) + convt = c.compileToDateTime(arg, 1, p) case "TIME": - switch p := conv.Length; { - case p > 6: + p := ptr.Unwrap(conv.Length, 0) + if p > 6 { return ctype{}, c.unsupported(conv) } - convt = c.compileToTime(arg, 1, conv.Length) + convt = c.compileToTime(arg, 1, p) default: return ctype{}, c.unsupported(conv) @@ -306,7 +306,7 @@ func (conv *ConvertUsingExpr) compile(c *compiler) (ctype, error) { } skip := c.compileNullCheck1(ct) - c.asm.Convert_xc(1, sqltypes.VarChar, conv.Collation, 0, false) + c.asm.Convert_xc(1, sqltypes.VarChar, conv.Collation, nil) c.asm.jumpDestination(skip) col := collations.TypedCollation{ diff --git a/go/vt/vtgate/evalengine/fn_base64.go b/go/vt/vtgate/evalengine/fn_base64.go index 6beb7b0209c..86a83ce696a 100644 --- a/go/vt/vtgate/evalengine/fn_base64.go +++ b/go/vt/vtgate/evalengine/fn_base64.go @@ -112,7 +112,7 @@ func (call *builtinToBase64) compile(c *compiler) (ctype, error) { switch { case str.isTextual(): default: - c.asm.Convert_xb(1, t, 0, false) + c.asm.Convert_xb(1, t, nil) } col := defaultCoercionCollation(c.cfg.Collation) @@ -166,7 +166,7 @@ func (call *builtinFromBase64) compile(c *compiler) (ctype, error) { switch { case str.isTextual(): default: - c.asm.Convert_xb(1, t, 0, false) + c.asm.Convert_xb(1, t, nil) } c.asm.Fn_FROM_BASE64(t) diff --git a/go/vt/vtgate/evalengine/fn_crypto.go b/go/vt/vtgate/evalengine/fn_crypto.go index 491fa7bcf68..60f460c0f77 100644 --- a/go/vt/vtgate/evalengine/fn_crypto.go +++ b/go/vt/vtgate/evalengine/fn_crypto.go @@ -68,7 
+68,7 @@ func (call *builtinMD5) compile(c *compiler) (ctype, error) { switch { case str.isTextual(): default: - c.asm.Convert_xb(1, sqltypes.Binary, 0, false) + c.asm.Convert_xb(1, sqltypes.Binary, nil) } col := defaultCoercionCollation(c.cfg.Collation) @@ -116,7 +116,7 @@ func (call *builtinSHA1) compile(c *compiler) (ctype, error) { switch { case str.isTextual(): default: - c.asm.Convert_xb(1, sqltypes.Binary, 0, false) + c.asm.Convert_xb(1, sqltypes.Binary, nil) } col := defaultCoercionCollation(c.cfg.Collation) c.asm.Fn_SHA1(col) @@ -190,7 +190,7 @@ func (call *builtinSHA2) compile(c *compiler) (ctype, error) { switch { case str.isTextual(): default: - c.asm.Convert_xb(2, sqltypes.Binary, 0, false) + c.asm.Convert_xb(2, sqltypes.Binary, nil) } switch bits.Type { diff --git a/go/vt/vtgate/evalengine/fn_hex.go b/go/vt/vtgate/evalengine/fn_hex.go index ca84921e049..71e748d1896 100644 --- a/go/vt/vtgate/evalengine/fn_hex.go +++ b/go/vt/vtgate/evalengine/fn_hex.go @@ -82,7 +82,7 @@ func (call *builtinHex) compile(c *compiler) (ctype, error) { case str.isTextual(): c.asm.Fn_HEX_c(t, col) default: - c.asm.Convert_xc(1, t, c.cfg.Collation, 0, false) + c.asm.Convert_xc(1, t, c.cfg.Collation, nil) c.asm.Fn_HEX_c(t, col) } @@ -208,7 +208,7 @@ func (call *builtinUnhex) compile(c *compiler) (ctype, error) { case str.Type == sqltypes.TypeJSON: c.asm.Fn_UNHEX_j(t) default: - c.asm.Convert_xb(1, t, 0, false) + c.asm.Convert_xb(1, t, nil) c.asm.Fn_UNHEX_b(t) } diff --git a/go/vt/vtgate/evalengine/fn_misc.go b/go/vt/vtgate/evalengine/fn_misc.go index 966017edbc7..6d22acea4d5 100644 --- a/go/vt/vtgate/evalengine/fn_misc.go +++ b/go/vt/vtgate/evalengine/fn_misc.go @@ -125,7 +125,7 @@ func (call *builtinInetAton) compile(c *compiler) (ctype, error) { switch { case str.isTextual(): default: - c.asm.Convert_xb(1, sqltypes.VarBinary, 0, false) + c.asm.Convert_xb(1, sqltypes.VarBinary, nil) } c.asm.Fn_INET_ATON() @@ -199,7 +199,7 @@ func (call *builtinInet6Aton) compile(c *compiler) 
(ctype, error) { switch { case str.isTextual(): default: - c.asm.Convert_xb(1, sqltypes.VarBinary, 0, false) + c.asm.Convert_xb(1, sqltypes.VarBinary, nil) } c.asm.Fn_INET6_ATON() @@ -313,7 +313,7 @@ func (call *builtinIsIPV4) compile(c *compiler) (ctype, error) { switch { case arg.isTextual(): default: - c.asm.Convert_xb(1, sqltypes.VarBinary, 0, false) + c.asm.Convert_xb(1, sqltypes.VarBinary, nil) } c.asm.Fn_IS_IPV4() @@ -419,7 +419,7 @@ func (call *builtinIsIPV6) compile(c *compiler) (ctype, error) { switch { case arg.isTextual(): default: - c.asm.Convert_xb(1, sqltypes.VarBinary, 0, false) + c.asm.Convert_xb(1, sqltypes.VarBinary, nil) } c.asm.Fn_IS_IPV6() @@ -498,7 +498,7 @@ func (call *builtinBinToUUID) compile(c *compiler) (ctype, error) { switch { case arg.isTextual(): default: - c.asm.Convert_xb(1, sqltypes.VarBinary, 0, false) + c.asm.Convert_xb(1, sqltypes.VarBinary, nil) } col := defaultCoercionCollation(call.collate) @@ -556,7 +556,7 @@ func (call *builtinIsUUID) compile(c *compiler) (ctype, error) { switch { case arg.isTextual(): default: - c.asm.Convert_xb(1, sqltypes.VarBinary, 0, false) + c.asm.Convert_xb(1, sqltypes.VarBinary, nil) } c.asm.Fn_IS_UUID() @@ -633,7 +633,7 @@ func (call *builtinUUIDToBin) compile(c *compiler) (ctype, error) { switch { case arg.isTextual(): default: - c.asm.Convert_xb(1, sqltypes.VarBinary, 0, false) + c.asm.Convert_xb(1, sqltypes.VarBinary, nil) } ct := ctype{Type: sqltypes.VarBinary, Flag: nullableFlags(arg.Flag), Col: collationBinary} diff --git a/go/vt/vtgate/evalengine/fn_numeric.go b/go/vt/vtgate/evalengine/fn_numeric.go index a69a6e35a86..46063acc1c0 100644 --- a/go/vt/vtgate/evalengine/fn_numeric.go +++ b/go/vt/vtgate/evalengine/fn_numeric.go @@ -1428,7 +1428,7 @@ func (expr *builtinCrc32) compile(c *compiler) (ctype, error) { switch { case arg.isTextual(): default: - c.asm.Convert_xb(1, sqltypes.Binary, 0, false) + c.asm.Convert_xb(1, sqltypes.Binary, nil) } c.asm.Fn_CRC32() @@ -1540,7 +1540,7 @@ func (expr 
*builtinConv) compile(c *compiler) (ctype, error) { switch { case n.isTextual(): default: - c.asm.Convert_xb(3, t, 0, false) + c.asm.Convert_xb(3, t, nil) } if n.isHexOrBitLiteral() { diff --git a/go/vt/vtgate/evalengine/fn_regexp.go b/go/vt/vtgate/evalengine/fn_regexp.go index 2ba5b97573f..9b7b5170fb5 100644 --- a/go/vt/vtgate/evalengine/fn_regexp.go +++ b/go/vt/vtgate/evalengine/fn_regexp.go @@ -581,7 +581,7 @@ func (r *builtinRegexpInstr) compile(c *compiler) (ctype, error) { switch { case matchType.isTextual(): default: - c.asm.Convert_xb(1, sqltypes.VarBinary, 0, false) + c.asm.Convert_xb(1, sqltypes.VarBinary, nil) } } @@ -774,7 +774,7 @@ func (r *builtinRegexpSubstr) compile(c *compiler) (ctype, error) { switch { case matchType.isTextual(): default: - c.asm.Convert_xb(1, sqltypes.VarBinary, 0, false) + c.asm.Convert_xb(1, sqltypes.VarBinary, nil) } } @@ -1031,7 +1031,7 @@ func (r *builtinRegexpReplace) compile(c *compiler) (ctype, error) { switch { case matchType.isTextual(): default: - c.asm.Convert_xb(1, sqltypes.VarBinary, 0, false) + c.asm.Convert_xb(1, sqltypes.VarBinary, nil) } } diff --git a/go/vt/vtgate/evalengine/fn_string.go b/go/vt/vtgate/evalengine/fn_string.go index ef3d9037dd2..a91d36fa499 100644 --- a/go/vt/vtgate/evalengine/fn_string.go +++ b/go/vt/vtgate/evalengine/fn_string.go @@ -62,10 +62,9 @@ type ( } builtinWeightString struct { - Expr Expr - Cast string - Len int - HasLen bool + Expr Expr + Cast string + Len *int } builtinLeftRight struct { @@ -152,7 +151,7 @@ func (call *builtinChangeCase) compile(c *compiler) (ctype, error) { switch { case str.isTextual(): default: - c.asm.Convert_xc(1, sqltypes.VarChar, c.cfg.Collation, 0, false) + c.asm.Convert_xc(1, sqltypes.VarChar, c.cfg.Collation, nil) } c.asm.Fn_LUCASE(call.upcase) @@ -265,7 +264,7 @@ func (call *builtinASCII) compile(c *compiler) (ctype, error) { switch { case str.isTextual(): default: - c.asm.Convert_xb(1, sqltypes.VarBinary, 0, false) + c.asm.Convert_xb(1, 
sqltypes.VarBinary, nil) } c.asm.Fn_ASCII() @@ -325,7 +324,7 @@ func (call *builtinOrd) compile(c *compiler) (ctype, error) { case str.isTextual(): col = str.Col.Collation default: - c.asm.Convert_xc(1, sqltypes.VarChar, call.collate, 0, false) + c.asm.Convert_xc(1, sqltypes.VarChar, call.collate, nil) } c.asm.Fn_ORD(col) @@ -415,7 +414,7 @@ func (expr *builtinRepeat) compile(c *compiler) (ctype, error) { switch { case str.isTextual(): default: - c.asm.Convert_xc(2, sqltypes.VarChar, c.cfg.Collation, 0, false) + c.asm.Convert_xc(2, sqltypes.VarChar, c.cfg.Collation, nil) } _ = c.compileToInt64(repeat, 1) @@ -484,7 +483,7 @@ func (c *builtinWeightString) eval(env *ExpressionEnv) (eval, error) { typ = sqltypes.Blob } - weights, _, err = evalWeightString(weights, evalToBinary(input), c.Len, 0) + weights, _, err = evalWeightString(weights, evalToBinary(input), *c.Len, 0) if err != nil { return nil, err } @@ -519,7 +518,7 @@ func (c *builtinWeightString) eval(env *ExpressionEnv) (eval, error) { } else { var strLen int if c.Cast == "char" { - strLen = c.Len + strLen = *c.Len } weights, _, err = evalWeightString(weights, val, strLen, 0) } @@ -549,14 +548,14 @@ func (call *builtinWeightString) compile(c *compiler) (ctype, error) { skip := c.compileNullCheck1(str) if call.Cast == "binary" { if !sqltypes.IsBinary(str.Type) { - c.asm.Convert_xb(1, sqltypes.VarBinary, 0, false) + c.asm.Convert_xb(1, sqltypes.VarBinary, nil) } switch str.Type { case sqltypes.Blob, sqltypes.Text, sqltypes.TypeJSON: typ = sqltypes.Blob } - c.asm.Fn_WEIGHT_STRING(typ, call.Len) + c.asm.Fn_WEIGHT_STRING(typ, *call.Len) c.asm.jumpDestination(skip) return ctype{Type: sqltypes.VarBinary, Flag: flagNullable | flagNull, Col: collationBinary}, nil } @@ -577,7 +576,7 @@ func (call *builtinWeightString) compile(c *compiler) (ctype, error) { } var strLen int if call.Cast == "char" { - strLen = call.Len + strLen = *call.Len } c.asm.Fn_WEIGHT_STRING(typ, strLen) @@ -652,7 +651,7 @@ func (call 
builtinLeftRight) compile(c *compiler) (ctype, error) { case str.isTextual(): col = str.Col default: - c.asm.Convert_xc(2, sqltypes.VarChar, col.Collation, 0, false) + c.asm.Convert_xc(2, sqltypes.VarChar, col.Collation, nil) } _ = c.compileToInt64(l, 1) @@ -962,7 +961,7 @@ func (call builtinTrim) compile(c *compiler) (ctype, error) { case str.isTextual(): col = str.Col default: - c.asm.Convert_xc(1, sqltypes.VarChar, col.Collation, 0, false) + c.asm.Convert_xc(1, sqltypes.VarChar, col.Collation, nil) } if len(call.Arguments) == 1 { diff --git a/go/vt/vtgate/evalengine/fn_time.go b/go/vt/vtgate/evalengine/fn_time.go index 99e0f27f755..9663cc69aab 100644 --- a/go/vt/vtgate/evalengine/fn_time.go +++ b/go/vt/vtgate/evalengine/fn_time.go @@ -180,11 +180,9 @@ var _ Expr = (*builtinYearWeek)(nil) func (call *builtinNow) eval(env *ExpressionEnv) (eval, error) { now := env.time(call.utc) if call.onlyTime { - buf := datetime.Time_hh_mm_ss.Format(now, call.prec) - return newEvalRaw(sqltypes.Time, buf, collationBinary), nil + return newEvalTime(now.Time, int(call.prec)), nil } else { - buf := datetime.DateTime_YYYY_MM_DD_hh_mm_ss.Format(now, call.prec) - return newEvalRaw(sqltypes.Datetime, buf, collationBinary), nil + return newEvalDateTime(now, int(call.prec)), nil } } @@ -196,17 +194,15 @@ func (call *builtinNow) typeof(_ *ExpressionEnv, _ []*querypb.Field) (sqltypes.T } func (call *builtinNow) compile(c *compiler) (ctype, error) { - var format *datetime.Strftime var t sqltypes.Type if call.onlyTime { - format = datetime.Time_hh_mm_ss t = sqltypes.Time + c.asm.Fn_NowTime(call.prec, call.utc) } else { - format = datetime.DateTime_YYYY_MM_DD_hh_mm_ss t = sqltypes.Datetime + c.asm.Fn_Now(call.prec, call.utc) } - c.asm.Fn_Now(t, format, call.prec, call.utc) return ctype{Type: t, Col: collationBinary}, nil } @@ -219,7 +215,7 @@ func (call *builtinSysdate) eval(env *ExpressionEnv) (eval, error) { if tz := env.currentTimezone(); tz != nil { now = now.In(tz) } - return 
newEvalRaw(sqltypes.Datetime, datetime.NewDateTimeFromStd(now).Format(call.prec), collationBinary), nil + return newEvalDateTime(datetime.NewDateTimeFromStd(now), int(call.prec)), nil } func (call *builtinSysdate) typeof(_ *ExpressionEnv, _ []*querypb.Field) (sqltypes.Type, typeFlag) { @@ -237,7 +233,7 @@ func (call *builtinSysdate) constant() bool { func (call *builtinCurdate) eval(env *ExpressionEnv) (eval, error) { now := env.time(false) - return newEvalRaw(sqltypes.Date, datetime.Date_YYYY_MM_DD.Format(now, 0), collationBinary), nil + return newEvalDate(now.Date), nil } func (call *builtinCurdate) typeof(_ *ExpressionEnv, _ []*querypb.Field) (sqltypes.Type, typeFlag) { @@ -255,7 +251,7 @@ func (call *builtinCurdate) constant() bool { func (call *builtinUtcDate) eval(env *ExpressionEnv) (eval, error) { now := env.time(true) - return newEvalRaw(sqltypes.Date, datetime.Date_YYYY_MM_DD.Format(now, 0), collationBinary), nil + return newEvalDate(now.Date), nil } func (call *builtinUtcDate) typeof(_ *ExpressionEnv, _ []*querypb.Field) (sqltypes.Type, typeFlag) { @@ -326,7 +322,7 @@ func (call *builtinDateFormat) compile(c *compiler) (ctype, error) { switch format.Type { case sqltypes.VarChar, sqltypes.VarBinary: default: - c.asm.Convert_xb(1, sqltypes.VarBinary, 0, false) + c.asm.Convert_xb(1, sqltypes.VarBinary, nil) } col := defaultCoercionCollation(c.cfg.Collation) @@ -417,13 +413,13 @@ func (call *builtinConvertTz) compile(c *compiler) (ctype, error) { switch { case from.isTextual(): default: - c.asm.Convert_xb(2, sqltypes.VarBinary, 0, false) + c.asm.Convert_xb(2, sqltypes.VarBinary, nil) } switch { case to.isTextual(): default: - c.asm.Convert_xb(1, sqltypes.VarBinary, 0, false) + c.asm.Convert_xb(1, sqltypes.VarBinary, nil) } switch n.Type { @@ -726,7 +722,7 @@ func (call *builtinFromUnixtime) compile(c *compiler) (ctype, error) { switch format.Type { case sqltypes.VarChar, sqltypes.VarBinary: default: - c.asm.Convert_xb(1, sqltypes.VarBinary, 0, false) + 
c.asm.Convert_xb(1, sqltypes.VarBinary, nil) } col := defaultCoercionCollation(c.cfg.Collation) diff --git a/go/vt/vtgate/evalengine/format.go b/go/vt/vtgate/evalengine/format.go index 446d3e0f28f..5a2c8efa275 100644 --- a/go/vt/vtgate/evalengine/format.go +++ b/go/vt/vtgate/evalengine/format.go @@ -171,7 +171,7 @@ func (c *builtinWeightString) format(w *formatter, depth int) { c.Expr.format(w, depth) if c.Cast != "" { - fmt.Fprintf(w, " AS %s(%d)", strings.ToUpper(c.Cast), c.Len) + fmt.Fprintf(w, " as %s(%d)", c.Cast, *c.Len) } w.WriteByte(')') } @@ -195,10 +195,10 @@ func (c *ConvertExpr) format(buf *formatter, depth int) { c.Inner.format(buf, depth) switch { - case c.HasLength && c.HasScale: - fmt.Fprintf(buf, ", %s(%d,%d)", c.Type, c.Length, c.Scale) - case c.HasLength: - fmt.Fprintf(buf, ", %s(%d)", c.Type, c.Length) + case c.Length != nil && c.Scale != nil: + _, _ = fmt.Fprintf(buf, ", %s(%d,%d)", c.Type, *c.Length, *c.Scale) + case c.Length != nil: + _, _ = fmt.Fprintf(buf, ", %s(%d)", c.Type, *c.Length) default: fmt.Fprintf(buf, ", %s", c.Type) } diff --git a/go/vt/vtgate/evalengine/translate_builtin.go b/go/vt/vtgate/evalengine/translate_builtin.go index 4a4c3f1d9d2..563da6ea320 100644 --- a/go/vt/vtgate/evalengine/translate_builtin.go +++ b/go/vt/vtgate/evalengine/translate_builtin.go @@ -600,10 +600,7 @@ func (ast *astCompiler) translateCallable(call sqlparser.Callable) (Expr, error) } if call.As != nil { ws.Cast = strings.ToLower(call.As.Type) - ws.Len, ws.HasLen, err = ast.translateIntegral(call.As.Length) - if err != nil { - return nil, err - } + ws.Len = call.As.Length } return &ws, nil diff --git a/go/vt/vtgate/evalengine/translate_convert.go b/go/vt/vtgate/evalengine/translate_convert.go index 5560315f8e2..ab94854c91c 100644 --- a/go/vt/vtgate/evalengine/translate_convert.go +++ b/go/vt/vtgate/evalengine/translate_convert.go @@ -71,34 +71,30 @@ func (ast *astCompiler) translateConvertExpr(expr sqlparser.Expr, convertType *s return nil, err } - 
convert.Length, convert.HasLength, err = ast.translateIntegral(convertType.Length) - if err != nil { - return nil, err - } - - convert.Scale, convert.HasScale, err = ast.translateIntegral(convertType.Scale) - if err != nil { - return nil, err - } - + convert.Length = convertType.Length + convert.Scale = convertType.Scale convert.Type = strings.ToUpper(convertType.Type) switch convert.Type { case "DECIMAL": - if convert.Length < convert.Scale { - return nil, vterrors.Errorf(vtrpcpb.Code_INVALID_ARGUMENT, - "For float(M,D), double(M,D) or decimal(M,D), M must be >= D (column '%s').", - "", // TODO: column name - ) - } - if convert.Length > decimal.MyMaxPrecision { - return nil, vterrors.Errorf(vtrpcpb.Code_INVALID_ARGUMENT, - "Too-big precision %d specified for '%s'. Maximum is %d.", - convert.Length, sqlparser.String(expr), decimal.MyMaxPrecision) - } - if convert.Scale > decimal.MyMaxScale { - return nil, vterrors.Errorf(vtrpcpb.Code_INVALID_ARGUMENT, - "Too big scale %d specified for column '%s'. Maximum is %d.", - convert.Scale, sqlparser.String(expr), decimal.MyMaxScale) + if convert.Length != nil { + if *convert.Length > decimal.MyMaxPrecision { + return nil, vterrors.Errorf(vtrpcpb.Code_INVALID_ARGUMENT, + "Too-big precision %d specified for '%s'. Maximum is %d.", + *convert.Length, sqlparser.String(expr), decimal.MyMaxPrecision) + } + if convert.Scale != nil { + if *convert.Scale > decimal.MyMaxScale { + return nil, vterrors.Errorf(vtrpcpb.Code_INVALID_ARGUMENT, + "Too big scale %d specified for column '%s'. 
Maximum is %d.", + *convert.Scale, sqlparser.String(expr), decimal.MyMaxScale) + } + if *convert.Length < *convert.Scale { + return nil, vterrors.Errorf(vtrpcpb.Code_INVALID_ARGUMENT, + "For float(M,D), double(M,D) or decimal(M,D), M must be >= D (column '%s').", + "", // TODO: column name + ) + } + } } case "NCHAR": convert.Collation = collations.CollationUtf8mb3ID diff --git a/go/vt/vtgate/evalengine/translate_test.go b/go/vt/vtgate/evalengine/translate_test.go index d9ce3812abb..9959567f31c 100644 --- a/go/vt/vtgate/evalengine/translate_test.go +++ b/go/vt/vtgate/evalengine/translate_test.go @@ -88,7 +88,7 @@ func TestTranslateSimplification(t *testing.T) { {"coalesce(NULL, NULL)", ok("COALESCE(NULL, NULL)"), ok("NULL")}, {"coalesce(NULL)", ok("COALESCE(NULL)"), ok("NULL")}, {"weight_string('foobar')", ok(`WEIGHT_STRING(VARCHAR("foobar"))`), ok("VARBINARY(\"\\x1c\\xe5\\x1d\\xdd\\x1d\\xdd\\x1c`\\x1cG\\x1e3\")")}, - {"weight_string('foobar' as char(12))", ok(`WEIGHT_STRING(VARCHAR("foobar") AS CHAR(12))`), ok("VARBINARY(\"\\x1c\\xe5\\x1d\\xdd\\x1d\\xdd\\x1c`\\x1cG\\x1e3\")")}, + {"weight_string('foobar' as char(12))", ok(`WEIGHT_STRING(VARCHAR("foobar") as char(12))`), ok("VARBINARY(\"\\x1c\\xe5\\x1d\\xdd\\x1d\\xdd\\x1c`\\x1cG\\x1e3\")")}, {"case when 1 = 1 then 2 else 3 end", ok("CASE WHEN INT64(1) = INT64(1) THEN INT64(2) ELSE INT64(3)"), ok("INT64(2)")}, {"case when null then 2 when 12 = 4 then 'ohnoes' else 42 end", ok(`CASE WHEN NULL THEN INT64(2) WHEN INT64(12) = INT64(4) THEN VARCHAR("ohnoes") ELSE INT64(42)`), ok(`VARCHAR("42")`)}, {"convert('a', char(2) character set utf8mb4)", ok(`CONVERT(VARCHAR("a"), CHAR(2) CHARACTER SET utf8mb4_0900_ai_ci)`), ok(`VARCHAR("a")`)}, diff --git a/go/vt/vtgate/executor.go b/go/vt/vtgate/executor.go index ad4db357057..805b35ad5de 100644 --- a/go/vt/vtgate/executor.go +++ b/go/vt/vtgate/executor.go @@ -989,7 +989,7 @@ func (e *Executor) showVitessReplicationStatus(ctx context.Context, filter *sqlp tabletHostPort := 
ts.GetTabletHostPort() throttlerStatus, err := getTabletThrottlerStatus(tabletHostPort) if err != nil { - log.Warningf("Could not get throttler status from %s: %v", tabletHostPort, err) + log.Warningf("Could not get throttler status from %s: %v", topoproto.TabletAliasString(ts.Tablet.Alias), err) } replSourceHost := "" @@ -997,7 +997,7 @@ func (e *Executor) showVitessReplicationStatus(ctx context.Context, filter *sqlp replIOThreadHealth := "" replSQLThreadHealth := "" replLastError := "" - replLag := int64(-1) + replLag := "-1" // A string to support NULL as a value sql := "show slave status" results, err := e.txConn.tabletGateway.Execute(ctx, ts.Target, sql, nil, 0, 0, nil) if err != nil || results == nil { @@ -1008,8 +1008,25 @@ func (e *Executor) showVitessReplicationStatus(ctx context.Context, filter *sqlp replIOThreadHealth = row["Slave_IO_Running"].ToString() replSQLThreadHealth = row["Slave_SQL_Running"].ToString() replLastError = row["Last_Error"].ToString() - if ts.Stats != nil { - replLag = int64(ts.Stats.ReplicationLagSeconds) + // We cannot check the tablet's tabletenv config from here so + // we only use the tablet's stat -- which is managed by the + // ReplicationTracker -- if we can tell that it's enabled, + // meaning that it has a non-zero value. If it's actually + // enabled AND zero (rather than the zeroval), then mysqld + // should also return 0 so in this case the value is correct + // and equivalent either way. The only reason that we would + // want to use the ReplicationTracker based value, when we + // can, is because the polling method allows us to get the + // estimated lag value when replication is not running (based + // on how long we've seen that it's not been running). 
+ if ts.Stats != nil && ts.Stats.ReplicationLagSeconds > 0 { // Use the value we get from the ReplicationTracker + replLag = fmt.Sprintf("%d", ts.Stats.ReplicationLagSeconds) + } else { // Use the value from mysqld + if row["Seconds_Behind_Master"].IsNull() { + replLag = strings.ToUpper(sqltypes.NullStr) // Uppercase to match mysqld's output in SHOW REPLICA STATUS + } else { + replLag = row["Seconds_Behind_Master"].ToString() + } } } replicationHealth := fmt.Sprintf("{\"EventStreamRunning\":\"%s\",\"EventApplierRunning\":\"%s\",\"LastError\":\"%s\"}", replIOThreadHealth, replSQLThreadHealth, replLastError) @@ -1022,7 +1039,7 @@ func (e *Executor) showVitessReplicationStatus(ctx context.Context, filter *sqlp ts.Tablet.Hostname, fmt.Sprintf("%s:%d", replSourceHost, replSourcePort), replicationHealth, - fmt.Sprintf("%d", replLag), + replLag, throttlerStatus, )) } @@ -1865,11 +1882,14 @@ func (e *Executor) checkThatPlanIsValid(stmt sqlparser.Statement, plan *engine.P return vterrors.Errorf(vtrpcpb.Code_INVALID_ARGUMENT, "plan includes scatter, which is disallowed using the `no_scatter` command line argument") } +// getTabletThrottlerStatus uses HTTP to get the throttler status +// on a tablet. It uses HTTP because the CheckThrottler RPC is a +// tmclient RPC and you cannot use tmclient outside of a tablet. 
func getTabletThrottlerStatus(tabletHostPort string) (string, error) { client := http.Client{ Timeout: 100 * time.Millisecond, } - resp, err := client.Get(fmt.Sprintf("http://%s/throttler/check?app=vtgate", tabletHostPort)) + resp, err := client.Get(fmt.Sprintf("http://%s/throttler/check-self", tabletHostPort)) if err != nil { return "", err } @@ -2088,7 +2108,9 @@ func (e *Executor) handleLoadData(ctx context.Context, safeSession *SafeSession, } tableConfig, err := e.VSchema().FindSplitTable(destKeyspace, tbName) - + if err != nil { + return nil, err + } vCursor, err := newVCursorImpl(safeSession, comments, e, logStats, e.vm, e.VSchema(), e.resolver.resolver, e.serv, e.warnShardedOnly, e.pv) if err != nil { return nil, err diff --git a/go/vt/vtgate/executor_select_test.go b/go/vt/vtgate/executor_select_test.go index da1385e491d..b5fe86e6745 100644 --- a/go/vt/vtgate/executor_select_test.go +++ b/go/vt/vtgate/executor_select_test.go @@ -26,28 +26,25 @@ import ( "testing" "time" - _flag "vitess.io/vitess/go/internal/flag" - "vitess.io/vitess/go/mysql/collations" - "vitess.io/vitess/go/streamlog" - "vitess.io/vitess/go/vt/vtgate/logstats" - - "vitess.io/vitess/go/vt/sqlparser" - "github.com/google/go-cmp/cmp" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + _flag "vitess.io/vitess/go/internal/flag" + "vitess.io/vitess/go/mysql/collations" "vitess.io/vitess/go/sqltypes" + "vitess.io/vitess/go/streamlog" "vitess.io/vitess/go/test/utils" "vitess.io/vitess/go/vt/discovery" - "vitess.io/vitess/go/vt/vterrors" - _ "vitess.io/vitess/go/vt/vtgate/vindexes" - "vitess.io/vitess/go/vt/vttablet/sandboxconn" - querypb "vitess.io/vitess/go/vt/proto/query" topodatapb "vitess.io/vitess/go/vt/proto/topodata" vtgatepb "vitess.io/vitess/go/vt/proto/vtgate" vtrpcpb "vitess.io/vitess/go/vt/proto/vtrpc" + "vitess.io/vitess/go/vt/sqlparser" + "vitess.io/vitess/go/vt/vterrors" + "vitess.io/vitess/go/vt/vtgate/logstats" + _ "vitess.io/vitess/go/vt/vtgate/vindexes" 
+ "vitess.io/vitess/go/vt/vttablet/sandboxconn" ) func TestSelectNext(t *testing.T) { @@ -1028,7 +1025,7 @@ func TestLastInsertIDInVirtualTable(t *testing.T) { _, err := executorExec(ctx, executor, session, "select * from (select last_insert_id()) as t", nil) require.NoError(t, err) wantQueries := []*querypb.BoundQuery{{ - Sql: "select t.`last_insert_id()` from (select :__lastInsertId as `last_insert_id()` from dual) as t", + Sql: "select `last_insert_id()` from (select :__lastInsertId as `last_insert_id()` from dual) as t", BindVariables: map[string]*querypb.BindVariable{"__lastInsertId": sqltypes.Uint64BindVariable(0)}, }} @@ -1874,7 +1871,7 @@ func TestSelectScatterOrderBy(t *testing.T) { require.NoError(t, err) wantQueries := []*querypb.BoundQuery{{ - Sql: "select col1, col2, weight_string(col2) from `user` order by col2 desc", + Sql: "select col1, col2, weight_string(col2) from `user` order by `user`.col2 desc", BindVariables: map[string]*querypb.BindVariable{}, }} for _, conn := range conns { @@ -1945,7 +1942,7 @@ func TestSelectScatterOrderByVarChar(t *testing.T) { require.NoError(t, err) wantQueries := []*querypb.BoundQuery{{ - Sql: "select col1, textcol from `user` order by textcol desc", + Sql: "select col1, textcol from `user` order by `user`.textcol desc", BindVariables: map[string]*querypb.BindVariable{}, }} for _, conn := range conns { @@ -2011,7 +2008,7 @@ func TestStreamSelectScatterOrderBy(t *testing.T) { require.NoError(t, err) wantQueries := []*querypb.BoundQuery{{ - Sql: "select id, col, weight_string(col) from `user` order by col desc", + Sql: "select id, col, weight_string(col) from `user` order by `user`.col desc", BindVariables: map[string]*querypb.BindVariable{}, }} for _, conn := range conns { @@ -2071,7 +2068,7 @@ func TestStreamSelectScatterOrderByVarChar(t *testing.T) { require.NoError(t, err) wantQueries := []*querypb.BoundQuery{{ - Sql: "select id, textcol from `user` order by textcol desc", + Sql: "select id, textcol from `user` 
order by `user`.textcol desc", BindVariables: map[string]*querypb.BindVariable{}, }} for _, conn := range conns { @@ -2267,7 +2264,7 @@ func TestSelectScatterLimit(t *testing.T) { require.NoError(t, err) wantQueries := []*querypb.BoundQuery{{ - Sql: "select col1, col2, weight_string(col2) from `user` order by col2 desc limit :__upper_limit", + Sql: "select col1, col2, weight_string(col2) from `user` order by `user`.col2 desc limit :__upper_limit", BindVariables: map[string]*querypb.BindVariable{"__upper_limit": sqltypes.Int64BindVariable(3)}, }} for _, conn := range conns { @@ -2339,7 +2336,7 @@ func TestStreamSelectScatterLimit(t *testing.T) { require.NoError(t, err) wantQueries := []*querypb.BoundQuery{{ - Sql: "select col1, col2, weight_string(col2) from `user` order by col2 desc limit :__upper_limit", + Sql: "select col1, col2, weight_string(col2) from `user` order by `user`.col2 desc limit :__upper_limit", BindVariables: map[string]*querypb.BindVariable{"__upper_limit": sqltypes.Int64BindVariable(3)}, }} for _, conn := range conns { @@ -4125,7 +4122,7 @@ func TestSelectView(t *testing.T) { _, err = executor.Execute(context.Background(), nil, nil, "TestSelectView", session, "select * from user_details_view", nil) require.NoError(t, err) wantQueries := []*querypb.BoundQuery{{ - Sql: "select user_details_view.id, user_details_view.col from (select `user`.id, user_extra.col from `user`, user_extra where `user`.id = user_extra.user_id) as user_details_view", + Sql: "select id, col from (select `user`.id, user_extra.col from `user`, user_extra where `user`.id = user_extra.user_id) as user_details_view", BindVariables: map[string]*querypb.BindVariable{}, }} utils.MustMatch(t, wantQueries, sbc.Queries) @@ -4134,7 +4131,7 @@ func TestSelectView(t *testing.T) { _, err = executor.Execute(context.Background(), nil, nil, "TestSelectView", session, "select * from user_details_view where id = 2", nil) require.NoError(t, err) wantQueries = []*querypb.BoundQuery{{ - Sql: 
"select user_details_view.id, user_details_view.col from (select `user`.id, user_extra.col from `user`, user_extra where `user`.id = :id /* INT64 */ and `user`.id = user_extra.user_id) as user_details_view", + Sql: "select id, col from (select `user`.id, user_extra.col from `user`, user_extra where `user`.id = :id /* INT64 */ and `user`.id = user_extra.user_id) as user_details_view", BindVariables: map[string]*querypb.BindVariable{ "id": sqltypes.Int64BindVariable(2), }, @@ -4147,7 +4144,7 @@ func TestSelectView(t *testing.T) { bvtg1, _ := sqltypes.BuildBindVariable([]int64{1, 2, 3, 4, 5}) bvals, _ := sqltypes.BuildBindVariable([]int64{1, 2}) wantQueries = []*querypb.BoundQuery{{ - Sql: "select user_details_view.id, user_details_view.col from (select `user`.id, user_extra.col from `user`, user_extra where `user`.id in ::__vals and `user`.id = user_extra.user_id) as user_details_view", + Sql: "select id, col from (select `user`.id, user_extra.col from `user`, user_extra where `user`.id in ::__vals and `user`.id = user_extra.user_id) as user_details_view", BindVariables: map[string]*querypb.BindVariable{ "vtg1": bvtg1, "__vals": bvals, diff --git a/go/vt/vtgate/executor_test.go b/go/vt/vtgate/executor_test.go index 5ec32adc236..0b70c049743 100644 --- a/go/vt/vtgate/executor_test.go +++ b/go/vt/vtgate/executor_test.go @@ -47,17 +47,18 @@ import ( "vitess.io/vitess/go/sqltypes" "vitess.io/vitess/go/vt/callerid" "vitess.io/vitess/go/vt/discovery" - querypb "vitess.io/vitess/go/vt/proto/query" - topodatapb "vitess.io/vitess/go/vt/proto/topodata" - vschemapb "vitess.io/vitess/go/vt/proto/vschema" - vtgatepb "vitess.io/vitess/go/vt/proto/vtgate" - vtrpcpb "vitess.io/vitess/go/vt/proto/vtrpc" "vitess.io/vitess/go/vt/sqlparser" "vitess.io/vitess/go/vt/vtgate/buffer" "vitess.io/vitess/go/vt/vtgate/logstats" "vitess.io/vitess/go/vt/vtgate/vindexes" "vitess.io/vitess/go/vt/vtgate/vschemaacl" "vitess.io/vitess/go/vt/vtgate/vtgateservice" + + querypb 
"vitess.io/vitess/go/vt/proto/query" + topodatapb "vitess.io/vitess/go/vt/proto/topodata" + vschemapb "vitess.io/vitess/go/vt/proto/vschema" + vtgatepb "vitess.io/vitess/go/vt/proto/vtgate" + vtrpcpb "vitess.io/vitess/go/vt/proto/vtrpc" ) func TestExecutorResultsExceeded(t *testing.T) { diff --git a/go/vt/vtgate/load_data.go b/go/vt/vtgate/load_data.go index e5b23dfad98..4189464a177 100644 --- a/go/vt/vtgate/load_data.go +++ b/go/vt/vtgate/load_data.go @@ -11,9 +11,6 @@ import ( "time" "github.com/spf13/pflag" - "vitess.io/vitess/go/vt/servenv" - "vitess.io/vitess/go/vt/vtgate/logstats" - "golang.org/x/net/context" "vitess.io/vitess/go/hack" @@ -23,10 +20,12 @@ import ( "vitess.io/vitess/go/vt/log" querypb "vitess.io/vitess/go/vt/proto/query" vtrpcpb "vitess.io/vitess/go/vt/proto/vtrpc" + "vitess.io/vitess/go/vt/servenv" "vitess.io/vitess/go/vt/sqlparser" "vitess.io/vitess/go/vt/srvtopo" "vitess.io/vitess/go/vt/vterrors" "vitess.io/vitess/go/vt/vtgate/engine" + "vitess.io/vitess/go/vt/vtgate/logstats" "vitess.io/vitess/go/vt/vtgate/vindexes" ) @@ -632,9 +631,9 @@ func (l *LoadDataInfo) getLoadShardedRoute(ctx context.Context, vcursor engine.V return nil, vterrors.Errorf(vtrpcpb.Code_FAILED_PRECONDITION, "load split table fields termined err ,please check") } if rowType, ok := rowTypeList[vIdx]; ok { - v, err = sqltypes.NewValue(rowType, []byte(row[tbColValues])) + _, _ = sqltypes.NewValue(rowType, []byte(row[tbColValues])) } else { - v, err = sqltypes.NewValue(sqltypes.Char, []byte(row[tbColValues])) + _, _ = sqltypes.NewValue(sqltypes.Char, []byte(row[tbColValues])) } v, err := sqltypes.NewValue(sqltypes.Char, []byte(row[tbColValues])) if err != nil { @@ -698,7 +697,7 @@ func (l *LoadDataInfo) getLoadShardedRoute(ctx context.Context, vcursor engine.V for _, val := range tbVindexRowsValues { firstCols = append(firstCols, val[0]) } - tableDestinations, err = tableVindex.(vindexes.TableSingleColumn).Map(ctx, vcursor, firstCols) + tableDestinations, err = 
tableVindex.Map(ctx, vcursor, firstCols) default: return nil, vterrors.Errorf(vtrpcpb.Code_INTERNAL, "unsupported tableVindex: %v", tableVindex) } diff --git a/go/vt/vtgate/planbuilder/collations_test.go b/go/vt/vtgate/planbuilder/collations_test.go index 24fb038b4c2..c845200d46d 100644 --- a/go/vt/vtgate/planbuilder/collations_test.go +++ b/go/vt/vtgate/planbuilder/collations_test.go @@ -20,11 +20,10 @@ import ( "fmt" "testing" - "vitess.io/vitess/go/test/vschemawrapper" - "github.com/stretchr/testify/require" "vitess.io/vitess/go/mysql/collations" + "vitess.io/vitess/go/test/vschemawrapper" "vitess.io/vitess/go/vt/vtgate/engine" ) diff --git a/go/vt/vtgate/planbuilder/dml_planner.go b/go/vt/vtgate/planbuilder/dml_planner.go index ca7f9291d97..cab6c7ca78c 100644 --- a/go/vt/vtgate/planbuilder/dml_planner.go +++ b/go/vt/vtgate/planbuilder/dml_planner.go @@ -54,6 +54,7 @@ func rewriteRoutedTables(stmt sqlparser.Statement, vschema plancontext.VSchema) // if the user hasn't specified an alias, we'll insert one here so the old table name still works aliasTbl.As = sqlparser.NewIdentifierCS(name.String()) } + tableName.Qualifier = sqlparser.IdentifierCS{} tableName.Name = sqlparser.NewIdentifierCS(vschemaTable.Name.String()) aliasTbl.Expr = tableName } diff --git a/go/vt/vtgate/planbuilder/operators/aggregation_pushing.go b/go/vt/vtgate/planbuilder/operators/aggregation_pushing.go index 657d8d129fc..184e3f947c3 100644 --- a/go/vt/vtgate/planbuilder/operators/aggregation_pushing.go +++ b/go/vt/vtgate/planbuilder/operators/aggregation_pushing.go @@ -432,17 +432,12 @@ var errAbortAggrPushing = fmt.Errorf("abort aggregation pushing") func addColumnsFromLHSInJoinPredicates(ctx *plancontext.PlanningContext, rootAggr *Aggregator, join *ApplyJoin, lhs *joinPusher) error { for _, pred := range join.JoinPredicates { for _, bve := range pred.LHSExprs { - expr := bve.Expr - wexpr, err := rootAggr.QP.GetSimplifiedExpr(ctx, expr) - if err != nil { - return err - } - idx, found := 
canReuseColumn(ctx, lhs.pushed.Columns, expr, extractExpr) + idx, found := canReuseColumn(ctx, lhs.pushed.Columns, bve.Expr, extractExpr) if !found { idx = len(lhs.pushed.Columns) - lhs.pushed.Columns = append(lhs.pushed.Columns, aeWrap(expr)) + lhs.pushed.Columns = append(lhs.pushed.Columns, aeWrap(bve.Expr)) } - _, found = canReuseColumn(ctx, lhs.pushed.Grouping, wexpr, func(by GroupBy) sqlparser.Expr { + _, found = canReuseColumn(ctx, lhs.pushed.Grouping, bve.Expr, func(by GroupBy) sqlparser.Expr { return by.SimplifiedExpr }) @@ -451,8 +446,8 @@ func addColumnsFromLHSInJoinPredicates(ctx *plancontext.PlanningContext, rootAgg } lhs.pushed.Grouping = append(lhs.pushed.Grouping, GroupBy{ - Inner: expr, - SimplifiedExpr: wexpr, + Inner: bve.Expr, + SimplifiedExpr: bve.Expr, ColOffset: idx, WSOffset: -1, }) diff --git a/go/vt/vtgate/planbuilder/operators/distinct.go b/go/vt/vtgate/planbuilder/operators/distinct.go index 158d327c19c..a521f12c8db 100644 --- a/go/vt/vtgate/planbuilder/operators/distinct.go +++ b/go/vt/vtgate/planbuilder/operators/distinct.go @@ -52,16 +52,11 @@ func (d *Distinct) planOffsets(ctx *plancontext.PlanningContext) error { return err } for idx, col := range columns { - e, err := d.QP.GetSimplifiedExpr(ctx, col.Expr) - if err != nil { - // ambiguous columns are not a problem for DISTINCT - e = col.Expr - } var wsCol *int - typ, coll, _ := ctx.SemTable.TypeForExpr(e) + typ, coll, _ := ctx.SemTable.TypeForExpr(col.Expr) - if ctx.SemTable.NeedsWeightString(e) { - offset, err := d.Source.AddColumn(ctx, true, false, aeWrap(weightStringFor(e))) + if ctx.SemTable.NeedsWeightString(col.Expr) { + offset, err := d.Source.AddColumn(ctx, true, false, aeWrap(weightStringFor(col.Expr))) if err != nil { return err } diff --git a/go/vt/vtgate/planbuilder/operators/queryprojection.go b/go/vt/vtgate/planbuilder/operators/queryprojection.go index 361af13d156..59625430eee 100644 --- a/go/vt/vtgate/planbuilder/operators/queryprojection.go +++ 
b/go/vt/vtgate/planbuilder/operators/queryprojection.go @@ -21,7 +21,6 @@ import ( "fmt" "io" "slices" - "sort" "strings" "vitess.io/vitess/go/mysql/collations" @@ -216,9 +215,7 @@ func createQPFromSelect(ctx *plancontext.PlanningContext, sel *sqlparser.Select) if err := qp.addGroupBy(ctx, sel.GroupBy); err != nil { return nil, err } - if err := qp.addOrderBy(ctx, sel.OrderBy); err != nil { - return nil, err - } + qp.addOrderBy(ctx, sel.OrderBy) if !qp.HasAggr && sel.Having != nil { qp.HasAggr = containsAggr(sel.Having.Expr) } @@ -339,10 +336,7 @@ func createQPFromUnion(ctx *plancontext.PlanningContext, union *sqlparser.Union) return nil, err } - err = qp.addOrderBy(ctx, union.OrderBy) - if err != nil { - return nil, err - } + qp.addOrderBy(ctx, union.OrderBy) return qp, nil } @@ -364,29 +358,24 @@ func (es *expressionSet) add(ctx *plancontext.PlanningContext, e sqlparser.Expr) return true } -func (qp *QueryProjection) addOrderBy(ctx *plancontext.PlanningContext, orderBy sqlparser.OrderBy) error { +func (qp *QueryProjection) addOrderBy(ctx *plancontext.PlanningContext, orderBy sqlparser.OrderBy) { canPushSorting := true es := &expressionSet{} for _, order := range orderBy { - simpleExpr, err := qp.GetSimplifiedExpr(ctx, order.Expr) - if err != nil { - return err - } - if sqlparser.IsNull(simpleExpr) { + if sqlparser.IsNull(order.Expr) { // ORDER BY null can safely be ignored continue } - if !es.add(ctx, simpleExpr) { + if !es.add(ctx, order.Expr) { continue } qp.OrderExprs = append(qp.OrderExprs, ops.OrderBy{ Inner: sqlparser.CloneRefOfOrder(order), - SimplifiedExpr: simpleExpr, + SimplifiedExpr: order.Expr, }) - canPushSorting = canPushSorting && !containsAggr(simpleExpr) + canPushSorting = canPushSorting && !containsAggr(order.Expr) } qp.CanPushSorting = canPushSorting - return nil } func (qp *QueryProjection) calculateDistinct(ctx *plancontext.PlanningContext) error { @@ -435,20 +424,16 @@ func (qp *QueryProjection) addGroupBy(ctx *plancontext.PlanningContext, 
groupBy es := &expressionSet{} for _, group := range groupBy { selectExprIdx, aliasExpr := qp.FindSelectExprIndexForExpr(ctx, group) - simpleExpr, err := qp.GetSimplifiedExpr(ctx, group) - if err != nil { - return err - } - if err = checkForInvalidGroupingExpressions(simpleExpr); err != nil { + if err := checkForInvalidGroupingExpressions(group); err != nil { return err } - if !es.add(ctx, simpleExpr) { + if !es.add(ctx, group) { continue } - groupBy := NewGroupBy(group, simpleExpr, aliasExpr) + groupBy := NewGroupBy(group, group, aliasExpr) groupBy.InnerIndex = selectExprIdx qp.groupByExprs = append(qp.groupByExprs, groupBy) @@ -470,64 +455,6 @@ func (qp *QueryProjection) isExprInGroupByExprs(ctx *plancontext.PlanningContext return false } -// GetSimplifiedExpr takes an expression used in ORDER BY or GROUP BY, and returns an expression that is simpler to evaluate -func (qp *QueryProjection) GetSimplifiedExpr(ctx *plancontext.PlanningContext, e sqlparser.Expr) (found sqlparser.Expr, err error) { - if qp == nil { - return e, nil - } - // If the ORDER BY is against a column alias, we need to remember the expression - // behind the alias. The weightstring(.) calls needs to be done against that expression and not the alias. - // Eg - select music.foo as bar, weightstring(music.foo) from music order by bar - - in, isColName := e.(*sqlparser.ColName) - if !(isColName && in.Qualifier.IsEmpty()) { - // we are only interested in unqualified column names. 
if it's not a column name and not unqualified, we're done - return e, nil - } - - check := func(e sqlparser.Expr) error { - if found != nil && !ctx.SemTable.EqualsExprWithDeps(found, e) { - return &semantics.AmbiguousColumnError{Column: sqlparser.String(in)} - } - found = e - return nil - } - - for _, selectExpr := range qp.SelectExprs { - ae, ok := selectExpr.Col.(*sqlparser.AliasedExpr) - if !ok { - continue - } - aliased := !ae.As.IsEmpty() - if aliased { - if in.Name.Equal(ae.As) { - err = check(ae.Expr) - if err != nil { - return nil, err - } - } - } else { - seCol, ok := ae.Expr.(*sqlparser.ColName) - if !ok { - continue - } - if seCol.Name.Equal(in.Name) { - // If the column name matches, we have a match, even if the table name is not listed - err = check(ae.Expr) - if err != nil { - return nil, err - } - } - } - } - - if found == nil { - found = e - } - - return found, nil -} - // toString should only be used for tests func (qp *QueryProjection) toString() string { type output struct { @@ -793,46 +720,6 @@ func (qp *QueryProjection) FindSelectExprIndexForExpr(ctx *plancontext.PlanningC return nil, nil } -// OldAlignGroupByAndOrderBy TODO Remove once all of horizon planning is done on the operators -func (qp *QueryProjection) OldAlignGroupByAndOrderBy(ctx *plancontext.PlanningContext) { - // The ORDER BY can be performed before the OA - - var newGrouping []GroupBy - if len(qp.OrderExprs) == 0 { - // The query didn't ask for any particular order, so we are free to add arbitrary ordering. - // We'll align the grouping and ordering by the output columns - newGrouping = qp.GetGrouping() - SortGrouping(newGrouping) - for _, groupBy := range newGrouping { - qp.OrderExprs = append(qp.OrderExprs, groupBy.AsOrderBy()) - } - } else { - // Here we align the GROUP BY and ORDER BY. 
- // First step is to make sure that the GROUP BY is in the same order as the ORDER BY - used := make([]bool, len(qp.groupByExprs)) - for _, orderExpr := range qp.OrderExprs { - for i, groupingExpr := range qp.groupByExprs { - if !used[i] && ctx.SemTable.EqualsExpr(groupingExpr.SimplifiedExpr, orderExpr.SimplifiedExpr) { - newGrouping = append(newGrouping, groupingExpr) - used[i] = true - } - } - } - if len(newGrouping) != len(qp.groupByExprs) { - // we are missing some groupings. We need to add them both to the new groupings list, but also to the ORDER BY - for i, added := range used { - if !added { - groupBy := qp.groupByExprs[i] - newGrouping = append(newGrouping, groupBy) - qp.OrderExprs = append(qp.OrderExprs, groupBy.AsOrderBy()) - } - } - } - } - - qp.groupByExprs = newGrouping -} - // AlignGroupByAndOrderBy aligns the group by and order by columns, so they are in the same order // The GROUP BY clause is a set - the order between the elements does not make any difference, // so we can simply re-arrange the column order @@ -902,18 +789,14 @@ func (qp *QueryProjection) useGroupingOverDistinct(ctx *plancontext.PlanningCont // not an alias Expr, cannot continue forward. return false, nil } - sExpr, err := qp.GetSimplifiedExpr(ctx, ae.Expr) - if err != nil { - return false, err - } // check if the grouping already exists on that column. 
found := slices.IndexFunc(qp.groupByExprs, func(gb GroupBy) bool { - return ctx.SemTable.EqualsExprWithDeps(gb.SimplifiedExpr, sExpr) + return ctx.SemTable.EqualsExprWithDeps(gb.SimplifiedExpr, ae.Expr) }) if found != -1 { continue } - groupBy := NewGroupBy(ae.Expr, sExpr, ae) + groupBy := NewGroupBy(ae.Expr, ae.Expr, ae) selectExprIdx := idx groupBy.InnerIndex = &selectExprIdx @@ -937,12 +820,6 @@ func checkForInvalidGroupingExpressions(expr sqlparser.Expr) error { }, expr) } -func SortGrouping(a []GroupBy) { - sort.Slice(a, func(i, j int) bool { - return CompareRefInt(a[i].InnerIndex, a[j].InnerIndex) - }) -} - // CompareRefInt compares two references of integers. // In case either one is nil, it is considered to be smaller func CompareRefInt(a *int, b *int) bool { diff --git a/go/vt/vtgate/planbuilder/operators/queryprojection_test.go b/go/vt/vtgate/planbuilder/operators/queryprojection_test.go index 6a5d5feaa2d..d8ef721ee0e 100644 --- a/go/vt/vtgate/planbuilder/operators/queryprojection_test.go +++ b/go/vt/vtgate/planbuilder/operators/queryprojection_test.go @@ -47,27 +47,34 @@ func TestQP(t *testing.T) { { sql: "select 1, count(1) from user order by 1", expOrder: []ops.OrderBy{ - {Inner: &sqlparser.Order{Expr: sqlparser.NewIntLiteral("1")}, SimplifiedExpr: sqlparser.NewIntLiteral("1")}, + {Inner: &sqlparser.Order{Expr: sqlparser.NewStrLiteral("")}, SimplifiedExpr: sqlparser.NewStrLiteral("")}, }, }, { sql: "select id from user order by col, id, 1", expOrder: []ops.OrderBy{ {Inner: &sqlparser.Order{Expr: sqlparser.NewColName("col")}, SimplifiedExpr: sqlparser.NewColName("col")}, - {Inner: &sqlparser.Order{Expr: sqlparser.NewColName("id")}, SimplifiedExpr: sqlparser.NewColName("id")}, + {Inner: &sqlparser.Order{Expr: sqlparser.NewColNameWithQualifier("id", sqlparser.NewTableName("user"))}, SimplifiedExpr: sqlparser.NewColNameWithQualifier("id", sqlparser.NewTableName("user"))}, }, }, { sql: "SELECT CONCAT(last_name,', ',first_name) AS full_name FROM mytable 
ORDER BY full_name", // alias in order not supported expOrder: []ops.OrderBy{ { - Inner: &sqlparser.Order{Expr: sqlparser.NewColName("full_name")}, + Inner: &sqlparser.Order{Expr: &sqlparser.FuncExpr{ + Name: sqlparser.NewIdentifierCI("CONCAT"), + Exprs: sqlparser.SelectExprs{ + &sqlparser.AliasedExpr{Expr: sqlparser.NewColNameWithQualifier("last_name", sqlparser.NewTableName("mytable"))}, + &sqlparser.AliasedExpr{Expr: sqlparser.NewStrLiteral(", ")}, + &sqlparser.AliasedExpr{Expr: sqlparser.NewColNameWithQualifier("first_name", sqlparser.NewTableName("mytable"))}, + }, + }}, SimplifiedExpr: &sqlparser.FuncExpr{ Name: sqlparser.NewIdentifierCI("CONCAT"), Exprs: sqlparser.SelectExprs{ - &sqlparser.AliasedExpr{Expr: sqlparser.NewColName("last_name")}, + &sqlparser.AliasedExpr{Expr: sqlparser.NewColNameWithQualifier("last_name", sqlparser.NewTableName("mytable"))}, &sqlparser.AliasedExpr{Expr: sqlparser.NewStrLiteral(", ")}, - &sqlparser.AliasedExpr{Expr: sqlparser.NewColName("first_name")}, + &sqlparser.AliasedExpr{Expr: sqlparser.NewColNameWithQualifier("first_name", sqlparser.NewTableName("mytable"))}, }, }, }, @@ -134,7 +141,7 @@ func TestQPSimplifiedExpr(t *testing.T) { "Grouping": [], "OrderBy": [ "intcol asc", - "textcol asc" + "` + "`user`" + `.textcol asc" ], "Distinct": false }`, diff --git a/go/vt/vtgate/planbuilder/operators/union_merging.go b/go/vt/vtgate/planbuilder/operators/union_merging.go index 4df53b0c739..36fa7ec87e7 100644 --- a/go/vt/vtgate/planbuilder/operators/union_merging.go +++ b/go/vt/vtgate/planbuilder/operators/union_merging.go @@ -188,27 +188,32 @@ func createMergedUnion( cols := make(sqlparser.SelectExprs, len(lhsExprs)) noDeps := len(lhsExprs) != len(rhsExprs) for idx, col := range lhsExprs { - ae, ok := col.(*sqlparser.AliasedExpr) + lae, ok := col.(*sqlparser.AliasedExpr) if !ok { cols[idx] = col noDeps = true continue } - col := sqlparser.NewColName(ae.ColumnName()) + col := sqlparser.NewColName(lae.ColumnName()) cols[idx] = 
aeWrap(col) if noDeps { continue } - deps := ctx.SemTable.RecursiveDeps(ae.Expr) - ae, ok = rhsExprs[idx].(*sqlparser.AliasedExpr) + deps := ctx.SemTable.RecursiveDeps(lae.Expr) + rae, ok := rhsExprs[idx].(*sqlparser.AliasedExpr) if !ok { noDeps = true continue } - deps = deps.Merge(ctx.SemTable.RecursiveDeps(ae.Expr)) + deps = deps.Merge(ctx.SemTable.RecursiveDeps(rae.Expr)) + rt, _, foundR := ctx.SemTable.TypeForExpr(rae.Expr) + lt, _, foundL := ctx.SemTable.TypeForExpr(lae.Expr) + if foundR && foundL && rt == lt { + ctx.SemTable.CopySemanticInfo(rae.Expr, col) + ctx.SemTable.CopySemanticInfo(lae.Expr, col) + } ctx.SemTable.Recursive[col] = deps - ctx.SemTable.ExprTypes[col] = ctx.SemTable.ExprTypes[ae.Expr] } union := newUnion([]ops.Operator{lhsRoute.Source, rhsRoute.Source}, []sqlparser.SelectExprs{lhsExprs, rhsExprs}, cols, distinct) diff --git a/go/vt/vtgate/planbuilder/plan_test.go b/go/vt/vtgate/planbuilder/plan_test.go index e563926269a..a7450db3a05 100644 --- a/go/vt/vtgate/planbuilder/plan_test.go +++ b/go/vt/vtgate/planbuilder/plan_test.go @@ -501,7 +501,12 @@ func loadSchema(t testing.TB, filename string, setCollation bool) *vindexes.VSch for _, table := range ks.Tables { for i, col := range table.Columns { if sqltypes.IsText(col.Type) { - table.Columns[i].CollationName = "latin1_swedish_ci" + switch col.Name.String() { + case "textcol2": + table.Columns[i].CollationName = "big5_bin" + default: + table.Columns[i].CollationName = "latin1_swedish_ci" + } } } } diff --git a/go/vt/vtgate/planbuilder/rewrite.go b/go/vt/vtgate/planbuilder/rewrite.go index f59441c77ac..30ba06f25fa 100644 --- a/go/vt/vtgate/planbuilder/rewrite.go +++ b/go/vt/vtgate/planbuilder/rewrite.go @@ -82,6 +82,7 @@ func (r *rewriter) rewriteDown(cursor *sqlparser.Cursor) bool { node.As = tableName.Name } // replace the table name with the original table + tableName.Qualifier = sqlparser.IdentifierCS{} tableName.Name = vindexTable.Name node.Expr = tableName case *sqlparser.Subquery: diff 
--git a/go/vt/vtgate/planbuilder/testdata/aggr_cases.json b/go/vt/vtgate/planbuilder/testdata/aggr_cases.json index 5fc192c7bc4..c0850178bf8 100644 --- a/go/vt/vtgate/planbuilder/testdata/aggr_cases.json +++ b/go/vt/vtgate/planbuilder/testdata/aggr_cases.json @@ -675,10 +675,15 @@ } }, { - "comment": "scatter aggregate group by aggregate function", + "comment": "scatter aggregate group by aggregate function - since we don't have authoratative columns for user, we can't be sure that the user isn't referring a column named b", "query": "select count(*) b from user group by b", "plan": "VT03005: cannot group on 'count(*)'" }, + { + "comment": "scatter aggregate group by aggregate function with column information", + "query": "select count(*) b from authoritative group by b", + "plan": "VT03005: cannot group on 'b'" + }, { "comment": "scatter aggregate multiple group by (columns)", "query": "select a, b, count(*) from user group by a, b", @@ -808,7 +813,7 @@ { "comment": "scatter aggregate group by invalid column number", "query": "select col from user group by 2", - "plan": "Unknown column '2' in 'group statement'" + "plan": "Unknown column '2' in 'group clause'" }, { "comment": "scatter aggregate order by null", @@ -893,7 +898,7 @@ }, "FieldQuery": "select a, b, c, d, count(*), weight_string(a), weight_string(b), weight_string(c) from `user` where 1 != 1 group by a, b, c, weight_string(a), weight_string(b), weight_string(c)", "OrderBy": "(0|5) ASC, (1|6) ASC, (2|7) ASC", - "Query": "select a, b, c, d, count(*), weight_string(a), weight_string(b), weight_string(c) from `user` group by a, b, c, weight_string(a), weight_string(b), weight_string(c) order by a asc, b asc, c asc", + "Query": "select a, b, c, d, count(*), weight_string(a), weight_string(b), weight_string(c) from `user` group by a, b, c, weight_string(a), weight_string(b), weight_string(c) order by `user`.a asc, `user`.b asc, `user`.c asc", "Table": "`user`" } ] @@ -925,7 +930,7 @@ }, "FieldQuery": "select 
a, b, c, d, count(*), weight_string(d), weight_string(b), weight_string(a), weight_string(c) from `user` where 1 != 1 group by d, b, a, c, weight_string(d), weight_string(b), weight_string(a), weight_string(c)", "OrderBy": "(3|5) ASC, (1|6) ASC, (0|7) ASC, (2|8) ASC", - "Query": "select a, b, c, d, count(*), weight_string(d), weight_string(b), weight_string(a), weight_string(c) from `user` group by d, b, a, c, weight_string(d), weight_string(b), weight_string(a), weight_string(c) order by d asc, b asc, a asc, c asc", + "Query": "select a, b, c, d, count(*), weight_string(d), weight_string(b), weight_string(a), weight_string(c) from `user` group by d, b, a, c, weight_string(d), weight_string(b), weight_string(a), weight_string(c) order by `user`.d asc, `user`.b asc, `user`.a asc, `user`.c asc", "Table": "`user`" } ] @@ -957,7 +962,7 @@ }, "FieldQuery": "select a, b, c, d, count(*), weight_string(d), weight_string(b), weight_string(a), weight_string(c) from `user` where 1 != 1 group by d, b, a, c, weight_string(d), weight_string(b), weight_string(a), weight_string(c)", "OrderBy": "(3|5) ASC, (1|6) ASC, (0|7) ASC, (2|8) ASC", - "Query": "select a, b, c, d, count(*), weight_string(d), weight_string(b), weight_string(a), weight_string(c) from `user` group by d, b, a, c, weight_string(d), weight_string(b), weight_string(a), weight_string(c) order by d asc, b asc, a asc, c asc", + "Query": "select a, b, c, d, count(*), weight_string(d), weight_string(b), weight_string(a), weight_string(c) from `user` group by d, b, a, c, weight_string(d), weight_string(b), weight_string(a), weight_string(c) order by `user`.d asc, `user`.b asc, `user`.a asc, `user`.c asc", "Table": "`user`" } ] @@ -989,7 +994,7 @@ }, "FieldQuery": "select a, b, c, count(*), weight_string(a), weight_string(c), weight_string(b) from `user` where 1 != 1 group by a, c, b, weight_string(a), weight_string(c), weight_string(b)", "OrderBy": "(0|4) DESC, (2|5) DESC, (1|6) ASC", - "Query": "select a, b, c, count(*), 
weight_string(a), weight_string(c), weight_string(b) from `user` group by a, c, b, weight_string(a), weight_string(c), weight_string(b) order by a desc, c desc, b asc", + "Query": "select a, b, c, count(*), weight_string(a), weight_string(c), weight_string(b) from `user` group by a, c, b, weight_string(a), weight_string(c), weight_string(b) order by a desc, c desc, `user`.b asc", "Table": "`user`" } ] @@ -1041,32 +1046,6 @@ ] } }, - { - "comment": "Group by with collate operator", - "query": "select user.col1 as a from user where user.id = 5 group by a collate utf8_general_ci", - "plan": { - "QueryType": "SELECT", - "Original": "select user.col1 as a from user where user.id = 5 group by a collate utf8_general_ci", - "Instructions": { - "OperatorType": "Route", - "Variant": "EqualUnique", - "Keyspace": { - "Name": "user", - "Sharded": true - }, - "FieldQuery": "select `user`.col1 as a from `user` where 1 != 1 group by a collate utf8_general_ci", - "Query": "select `user`.col1 as a from `user` where `user`.id = 5 group by a collate utf8_general_ci", - "Table": "`user`", - "Values": [ - "INT64(5)" - ], - "Vindex": "user_index" - }, - "TablesUsed": [ - "user.user" - ] - } - }, { "comment": "routing rules for aggregates", "query": "select id, count(*) from route2 group by id", @@ -1103,7 +1082,7 @@ "Sharded": true }, "FieldQuery": "select col from ref where 1 != 1", - "Query": "select col from ref order by col asc", + "Query": "select col from ref order by ref.col asc", "Table": "ref" }, "TablesUsed": [ @@ -1207,7 +1186,7 @@ { "comment": "Group by out of range column number (code is duplicated from symab).", "query": "select id from user group by 2", - "plan": "Unknown column '2' in 'group statement'" + "plan": "Unknown column '2' in 'group clause'" }, { "comment": "here it is safe to remove the order by on the derived table since it will not influence the output of the count(*)", @@ -1584,10 +1563,10 @@ }, { "comment": "weight_string addition to group by", - "query": 
"select lower(textcol1) as v, count(*) from user group by v", + "query": "select lower(col1) as v, count(*) from authoritative group by v", "plan": { "QueryType": "SELECT", - "Original": "select lower(textcol1) as v, count(*) from user group by v", + "Original": "select lower(col1) as v, count(*) from authoritative group by v", "Instructions": { "OperatorType": "Aggregate", "Variant": "Ordered", @@ -1602,24 +1581,24 @@ "Name": "user", "Sharded": true }, - "FieldQuery": "select lower(textcol1) as v, count(*), weight_string(lower(textcol1)) from `user` where 1 != 1 group by v, weight_string(lower(textcol1))", + "FieldQuery": "select lower(col1) as v, count(*), weight_string(lower(col1)) from authoritative where 1 != 1 group by lower(col1), weight_string(lower(col1))", "OrderBy": "(0|2) ASC", - "Query": "select lower(textcol1) as v, count(*), weight_string(lower(textcol1)) from `user` group by v, weight_string(lower(textcol1)) order by v asc", - "Table": "`user`" + "Query": "select lower(col1) as v, count(*), weight_string(lower(col1)) from authoritative group by lower(col1), weight_string(lower(col1)) order by lower(col1) asc", + "Table": "authoritative" } ] }, "TablesUsed": [ - "user.user" + "user.authoritative" ] } }, { "comment": "weight_string addition to group by when also there in order by", - "query": "select char_length(texcol1) as a, count(*) from user group by a order by a", + "query": "select char_length(col1) as a, count(*) from authoritative group by a order by a", "plan": { "QueryType": "SELECT", - "Original": "select char_length(texcol1) as a, count(*) from user group by a order by a", + "Original": "select char_length(col1) as a, count(*) from authoritative group by a order by a", "Instructions": { "OperatorType": "Aggregate", "Variant": "Ordered", @@ -1634,15 +1613,15 @@ "Name": "user", "Sharded": true }, - "FieldQuery": "select char_length(texcol1) as a, count(*), weight_string(char_length(texcol1)) from `user` where 1 != 1 group by a, 
weight_string(char_length(texcol1))", + "FieldQuery": "select char_length(col1) as a, count(*), weight_string(char_length(col1)) from authoritative where 1 != 1 group by char_length(col1), weight_string(char_length(col1))", "OrderBy": "(0|2) ASC", - "Query": "select char_length(texcol1) as a, count(*), weight_string(char_length(texcol1)) from `user` group by a, weight_string(char_length(texcol1)) order by a asc", - "Table": "`user`" + "Query": "select char_length(col1) as a, count(*), weight_string(char_length(col1)) from authoritative group by char_length(col1), weight_string(char_length(col1)) order by char_length(authoritative.col1) asc", + "Table": "authoritative" } ] }, "TablesUsed": [ - "user.user" + "user.authoritative" ] } }, @@ -1699,7 +1678,7 @@ }, "FieldQuery": "select col, id, weight_string(id) from `user` where 1 != 1", "OrderBy": "(1|2) ASC", - "Query": "select col, id, weight_string(id) from `user` order by id asc", + "Query": "select col, id, weight_string(id) from `user` order by `user`.id asc", "ResultColumns": 2, "Table": "`user`" }, @@ -2009,19 +1988,20 @@ }, { "comment": "Less Equal filter on scatter with grouping", - "query": "select col, count(*) a from user group by col having a <= 10", + "query": "select col1, count(*) a from user group by col1 having a <= 10", "plan": { "QueryType": "SELECT", - "Original": "select col, count(*) a from user group by col having a <= 10", + "Original": "select col1, count(*) a from user group by col1 having a <= 10", "Instructions": { "OperatorType": "Filter", "Predicate": "count(*) <= 10", + "ResultColumns": 2, "Inputs": [ { "OperatorType": "Aggregate", "Variant": "Ordered", "Aggregates": "sum_count_star(1) AS a", - "GroupBy": "0", + "GroupBy": "(0|2)", "Inputs": [ { "OperatorType": "Route", @@ -2030,9 +2010,9 @@ "Name": "user", "Sharded": true }, - "FieldQuery": "select col, count(*) as a from `user` where 1 != 1 group by col", - "OrderBy": "0 ASC", - "Query": "select col, count(*) as a from `user` group by 
col order by col asc", + "FieldQuery": "select col1, count(*) as a, weight_string(col1) from `user` where 1 != 1 group by col1, weight_string(col1)", + "OrderBy": "(0|2) ASC", + "Query": "select col1, count(*) as a, weight_string(col1) from `user` group by col1, weight_string(col1) order by col1 asc", "Table": "`user`" } ] @@ -2046,10 +2026,10 @@ }, { "comment": "We should be able to find grouping keys on ordered aggregates", - "query": "select count(*) as a, val1 from user group by val1 having a = 1.00", + "query": "select count(*) as a, col2 from user group by col2 having a = 1.00", "plan": { "QueryType": "SELECT", - "Original": "select count(*) as a, val1 from user group by val1 having a = 1.00", + "Original": "select count(*) as a, col2 from user group by col2 having a = 1.00", "Instructions": { "OperatorType": "Filter", "Predicate": "count(*) = 1.00", @@ -2068,9 +2048,9 @@ "Name": "user", "Sharded": true }, - "FieldQuery": "select count(*) as a, val1, weight_string(val1) from `user` where 1 != 1 group by val1, weight_string(val1)", + "FieldQuery": "select count(*) as a, col2, weight_string(col2) from `user` where 1 != 1 group by col2, weight_string(col2)", "OrderBy": "(1|2) ASC", - "Query": "select count(*) as a, val1, weight_string(val1) from `user` group by val1, weight_string(val1) order by val1 asc", + "Query": "select count(*) as a, col2, weight_string(col2) from `user` group by col2, weight_string(col2) order by col2 asc", "Table": "`user`" } ] @@ -2620,10 +2600,10 @@ }, { "comment": "group by column alias", - "query": "select ascii(val1) as a, count(*) from user group by a", + "query": "select ascii(col2) as a, count(*) from user group by a", "plan": { "QueryType": "SELECT", - "Original": "select ascii(val1) as a, count(*) from user group by a", + "Original": "select ascii(col2) as a, count(*) from user group by a", "Instructions": { "OperatorType": "Aggregate", "Variant": "Ordered", @@ -2638,9 +2618,9 @@ "Name": "user", "Sharded": true }, - 
"FieldQuery": "select ascii(val1) as a, count(*), weight_string(ascii(val1)) from `user` where 1 != 1 group by a, weight_string(ascii(val1))", + "FieldQuery": "select ascii(col2) as a, count(*), weight_string(ascii(col2)) from `user` where 1 != 1 group by ascii(col2), weight_string(ascii(col2))", "OrderBy": "(0|2) ASC", - "Query": "select ascii(val1) as a, count(*), weight_string(ascii(val1)) from `user` group by a, weight_string(ascii(val1)) order by a asc", + "Query": "select ascii(col2) as a, count(*), weight_string(ascii(col2)) from `user` group by ascii(col2), weight_string(ascii(col2)) order by ascii(col2) asc", "Table": "`user`" } ] @@ -2989,7 +2969,7 @@ "Original": "select foo, sum(foo) as fooSum, sum(bar) as barSum from user group by foo having fooSum+sum(bar) = 42", "Instructions": { "OperatorType": "Filter", - "Predicate": "sum(foo) + sum(bar) = 42", + "Predicate": "sum(`user`.foo) + sum(bar) = 42", "ResultColumns": 3, "Inputs": [ { @@ -3333,10 +3313,10 @@ }, { "comment": "group by and ',' joins", - "query": "select user.id from user, user_extra group by id", + "query": "select user.id from user, user_extra group by user.id", "plan": { "QueryType": "SELECT", - "Original": "select user.id from user, user_extra group by id", + "Original": "select user.id from user, user_extra group by user.id", "Instructions": { "OperatorType": "Aggregate", "Variant": "Ordered", @@ -3356,9 +3336,9 @@ "Name": "user", "Sharded": true }, - "FieldQuery": "select `user`.id, weight_string(`user`.id) from `user` where 1 != 1 group by id, weight_string(`user`.id)", + "FieldQuery": "select `user`.id, weight_string(`user`.id) from `user` where 1 != 1 group by `user`.id, weight_string(`user`.id)", "OrderBy": "(0|1) ASC", - "Query": "select `user`.id, weight_string(`user`.id) from `user` group by id, weight_string(`user`.id) order by id asc", + "Query": "select `user`.id, weight_string(`user`.id) from `user` group by `user`.id, weight_string(`user`.id) order by `user`.id asc", 
"Table": "`user`" }, { @@ -3560,7 +3540,7 @@ }, "FieldQuery": "select id, val1, 1, weight_string(val1) from (select id, val1 from `user` where 1 != 1) as x where 1 != 1", "OrderBy": "(1|3) ASC", - "Query": "select id, val1, 1, weight_string(val1) from (select id, val1 from `user` where val2 < 4) as x order by val1 asc limit :__upper_limit", + "Query": "select id, val1, 1, weight_string(val1) from (select id, val1 from `user` where val2 < 4) as x order by `user`.val1 asc limit :__upper_limit", "Table": "`user`" } ] @@ -4845,7 +4825,8 @@ "Instructions": { "OperatorType": "Aggregate", "Variant": "Scalar", - "Aggregates": "min(0 COLLATE latin1_swedish_ci) AS min(textcol1), max(1 COLLATE latin1_swedish_ci) AS max(textcol2), sum_distinct(2 COLLATE latin1_swedish_ci) AS sum(distinct textcol1), count_distinct(3 COLLATE latin1_swedish_ci) AS count(distinct textcol1)", + "Aggregates": "min(0 COLLATE latin1_swedish_ci) AS min(textcol1), max(1|4) AS max(textcol2), sum_distinct(2 COLLATE latin1_swedish_ci) AS sum(distinct textcol1), count_distinct(3 COLLATE latin1_swedish_ci) AS count(distinct textcol1)", + "ResultColumns": 4, "Inputs": [ { "OperatorType": "Route", @@ -4854,9 +4835,9 @@ "Name": "user", "Sharded": true }, - "FieldQuery": "select min(textcol1), max(textcol2), textcol1, textcol1 from `user` where 1 != 1 group by textcol1", + "FieldQuery": "select min(textcol1), max(textcol2), textcol1, textcol1, weight_string(textcol2) from `user` where 1 != 1 group by textcol1, weight_string(textcol2)", "OrderBy": "2 ASC COLLATE latin1_swedish_ci", - "Query": "select min(textcol1), max(textcol2), textcol1, textcol1 from `user` group by textcol1 order by textcol1 asc", + "Query": "select min(textcol1), max(textcol2), textcol1, textcol1, weight_string(textcol2) from `user` group by textcol1, weight_string(textcol2) order by textcol1 asc", "Table": "`user`" } ] @@ -4875,8 +4856,9 @@ "Instructions": { "OperatorType": "Aggregate", "Variant": "Ordered", - "Aggregates": "min(1 COLLATE 
latin1_swedish_ci) AS min(textcol1), max(2 COLLATE latin1_swedish_ci) AS max(textcol2), sum_distinct(3 COLLATE latin1_swedish_ci) AS sum(distinct textcol1), count_distinct(4 COLLATE latin1_swedish_ci) AS count(distinct textcol1)", + "Aggregates": "min(1 COLLATE latin1_swedish_ci) AS min(textcol1), max(2|5) AS max(textcol2), sum_distinct(3 COLLATE latin1_swedish_ci) AS sum(distinct textcol1), count_distinct(4 COLLATE latin1_swedish_ci) AS count(distinct textcol1)", "GroupBy": "0", + "ResultColumns": 5, "Inputs": [ { "OperatorType": "Route", @@ -4885,9 +4867,9 @@ "Name": "user", "Sharded": true }, - "FieldQuery": "select col, min(textcol1), max(textcol2), textcol1, textcol1 from `user` where 1 != 1 group by col, textcol1", + "FieldQuery": "select col, min(textcol1), max(textcol2), textcol1, textcol1, weight_string(textcol2) from `user` where 1 != 1 group by col, textcol1, weight_string(textcol2)", "OrderBy": "0 ASC, 3 ASC COLLATE latin1_swedish_ci", - "Query": "select col, min(textcol1), max(textcol2), textcol1, textcol1 from `user` group by col, textcol1 order by col asc, textcol1 asc", + "Query": "select col, min(textcol1), max(textcol2), textcol1, textcol1, weight_string(textcol2) from `user` group by col, textcol1, weight_string(textcol2) order by col asc, textcol1 asc", "Table": "`user`" } ] @@ -6371,5 +6353,69 @@ "user.user" ] } + }, + { + "comment": "valid but slightly confusing query should work - col in the order by should not get expanded to the column alias col", + "query": "select id, from_unixtime(min(col)) as col from user group by id order by min(col)", + "plan": { + "QueryType": "SELECT", + "Original": "select id, from_unixtime(min(col)) as col from user group by id order by min(col)", + "Instructions": { + "OperatorType": "Route", + "Variant": "Scatter", + "Keyspace": { + "Name": "user", + "Sharded": true + }, + "FieldQuery": "select id, from_unixtime(min(col)) as col, min(col) from `user` where 1 != 1 group by id", + "OrderBy": "2 ASC", + "Query": 
"select id, from_unixtime(min(col)) as col, min(col) from `user` group by id order by min(col) asc", + "ResultColumns": 2, + "Table": "`user`" + }, + "TablesUsed": [ + "user.user" + ] + } + }, + { + "comment": "col is a column on user, but the HAVING is referring to an alias", + "query": "select sum(x) col from user where x > 0 having col = 2", + "plan": { + "QueryType": "SELECT", + "Original": "select sum(x) col from user where x > 0 having col = 2", + "Instructions": { + "OperatorType": "Filter", + "Predicate": "sum(`user`.x) = 2", + "Inputs": [ + { + "OperatorType": "Aggregate", + "Variant": "Scalar", + "Aggregates": "sum(0) AS col", + "Inputs": [ + { + "OperatorType": "Route", + "Variant": "Scatter", + "Keyspace": { + "Name": "user", + "Sharded": true + }, + "FieldQuery": "select sum(x) as col from `user` where 1 != 1", + "Query": "select sum(x) as col from `user` where x > 0", + "Table": "`user`" + } + ] + } + ] + }, + "TablesUsed": [ + "user.user" + ] + } + }, + { + "comment": "baz in the HAVING clause can't be accessed because of the GROUP BY", + "query": "select foo, count(bar) as x from user group by foo having baz > avg(baz) order by x", + "plan": "Unknown column 'baz' in 'having clause'" } ] \ No newline at end of file diff --git a/go/vt/vtgate/planbuilder/testdata/ddl_cases.json b/go/vt/vtgate/planbuilder/testdata/ddl_cases.json index eec1a0ce101..a645ae71d37 100644 --- a/go/vt/vtgate/planbuilder/testdata/ddl_cases.json +++ b/go/vt/vtgate/planbuilder/testdata/ddl_cases.json @@ -260,7 +260,7 @@ "Name": "main", "Sharded": false }, - "Query": "create view view_a as select a.col1, a.col2 from (select col1, col2 from unsharded where id = 1 union select col1, col2 from unsharded where id = 3) as a" + "Query": "create view view_a as select * from (select col1, col2 from unsharded where id = 1 union select col1, col2 from unsharded where id = 3) as a" }, "TablesUsed": [ "main.view_a" diff --git 
a/go/vt/vtgate/planbuilder/testdata/ddl_cases_no_default_keyspace.json b/go/vt/vtgate/planbuilder/testdata/ddl_cases_no_default_keyspace.json index d05631cbff5..b0d77d0062e 100644 --- a/go/vt/vtgate/planbuilder/testdata/ddl_cases_no_default_keyspace.json +++ b/go/vt/vtgate/planbuilder/testdata/ddl_cases_no_default_keyspace.json @@ -163,7 +163,7 @@ "Name": "user", "Sharded": true }, - "Query": "create view view_a as select a.user_id, a.col1, a.col2 from authoritative as a" + "Query": "create view view_a as select user_id, col1, col2 from authoritative as a" }, "TablesUsed": [ "user.view_a" @@ -472,7 +472,7 @@ "Name": "user", "Sharded": true }, - "Query": "create view view_a as select user0_.col as col0_ from `user` as user0_ where id = 1 order by col0_ desc" + "Query": "create view view_a as select user0_.col as col0_ from `user` as user0_ where id = 1 order by user0_.col desc" }, "TablesUsed": [ "user.view_a" diff --git a/go/vt/vtgate/planbuilder/testdata/dml_cases.json b/go/vt/vtgate/planbuilder/testdata/dml_cases.json index ab9c3f5728a..ce4cd48f258 100644 --- a/go/vt/vtgate/planbuilder/testdata/dml_cases.json +++ b/go/vt/vtgate/planbuilder/testdata/dml_cases.json @@ -4903,58 +4903,58 @@ } }, { - "QueryType": "delete comment", - "Original": "/* comment1 */ delete /* comment2 */ from /* comment3 */ user /* comment4 */ ", - "Instructions": { - "OperatorType": "Delete", - "Variant": "Scatter", - "Keyspace": { - "Name": "user", - "Sharded": true - }, - "TargetTabletType": "PRIMARY", - "KsidLength": 1, - "KsidVindex": "user_index", - "OwnedVindexQuery": "select Id, `Name`, Costly from `user` for update", - "Query": "delete /* comment2 */ from `user`", - "Table": "user" - }, - "TablesUsed": [ - "user.user" - ] - }, - { - "comment": "update Comments", - "query": "/* comment1 */ update /* comment2 */ user /* comment3 */ set /* comment4 */ val = 1 /* comment5 */ where id = 1 /* comment6 */", + "comment": "Delete with routed table on music", + "query": "delete from 
second_user.bar", + "plan": { + "QueryType": "DELETE", + "Original": "delete from second_user.bar", + "Instructions": { + "OperatorType": "Delete", + "Variant": "Scatter", + "Keyspace": { + "Name": "user", + "Sharded": true + }, + "TargetTabletType": "PRIMARY", + "KsidLength": 1, + "KsidVindex": "user_index", + "OwnedVindexQuery": "select user_id, id from music as bar for update", + "Query": "delete from music as bar", + "Table": "music" + }, + "TablesUsed": [ + "user.music" + ] + } + }, + { + "comment": "Update with routed table on music", + "query": "update second_user.bar set col = 23", "plan": { "QueryType": "UPDATE", - "Original": "/* comment1 */ update /* comment2 */ user /* comment3 */ set /* comment4 */ val = 1 /* comment5 */ where id = 1 /* comment6 */", + "Original": "update second_user.bar set col = 23", "Instructions": { "OperatorType": "Update", - "Variant": "EqualUnique", + "Variant": "Scatter", "Keyspace": { "Name": "user", "Sharded": true }, "TargetTabletType": "PRIMARY", - "Query": "update /* comment2 */ `user` set val = 1 where id = 1", - "Table": "user", - "Values": [ - "INT64(1)" - ], - "Vindex": "user_index" + "Query": "update music as bar set col = 23", + "Table": "music" }, "TablesUsed": [ - "user.user" + "user.music" ] } }, { - "comment": "insert Comments", - "query": "/* comment1 */ insert /* comment2 */ into /* comment3 */ music(user_id, id) /* comment4 */ values(1, 2) /* comment5 */", + "comment": "Insert with routed table on music", + "query": "insert into second_user.bar(id) values (2)", "plan": { "QueryType": "INSERT", - "Original": "/* comment1 */ insert /* comment2 */ into /* comment3 */ music(user_id, id) /* comment4 */ values(1, 2) /* comment5 */", + "Original": "insert into second_user.bar(id) values (2)", "Instructions": { "OperatorType": "Insert", "Variant": "Sharded", @@ -4963,11 +4963,11 @@ "Sharded": true }, "TargetTabletType": "PRIMARY", - "Query": "insert /* comment2 */ into music(user_id, id) values (:_user_id_0, :_id_0)", 
+ "Query": "insert into music(id, user_id) values (:_id_0, :_user_id_0)", "TableName": "music", "VindexValues": { "music_user_map": "INT64(2)", - "user_index": "INT64(1)" + "user_index": "NULL" } }, "TablesUsed": [ diff --git a/go/vt/vtgate/planbuilder/testdata/filter_cases.json b/go/vt/vtgate/planbuilder/testdata/filter_cases.json index 7cf1e830447..ec4051dfe67 100644 --- a/go/vt/vtgate/planbuilder/testdata/filter_cases.json +++ b/go/vt/vtgate/planbuilder/testdata/filter_cases.json @@ -4018,7 +4018,31 @@ "Sharded": true }, "FieldQuery": "select a + 2 as a from `user` where 1 != 1", - "Query": "select a + 2 as a from `user` where a + 2 = 42", + "Query": "select a + 2 as a from `user` where `user`.a + 2 = 42", + "Table": "`user`" + }, + "TablesUsed": [ + "user.user" + ] + } + }, + { + "comment": "Order by aliases are expanded", + "query": "select a+2 as a from user order by a", + "plan": { + "QueryType": "SELECT", + "Original": "select a+2 as a from user order by a", + "Instructions": { + "OperatorType": "Route", + "Variant": "Scatter", + "Keyspace": { + "Name": "user", + "Sharded": true + }, + "FieldQuery": "select a + 2 as a, weight_string(a + 2) from `user` where 1 != 1", + "OrderBy": "(0|1) ASC", + "Query": "select a + 2 as a, weight_string(a + 2) from `user` order by `user`.a + 2 asc", + "ResultColumns": 1, "Table": "`user`" }, "TablesUsed": [ diff --git a/go/vt/vtgate/planbuilder/testdata/foreignkey_cases.json b/go/vt/vtgate/planbuilder/testdata/foreignkey_cases.json index 0d3c5e4745a..c9c0acb3cc7 100644 --- a/go/vt/vtgate/planbuilder/testdata/foreignkey_cases.json +++ b/go/vt/vtgate/planbuilder/testdata/foreignkey_cases.json @@ -1132,7 +1132,7 @@ "Sharded": false }, "FieldQuery": "select 1 from u_tbl8 left join u_tbl9 on u_tbl9.col9 = 'foo' where 1 != 1", - "Query": "select 1 from u_tbl8 left join u_tbl9 on u_tbl9.col9 = 'foo' where (u_tbl8.col8) in ::fkc_vals and u_tbl9.col9 is null limit 1 lock in share mode", + "Query": "select 1 from u_tbl8 left join 
u_tbl9 on u_tbl9.col9 = 'foo' where u_tbl9.col9 is null and (u_tbl8.col8) in ::fkc_vals limit 1 lock in share mode", "Table": "u_tbl8, u_tbl9" }, { @@ -1208,7 +1208,7 @@ "Sharded": false }, "FieldQuery": "select 1 from u_tbl4 left join u_tbl3 on u_tbl3.col3 = 'foo' where 1 != 1", - "Query": "select 1 from u_tbl4 left join u_tbl3 on u_tbl3.col3 = 'foo' where (u_tbl4.col4) in ::fkc_vals and u_tbl3.col3 is null limit 1 lock in share mode", + "Query": "select 1 from u_tbl4 left join u_tbl3 on u_tbl3.col3 = 'foo' where u_tbl3.col3 is null and (u_tbl4.col4) in ::fkc_vals limit 1 lock in share mode", "Table": "u_tbl3, u_tbl4" }, { @@ -1220,7 +1220,7 @@ "Sharded": false }, "FieldQuery": "select 1 from u_tbl4, u_tbl9 where 1 != 1", - "Query": "select 1 from u_tbl4, u_tbl9 where (u_tbl4.col4) in ::fkc_vals and (u_tbl9.col9) not in (('foo')) and u_tbl4.col4 = u_tbl9.col9 limit 1 lock in share mode", + "Query": "select 1 from u_tbl4, u_tbl9 where u_tbl4.col4 = u_tbl9.col9 and (u_tbl4.col4) in ::fkc_vals and ('foo' is null or (u_tbl9.col9) not in (('foo'))) and u_tbl4.col4 = u_tbl9.col9 and (u_tbl4.col4) in ::fkc_vals and ('foo' is null or (u_tbl9.col9) not in (('foo'))) limit 1 lock in share mode", "Table": "u_tbl4, u_tbl9" }, { @@ -1297,7 +1297,7 @@ "Sharded": false }, "FieldQuery": "select 1 from u_tbl4 left join u_tbl3 on u_tbl3.col3 = :v1 where 1 != 1", - "Query": "select 1 from u_tbl4 left join u_tbl3 on u_tbl3.col3 = :v1 where (u_tbl4.col4) in ::fkc_vals and u_tbl3.col3 is null limit 1 lock in share mode", + "Query": "select 1 from u_tbl4 left join u_tbl3 on u_tbl3.col3 = :v1 where u_tbl3.col3 is null and (u_tbl4.col4) in ::fkc_vals limit 1 lock in share mode", "Table": "u_tbl3, u_tbl4" }, { @@ -1309,7 +1309,7 @@ "Sharded": false }, "FieldQuery": "select 1 from u_tbl4, u_tbl9 where 1 != 1", - "Query": "select 1 from u_tbl4, u_tbl9 where (u_tbl4.col4) in ::fkc_vals and (:v1 is null or (u_tbl9.col9) not in ((:v1))) and u_tbl4.col4 = u_tbl9.col9 limit 1 lock in share mode", 
+ "Query": "select 1 from u_tbl4, u_tbl9 where u_tbl4.col4 = u_tbl9.col9 and (u_tbl4.col4) in ::fkc_vals and (:v1 is null or (u_tbl9.col9) not in ((:v1))) and u_tbl4.col4 = u_tbl9.col9 and (u_tbl4.col4) in ::fkc_vals and (:v1 is null or (u_tbl9.col9) not in ((:v1))) limit 1 lock in share mode", "Table": "u_tbl4, u_tbl9" }, { diff --git a/go/vt/vtgate/planbuilder/testdata/from_cases.json b/go/vt/vtgate/planbuilder/testdata/from_cases.json index 149f503a634..f65bb84254d 100644 --- a/go/vt/vtgate/planbuilder/testdata/from_cases.json +++ b/go/vt/vtgate/planbuilder/testdata/from_cases.json @@ -715,8 +715,8 @@ "Name": "main", "Sharded": false }, - "FieldQuery": "select m1.col from unsharded as m1 join unsharded as m2 where 1 != 1", - "Query": "select m1.col from unsharded as m1 join unsharded as m2", + "FieldQuery": "select m1.col from unsharded as m1 straight_join unsharded as m2 where 1 != 1", + "Query": "select m1.col from unsharded as m1 straight_join unsharded as m2", "Table": "unsharded" }, "TablesUsed": [ @@ -3800,8 +3800,8 @@ "Name": "user", "Sharded": true }, - "FieldQuery": "select t3.push_it from (select bar as push_it from (select foo as bar from (select id as foo from `user` where 1 != 1) as t1 where 1 != 1) as t2 where 1 != 1) as t3 where 1 != 1", - "Query": "select t3.push_it from (select bar as push_it from (select foo as bar from (select id as foo from `user` where id = 12) as t1) as t2) as t3", + "FieldQuery": "select push_it from (select bar as push_it from (select foo as bar from (select id as foo from `user` where 1 != 1) as t1 where 1 != 1) as t2 where 1 != 1) as t3 where 1 != 1", + "Query": "select push_it from (select bar as push_it from (select foo as bar from (select id as foo from `user` where id = 12) as t1) as t2) as t3", "Table": "`user`", "Values": [ "INT64(12)" @@ -3826,8 +3826,8 @@ "Name": "user", "Sharded": true }, - "FieldQuery": "select user_details_view.id, user_details_view.col from (select `user`.id, user_extra.col from `user`, 
user_extra where 1 != 1) as user_details_view where 1 != 1", - "Query": "select user_details_view.id, user_details_view.col from (select `user`.id, user_extra.col from `user`, user_extra where `user`.id = user_extra.user_id) as user_details_view", + "FieldQuery": "select id, col from (select `user`.id, user_extra.col from `user`, user_extra where 1 != 1) as user_details_view where 1 != 1", + "Query": "select id, col from (select `user`.id, user_extra.col from `user`, user_extra where `user`.id = user_extra.user_id) as user_details_view", "Table": "`user`, user_extra" }, "TablesUsed": [ @@ -3849,8 +3849,8 @@ "Name": "user", "Sharded": true }, - "FieldQuery": "select user_details_view.id, user_details_view.col from (select `user`.id, user_extra.col from `user`, user_extra where 1 != 1) as user_details_view where 1 != 1", - "Query": "select user_details_view.id, user_details_view.col from (select `user`.id, user_extra.col from `user`, user_extra where `user`.id = user_extra.user_id) as user_details_view", + "FieldQuery": "select id, col from (select `user`.id, user_extra.col from `user`, user_extra where 1 != 1) as user_details_view where 1 != 1", + "Query": "select id, col from (select `user`.id, user_extra.col from `user`, user_extra where `user`.id = user_extra.user_id) as user_details_view", "Table": "`user`, user_extra" }, "TablesUsed": [ @@ -3993,8 +3993,8 @@ "Name": "main", "Sharded": false }, - "FieldQuery": "select A.col1 as col1, A.col2 as col2, B.col2 as col2 from unsharded_authoritative as A left join unsharded_authoritative as B on A.col1 = B.col1 where 1 != 1", - "Query": "select A.col1 as col1, A.col2 as col2, B.col2 as col2 from unsharded_authoritative as A left join unsharded_authoritative as B on A.col1 = B.col1", + "FieldQuery": "select * from unsharded_authoritative as A left join unsharded_authoritative as B using (col1) where 1 != 1", + "Query": "select * from unsharded_authoritative as A left join unsharded_authoritative as B using (col1)", 
"Table": "unsharded_authoritative" }, "TablesUsed": [ diff --git a/go/vt/vtgate/planbuilder/testdata/info_schema57_cases.json b/go/vt/vtgate/planbuilder/testdata/info_schema57_cases.json index 7380b203cdd..e374794627f 100644 --- a/go/vt/vtgate/planbuilder/testdata/info_schema57_cases.json +++ b/go/vt/vtgate/planbuilder/testdata/info_schema57_cases.json @@ -307,7 +307,7 @@ "Sharded": false }, "FieldQuery": "select kcu.constraint_name as constraint_name, kcu.column_name as column_name, kcu.referenced_table_name as referenced_table_name, kcu.referenced_column_name as referenced_column_name, kcu.ordinal_position as ordinal_position, kcu.table_name as table_name from information_schema.key_column_usage as kcu where 1 != 1", - "Query": "select kcu.constraint_name as constraint_name, kcu.column_name as column_name, kcu.referenced_table_name as referenced_table_name, kcu.referenced_column_name as referenced_column_name, kcu.ordinal_position as ordinal_position, kcu.table_name as table_name from information_schema.key_column_usage as kcu where kcu.table_schema = :__vtschemaname /* VARCHAR */ and kcu.referenced_column_name is not null order by ordinal_position asc", + "Query": "select kcu.constraint_name as constraint_name, kcu.column_name as column_name, kcu.referenced_table_name as referenced_table_name, kcu.referenced_column_name as referenced_column_name, kcu.ordinal_position as ordinal_position, kcu.table_name as table_name from information_schema.key_column_usage as kcu where kcu.table_schema = :__vtschemaname /* VARCHAR */ and kcu.referenced_column_name is not null order by kcu.ordinal_position asc", "SysTableTableSchema": "[:v1]", "Table": "information_schema.key_column_usage" }, @@ -683,8 +683,8 @@ "Name": "main", "Sharded": false }, - "FieldQuery": "select x.table_name from (select a.CONSTRAINT_CATALOG, a.CONSTRAINT_SCHEMA, a.CONSTRAINT_NAME, a.TABLE_CATALOG, a.TABLE_SCHEMA, a.TABLE_NAME, a.COLUMN_NAME, a.ORDINAL_POSITION, a.POSITION_IN_UNIQUE_CONSTRAINT, 
a.REFERENCED_TABLE_SCHEMA, a.REFERENCED_TABLE_NAME, a.REFERENCED_COLUMN_NAME from information_schema.key_column_usage as a where 1 != 1) as x where 1 != 1", - "Query": "select x.table_name from (select a.CONSTRAINT_CATALOG, a.CONSTRAINT_SCHEMA, a.CONSTRAINT_NAME, a.TABLE_CATALOG, a.TABLE_SCHEMA, a.TABLE_NAME, a.COLUMN_NAME, a.ORDINAL_POSITION, a.POSITION_IN_UNIQUE_CONSTRAINT, a.REFERENCED_TABLE_SCHEMA, a.REFERENCED_TABLE_NAME, a.REFERENCED_COLUMN_NAME from information_schema.key_column_usage as a) as x", + "FieldQuery": "select x.table_name from (select CONSTRAINT_CATALOG, CONSTRAINT_SCHEMA, CONSTRAINT_NAME, TABLE_CATALOG, TABLE_SCHEMA, TABLE_NAME, COLUMN_NAME, ORDINAL_POSITION, POSITION_IN_UNIQUE_CONSTRAINT, REFERENCED_TABLE_SCHEMA, REFERENCED_TABLE_NAME, REFERENCED_COLUMN_NAME from information_schema.key_column_usage as a where 1 != 1) as x where 1 != 1", + "Query": "select x.table_name from (select CONSTRAINT_CATALOG, CONSTRAINT_SCHEMA, CONSTRAINT_NAME, TABLE_CATALOG, TABLE_SCHEMA, TABLE_NAME, COLUMN_NAME, ORDINAL_POSITION, POSITION_IN_UNIQUE_CONSTRAINT, REFERENCED_TABLE_SCHEMA, REFERENCED_TABLE_NAME, REFERENCED_COLUMN_NAME from information_schema.key_column_usage as a) as x", "Table": "information_schema.key_column_usage" } } @@ -711,8 +711,8 @@ "Name": "main", "Sharded": false }, - "FieldQuery": "select x.table_name, x.COLUMN_NAME from (select a.CONSTRAINT_CATALOG, a.CONSTRAINT_SCHEMA, a.CONSTRAINT_NAME, a.TABLE_CATALOG, a.TABLE_SCHEMA, a.TABLE_NAME, a.COLUMN_NAME, a.ORDINAL_POSITION, a.POSITION_IN_UNIQUE_CONSTRAINT, a.REFERENCED_TABLE_SCHEMA, a.REFERENCED_TABLE_NAME, a.REFERENCED_COLUMN_NAME from information_schema.key_column_usage as a where 1 != 1) as x where 1 != 1", - "Query": "select x.table_name, x.COLUMN_NAME from (select a.CONSTRAINT_CATALOG, a.CONSTRAINT_SCHEMA, a.CONSTRAINT_NAME, a.TABLE_CATALOG, a.TABLE_SCHEMA, a.TABLE_NAME, a.COLUMN_NAME, a.ORDINAL_POSITION, a.POSITION_IN_UNIQUE_CONSTRAINT, a.REFERENCED_TABLE_SCHEMA, a.REFERENCED_TABLE_NAME, 
a.REFERENCED_COLUMN_NAME from information_schema.key_column_usage as a) as x", + "FieldQuery": "select x.table_name, x.COLUMN_NAME from (select CONSTRAINT_CATALOG, CONSTRAINT_SCHEMA, CONSTRAINT_NAME, TABLE_CATALOG, TABLE_SCHEMA, TABLE_NAME, COLUMN_NAME, ORDINAL_POSITION, POSITION_IN_UNIQUE_CONSTRAINT, REFERENCED_TABLE_SCHEMA, REFERENCED_TABLE_NAME, REFERENCED_COLUMN_NAME from information_schema.key_column_usage as a where 1 != 1) as x where 1 != 1", + "Query": "select x.table_name, x.COLUMN_NAME from (select CONSTRAINT_CATALOG, CONSTRAINT_SCHEMA, CONSTRAINT_NAME, TABLE_CATALOG, TABLE_SCHEMA, TABLE_NAME, COLUMN_NAME, ORDINAL_POSITION, POSITION_IN_UNIQUE_CONSTRAINT, REFERENCED_TABLE_SCHEMA, REFERENCED_TABLE_NAME, REFERENCED_COLUMN_NAME from information_schema.key_column_usage as a) as x", "Table": "information_schema.key_column_usage" }, { @@ -769,8 +769,8 @@ "Name": "main", "Sharded": false }, - "FieldQuery": "select a.table_name from (select a.CONSTRAINT_CATALOG, a.CONSTRAINT_SCHEMA, a.CONSTRAINT_NAME, a.TABLE_CATALOG, a.TABLE_SCHEMA, a.TABLE_NAME, a.COLUMN_NAME, a.ORDINAL_POSITION, a.POSITION_IN_UNIQUE_CONSTRAINT, a.REFERENCED_TABLE_SCHEMA, a.REFERENCED_TABLE_NAME, a.REFERENCED_COLUMN_NAME from information_schema.key_column_usage as a where 1 != 1) as a, (select CONSTRAINT_CATALOG, CONSTRAINT_SCHEMA, CONSTRAINT_NAME, UNIQUE_CONSTRAINT_CATALOG, UNIQUE_CONSTRAINT_SCHEMA, UNIQUE_CONSTRAINT_NAME, MATCH_OPTION, UPDATE_RULE, DELETE_RULE, TABLE_NAME, REFERENCED_TABLE_NAME from information_schema.referential_constraints where 1 != 1) as b where 1 != 1", - "Query": "select a.table_name from (select a.CONSTRAINT_CATALOG, a.CONSTRAINT_SCHEMA, a.CONSTRAINT_NAME, a.TABLE_CATALOG, a.TABLE_SCHEMA, a.TABLE_NAME, a.COLUMN_NAME, a.ORDINAL_POSITION, a.POSITION_IN_UNIQUE_CONSTRAINT, a.REFERENCED_TABLE_SCHEMA, a.REFERENCED_TABLE_NAME, a.REFERENCED_COLUMN_NAME from information_schema.key_column_usage as a where a.table_name = :a_table_name /* VARCHAR */) as a, (select 
CONSTRAINT_CATALOG, CONSTRAINT_SCHEMA, CONSTRAINT_NAME, UNIQUE_CONSTRAINT_CATALOG, UNIQUE_CONSTRAINT_SCHEMA, UNIQUE_CONSTRAINT_NAME, MATCH_OPTION, UPDATE_RULE, DELETE_RULE, TABLE_NAME, REFERENCED_TABLE_NAME from information_schema.referential_constraints where table_name = :table_name /* VARCHAR */) as b", + "FieldQuery": "select a.table_name from (select CONSTRAINT_CATALOG, CONSTRAINT_SCHEMA, CONSTRAINT_NAME, TABLE_CATALOG, TABLE_SCHEMA, TABLE_NAME, COLUMN_NAME, ORDINAL_POSITION, POSITION_IN_UNIQUE_CONSTRAINT, REFERENCED_TABLE_SCHEMA, REFERENCED_TABLE_NAME, REFERENCED_COLUMN_NAME from information_schema.key_column_usage as a where 1 != 1) as a, (select CONSTRAINT_CATALOG, CONSTRAINT_SCHEMA, CONSTRAINT_NAME, UNIQUE_CONSTRAINT_CATALOG, UNIQUE_CONSTRAINT_SCHEMA, UNIQUE_CONSTRAINT_NAME, MATCH_OPTION, UPDATE_RULE, DELETE_RULE, TABLE_NAME, REFERENCED_TABLE_NAME from information_schema.referential_constraints where 1 != 1) as b where 1 != 1", + "Query": "select a.table_name from (select CONSTRAINT_CATALOG, CONSTRAINT_SCHEMA, CONSTRAINT_NAME, TABLE_CATALOG, TABLE_SCHEMA, TABLE_NAME, COLUMN_NAME, ORDINAL_POSITION, POSITION_IN_UNIQUE_CONSTRAINT, REFERENCED_TABLE_SCHEMA, REFERENCED_TABLE_NAME, REFERENCED_COLUMN_NAME from information_schema.key_column_usage as a where a.table_name = :a_table_name /* VARCHAR */) as a, (select CONSTRAINT_CATALOG, CONSTRAINT_SCHEMA, CONSTRAINT_NAME, UNIQUE_CONSTRAINT_CATALOG, UNIQUE_CONSTRAINT_SCHEMA, UNIQUE_CONSTRAINT_NAME, MATCH_OPTION, UPDATE_RULE, DELETE_RULE, TABLE_NAME, REFERENCED_TABLE_NAME from information_schema.referential_constraints where table_name = :table_name /* VARCHAR */) as b", "SysTableTableName": "[a_table_name:VARCHAR(\"users\"), table_name:VARCHAR(\"users\")]", "Table": "information_schema.key_column_usage, information_schema.referential_constraints" } diff --git a/go/vt/vtgate/planbuilder/testdata/info_schema80_cases.json b/go/vt/vtgate/planbuilder/testdata/info_schema80_cases.json index 7322b1a4ad2..f05ad3be125 100644 
--- a/go/vt/vtgate/planbuilder/testdata/info_schema80_cases.json +++ b/go/vt/vtgate/planbuilder/testdata/info_schema80_cases.json @@ -307,7 +307,7 @@ "Sharded": false }, "FieldQuery": "select kcu.constraint_name as constraint_name, kcu.column_name as column_name, kcu.referenced_table_name as referenced_table_name, kcu.referenced_column_name as referenced_column_name, kcu.ordinal_position as ordinal_position, kcu.table_name as table_name from information_schema.key_column_usage as kcu where 1 != 1", - "Query": "select kcu.constraint_name as constraint_name, kcu.column_name as column_name, kcu.referenced_table_name as referenced_table_name, kcu.referenced_column_name as referenced_column_name, kcu.ordinal_position as ordinal_position, kcu.table_name as table_name from information_schema.key_column_usage as kcu where kcu.table_schema = :__vtschemaname /* VARCHAR */ and kcu.referenced_column_name is not null order by ordinal_position asc", + "Query": "select kcu.constraint_name as constraint_name, kcu.column_name as column_name, kcu.referenced_table_name as referenced_table_name, kcu.referenced_column_name as referenced_column_name, kcu.ordinal_position as ordinal_position, kcu.table_name as table_name from information_schema.key_column_usage as kcu where kcu.table_schema = :__vtschemaname /* VARCHAR */ and kcu.referenced_column_name is not null order by kcu.ordinal_position asc", "SysTableTableSchema": "[:v1]", "Table": "information_schema.key_column_usage" }, @@ -748,8 +748,8 @@ "Name": "main", "Sharded": false }, - "FieldQuery": "select x.table_name from (select a.CONSTRAINT_CATALOG, a.CONSTRAINT_SCHEMA, a.CONSTRAINT_NAME, a.TABLE_CATALOG, a.TABLE_SCHEMA, a.TABLE_NAME, a.COLUMN_NAME, a.ORDINAL_POSITION, a.POSITION_IN_UNIQUE_CONSTRAINT, a.REFERENCED_TABLE_SCHEMA, a.REFERENCED_TABLE_NAME, a.REFERENCED_COLUMN_NAME from information_schema.key_column_usage as a where 1 != 1) as x where 1 != 1", - "Query": "select x.table_name from (select a.CONSTRAINT_CATALOG, 
a.CONSTRAINT_SCHEMA, a.CONSTRAINT_NAME, a.TABLE_CATALOG, a.TABLE_SCHEMA, a.TABLE_NAME, a.COLUMN_NAME, a.ORDINAL_POSITION, a.POSITION_IN_UNIQUE_CONSTRAINT, a.REFERENCED_TABLE_SCHEMA, a.REFERENCED_TABLE_NAME, a.REFERENCED_COLUMN_NAME from information_schema.key_column_usage as a) as x", + "FieldQuery": "select x.table_name from (select CONSTRAINT_CATALOG, CONSTRAINT_SCHEMA, CONSTRAINT_NAME, TABLE_CATALOG, TABLE_SCHEMA, TABLE_NAME, COLUMN_NAME, ORDINAL_POSITION, POSITION_IN_UNIQUE_CONSTRAINT, REFERENCED_TABLE_SCHEMA, REFERENCED_TABLE_NAME, REFERENCED_COLUMN_NAME from information_schema.key_column_usage as a where 1 != 1) as x where 1 != 1", + "Query": "select x.table_name from (select CONSTRAINT_CATALOG, CONSTRAINT_SCHEMA, CONSTRAINT_NAME, TABLE_CATALOG, TABLE_SCHEMA, TABLE_NAME, COLUMN_NAME, ORDINAL_POSITION, POSITION_IN_UNIQUE_CONSTRAINT, REFERENCED_TABLE_SCHEMA, REFERENCED_TABLE_NAME, REFERENCED_COLUMN_NAME from information_schema.key_column_usage as a) as x", "Table": "information_schema.key_column_usage" } } @@ -776,8 +776,8 @@ "Name": "main", "Sharded": false }, - "FieldQuery": "select x.table_name, x.COLUMN_NAME from (select a.CONSTRAINT_CATALOG, a.CONSTRAINT_SCHEMA, a.CONSTRAINT_NAME, a.TABLE_CATALOG, a.TABLE_SCHEMA, a.TABLE_NAME, a.COLUMN_NAME, a.ORDINAL_POSITION, a.POSITION_IN_UNIQUE_CONSTRAINT, a.REFERENCED_TABLE_SCHEMA, a.REFERENCED_TABLE_NAME, a.REFERENCED_COLUMN_NAME from information_schema.key_column_usage as a where 1 != 1) as x where 1 != 1", - "Query": "select x.table_name, x.COLUMN_NAME from (select a.CONSTRAINT_CATALOG, a.CONSTRAINT_SCHEMA, a.CONSTRAINT_NAME, a.TABLE_CATALOG, a.TABLE_SCHEMA, a.TABLE_NAME, a.COLUMN_NAME, a.ORDINAL_POSITION, a.POSITION_IN_UNIQUE_CONSTRAINT, a.REFERENCED_TABLE_SCHEMA, a.REFERENCED_TABLE_NAME, a.REFERENCED_COLUMN_NAME from information_schema.key_column_usage as a) as x", + "FieldQuery": "select x.table_name, x.COLUMN_NAME from (select CONSTRAINT_CATALOG, CONSTRAINT_SCHEMA, CONSTRAINT_NAME, TABLE_CATALOG, TABLE_SCHEMA, 
TABLE_NAME, COLUMN_NAME, ORDINAL_POSITION, POSITION_IN_UNIQUE_CONSTRAINT, REFERENCED_TABLE_SCHEMA, REFERENCED_TABLE_NAME, REFERENCED_COLUMN_NAME from information_schema.key_column_usage as a where 1 != 1) as x where 1 != 1", + "Query": "select x.table_name, x.COLUMN_NAME from (select CONSTRAINT_CATALOG, CONSTRAINT_SCHEMA, CONSTRAINT_NAME, TABLE_CATALOG, TABLE_SCHEMA, TABLE_NAME, COLUMN_NAME, ORDINAL_POSITION, POSITION_IN_UNIQUE_CONSTRAINT, REFERENCED_TABLE_SCHEMA, REFERENCED_TABLE_NAME, REFERENCED_COLUMN_NAME from information_schema.key_column_usage as a) as x", "Table": "information_schema.key_column_usage" }, { @@ -834,8 +834,8 @@ "Name": "main", "Sharded": false }, - "FieldQuery": "select a.table_name from (select a.CONSTRAINT_CATALOG, a.CONSTRAINT_SCHEMA, a.CONSTRAINT_NAME, a.TABLE_CATALOG, a.TABLE_SCHEMA, a.TABLE_NAME, a.COLUMN_NAME, a.ORDINAL_POSITION, a.POSITION_IN_UNIQUE_CONSTRAINT, a.REFERENCED_TABLE_SCHEMA, a.REFERENCED_TABLE_NAME, a.REFERENCED_COLUMN_NAME from information_schema.key_column_usage as a where 1 != 1) as a, (select CONSTRAINT_CATALOG, CONSTRAINT_SCHEMA, CONSTRAINT_NAME, UNIQUE_CONSTRAINT_CATALOG, UNIQUE_CONSTRAINT_SCHEMA, UNIQUE_CONSTRAINT_NAME, MATCH_OPTION, UPDATE_RULE, DELETE_RULE, TABLE_NAME, REFERENCED_TABLE_NAME from information_schema.referential_constraints where 1 != 1) as b where 1 != 1", - "Query": "select a.table_name from (select a.CONSTRAINT_CATALOG, a.CONSTRAINT_SCHEMA, a.CONSTRAINT_NAME, a.TABLE_CATALOG, a.TABLE_SCHEMA, a.TABLE_NAME, a.COLUMN_NAME, a.ORDINAL_POSITION, a.POSITION_IN_UNIQUE_CONSTRAINT, a.REFERENCED_TABLE_SCHEMA, a.REFERENCED_TABLE_NAME, a.REFERENCED_COLUMN_NAME from information_schema.key_column_usage as a where a.table_name = :a_table_name /* VARCHAR */) as a, (select CONSTRAINT_CATALOG, CONSTRAINT_SCHEMA, CONSTRAINT_NAME, UNIQUE_CONSTRAINT_CATALOG, UNIQUE_CONSTRAINT_SCHEMA, UNIQUE_CONSTRAINT_NAME, MATCH_OPTION, UPDATE_RULE, DELETE_RULE, TABLE_NAME, REFERENCED_TABLE_NAME from 
information_schema.referential_constraints where table_name = :table_name /* VARCHAR */) as b", + "FieldQuery": "select a.table_name from (select CONSTRAINT_CATALOG, CONSTRAINT_SCHEMA, CONSTRAINT_NAME, TABLE_CATALOG, TABLE_SCHEMA, TABLE_NAME, COLUMN_NAME, ORDINAL_POSITION, POSITION_IN_UNIQUE_CONSTRAINT, REFERENCED_TABLE_SCHEMA, REFERENCED_TABLE_NAME, REFERENCED_COLUMN_NAME from information_schema.key_column_usage as a where 1 != 1) as a, (select CONSTRAINT_CATALOG, CONSTRAINT_SCHEMA, CONSTRAINT_NAME, UNIQUE_CONSTRAINT_CATALOG, UNIQUE_CONSTRAINT_SCHEMA, UNIQUE_CONSTRAINT_NAME, MATCH_OPTION, UPDATE_RULE, DELETE_RULE, TABLE_NAME, REFERENCED_TABLE_NAME from information_schema.referential_constraints where 1 != 1) as b where 1 != 1", + "Query": "select a.table_name from (select CONSTRAINT_CATALOG, CONSTRAINT_SCHEMA, CONSTRAINT_NAME, TABLE_CATALOG, TABLE_SCHEMA, TABLE_NAME, COLUMN_NAME, ORDINAL_POSITION, POSITION_IN_UNIQUE_CONSTRAINT, REFERENCED_TABLE_SCHEMA, REFERENCED_TABLE_NAME, REFERENCED_COLUMN_NAME from information_schema.key_column_usage as a where a.table_name = :a_table_name /* VARCHAR */) as a, (select CONSTRAINT_CATALOG, CONSTRAINT_SCHEMA, CONSTRAINT_NAME, UNIQUE_CONSTRAINT_CATALOG, UNIQUE_CONSTRAINT_SCHEMA, UNIQUE_CONSTRAINT_NAME, MATCH_OPTION, UPDATE_RULE, DELETE_RULE, TABLE_NAME, REFERENCED_TABLE_NAME from information_schema.referential_constraints where table_name = :table_name /* VARCHAR */) as b", "SysTableTableName": "[a_table_name:VARCHAR(\"users\"), table_name:VARCHAR(\"users\")]", "Table": "information_schema.key_column_usage, information_schema.referential_constraints" } diff --git a/go/vt/vtgate/planbuilder/testdata/memory_sort_cases.json b/go/vt/vtgate/planbuilder/testdata/memory_sort_cases.json index 58e6744f1a6..3ad80eff59f 100644 --- a/go/vt/vtgate/planbuilder/testdata/memory_sort_cases.json +++ b/go/vt/vtgate/planbuilder/testdata/memory_sort_cases.json @@ -24,9 +24,9 @@ "Name": "user", "Sharded": true }, - "FieldQuery": "select a, b, count(*), 
weight_string(a), weight_string(b) from `user` where 1 != 1 group by a, weight_string(a)", + "FieldQuery": "select a, b, count(*), weight_string(a), weight_string(`user`.b) from `user` where 1 != 1 group by a, weight_string(a)", "OrderBy": "(0|3) ASC", - "Query": "select a, b, count(*), weight_string(a), weight_string(b) from `user` group by a, weight_string(a) order by a asc", + "Query": "select a, b, count(*), weight_string(a), weight_string(`user`.b) from `user` group by a, weight_string(a) order by a asc", "Table": "`user`" } ] @@ -102,9 +102,9 @@ "Name": "user", "Sharded": true }, - "FieldQuery": "select a, b, count(*) as k, weight_string(a), weight_string(b) from `user` where 1 != 1 group by a, weight_string(a)", + "FieldQuery": "select a, b, count(*) as k, weight_string(a), weight_string(`user`.b) from `user` where 1 != 1 group by a, weight_string(a)", "OrderBy": "(0|3) ASC", - "Query": "select a, b, count(*) as k, weight_string(a), weight_string(b) from `user` group by a, weight_string(a) order by a asc", + "Query": "select a, b, count(*) as k, weight_string(a), weight_string(`user`.b) from `user` group by a, weight_string(a) order by a asc", "Table": "`user`" } ] @@ -259,7 +259,7 @@ }, "FieldQuery": "select id, weight_string(id) from (select `user`.id, `user`.col from `user` where 1 != 1) as t where 1 != 1", "OrderBy": "(0|1) ASC", - "Query": "select id, weight_string(id) from (select `user`.id, `user`.col from `user`) as t order by id asc", + "Query": "select id, weight_string(id) from (select `user`.id, `user`.col from `user`) as t order by t.id asc", "Table": "`user`" }, { @@ -552,9 +552,9 @@ "Name": "user", "Sharded": true }, - "FieldQuery": "select a, convert(a, binary), weight_string(convert(a, binary)) from `user` where 1 != 1", + "FieldQuery": "select a, convert(`user`.a, binary), weight_string(convert(`user`.a, binary)) from `user` where 1 != 1", "OrderBy": "(1|2) DESC", - "Query": "select a, convert(a, binary), weight_string(convert(a, binary)) 
from `user` order by convert(a, binary) desc", + "Query": "select a, convert(`user`.a, binary), weight_string(convert(`user`.a, binary)) from `user` order by convert(`user`.a, binary) desc", "ResultColumns": 1, "Table": "`user`" }, @@ -585,9 +585,9 @@ "Name": "user", "Sharded": true }, - "FieldQuery": "select u.a, convert(a, binary), weight_string(convert(a, binary)) from `user` as u where 1 != 1", + "FieldQuery": "select u.a, convert(u.a, binary), weight_string(convert(u.a, binary)) from `user` as u where 1 != 1", "OrderBy": "(1|2) DESC", - "Query": "select u.a, convert(a, binary), weight_string(convert(a, binary)) from `user` as u order by convert(a, binary) desc", + "Query": "select u.a, convert(u.a, binary), weight_string(convert(u.a, binary)) from `user` as u order by convert(u.a, binary) desc", "Table": "`user`" }, { @@ -624,7 +624,7 @@ }, "FieldQuery": "select id, intcol from `user` where 1 != 1", "OrderBy": "1 ASC", - "Query": "select id, intcol from `user` order by intcol asc", + "Query": "select id, intcol from `user` order by `user`.intcol asc", "Table": "`user`" }, "TablesUsed": [ diff --git a/go/vt/vtgate/planbuilder/testdata/oltp_cases.json b/go/vt/vtgate/planbuilder/testdata/oltp_cases.json index d470250531e..1db643580dd 100644 --- a/go/vt/vtgate/planbuilder/testdata/oltp_cases.json +++ b/go/vt/vtgate/planbuilder/testdata/oltp_cases.json @@ -91,7 +91,7 @@ }, "FieldQuery": "select c from sbtest1 where 1 != 1", "OrderBy": "0 ASC COLLATE latin1_swedish_ci", - "Query": "select c from sbtest1 where id between 50 and 235 order by c asc", + "Query": "select c from sbtest1 where id between 50 and 235 order by sbtest1.c asc", "Table": "sbtest1" }, "TablesUsed": [ @@ -119,7 +119,7 @@ }, "FieldQuery": "select c from sbtest30 where 1 != 1 group by c", "OrderBy": "0 ASC COLLATE latin1_swedish_ci", - "Query": "select c from sbtest30 where id between 1 and 10 group by c order by c asc", + "Query": "select c from sbtest30 where id between 1 and 10 group by c order 
by sbtest30.c asc", "Table": "sbtest30" } ] diff --git a/go/vt/vtgate/planbuilder/testdata/postprocess_cases.json b/go/vt/vtgate/planbuilder/testdata/postprocess_cases.json index fe9a7d49a18..996c7454e73 100644 --- a/go/vt/vtgate/planbuilder/testdata/postprocess_cases.json +++ b/go/vt/vtgate/planbuilder/testdata/postprocess_cases.json @@ -145,7 +145,7 @@ "Sharded": true }, "FieldQuery": "select id from `user` where 1 != 1", - "Query": "select id from `user` where :__sq_has_values and id in ::__vals", + "Query": "select id from `user` where :__sq_has_values and `user`.id in ::__vals", "Table": "`user`", "Values": [ "::__sq1" @@ -226,7 +226,7 @@ }, "FieldQuery": "select col from `user` where 1 != 1", "OrderBy": "0 ASC", - "Query": "select col from `user` order by col asc", + "Query": "select col from `user` order by `user`.col asc", "Table": "`user`" }, "TablesUsed": [ @@ -249,7 +249,7 @@ }, "FieldQuery": "select user_id, col1, col2, weight_string(user_id) from authoritative where 1 != 1", "OrderBy": "(0|3) ASC", - "Query": "select user_id, col1, col2, weight_string(user_id) from authoritative order by user_id asc", + "Query": "select user_id, col1, col2, weight_string(user_id) from authoritative order by authoritative.user_id asc", "ResultColumns": 3, "Table": "authoritative" }, @@ -273,7 +273,7 @@ }, "FieldQuery": "select user_id, col1, col2 from authoritative where 1 != 1", "OrderBy": "1 ASC COLLATE latin1_swedish_ci", - "Query": "select user_id, col1, col2 from authoritative order by col1 asc", + "Query": "select user_id, col1, col2 from authoritative order by authoritative.col1 asc", "Table": "authoritative" }, "TablesUsed": [ @@ -296,7 +296,7 @@ }, "FieldQuery": "select a, textcol1, b, weight_string(a), weight_string(b) from `user` where 1 != 1", "OrderBy": "(0|3) ASC, 1 ASC COLLATE latin1_swedish_ci, (2|4) ASC", - "Query": "select a, textcol1, b, weight_string(a), weight_string(b) from `user` order by a asc, textcol1 asc, b asc", + "Query": "select a, 
textcol1, b, weight_string(a), weight_string(b) from `user` order by `user`.a asc, `user`.textcol1 asc, `user`.b asc", "ResultColumns": 3, "Table": "`user`" }, @@ -320,7 +320,7 @@ }, "FieldQuery": "select a, `user`.textcol1, b, weight_string(a), weight_string(b) from `user` where 1 != 1", "OrderBy": "(0|3) ASC, 1 ASC COLLATE latin1_swedish_ci, (2|4) ASC", - "Query": "select a, `user`.textcol1, b, weight_string(a), weight_string(b) from `user` order by a asc, textcol1 asc, b asc", + "Query": "select a, `user`.textcol1, b, weight_string(a), weight_string(b) from `user` order by `user`.a asc, `user`.textcol1 asc, `user`.b asc", "ResultColumns": 3, "Table": "`user`" }, @@ -342,9 +342,9 @@ "Name": "user", "Sharded": true }, - "FieldQuery": "select a, textcol1, b, textcol2, weight_string(a), weight_string(b) from `user` where 1 != 1", - "OrderBy": "(0|4) ASC, 1 ASC COLLATE latin1_swedish_ci, (2|5) ASC, 3 ASC COLLATE latin1_swedish_ci", - "Query": "select a, textcol1, b, textcol2, weight_string(a), weight_string(b) from `user` order by a asc, textcol1 asc, b asc, textcol2 asc", + "FieldQuery": "select a, textcol1, b, textcol2, weight_string(a), weight_string(b), weight_string(textcol2) from `user` where 1 != 1", + "OrderBy": "(0|4) ASC, 1 ASC COLLATE latin1_swedish_ci, (2|5) ASC, (3|6) ASC COLLATE ", + "Query": "select a, textcol1, b, textcol2, weight_string(a), weight_string(b), weight_string(textcol2) from `user` order by `user`.a asc, `user`.textcol1 asc, `user`.b asc, `user`.textcol2 asc", "ResultColumns": 4, "Table": "`user`" }, @@ -373,7 +373,7 @@ }, "FieldQuery": "select id as foo, weight_string(id) from music where 1 != 1", "OrderBy": "(0|1) ASC", - "Query": "select id as foo, weight_string(id) from music order by foo asc", + "Query": "select id as foo, weight_string(id) from music order by id asc", "ResultColumns": 1, "Table": "music" }, @@ -440,7 +440,7 @@ }, "FieldQuery": "select col from `user` where 1 != 1", "OrderBy": "0 ASC", - "Query": "select col from 
`user` where :__sq_has_values and col in ::__sq1 order by col asc", + "Query": "select col from `user` where :__sq_has_values and col in ::__sq1 order by `user`.col asc", "Table": "`user`" } ] @@ -526,7 +526,7 @@ "Sharded": true }, "FieldQuery": "select `user`.col1 as a, `user`.col2, `user`.id from `user` where 1 != 1", - "Query": "select `user`.col1 as a, `user`.col2, `user`.id from `user` where `user`.id = 1 order by a asc", + "Query": "select `user`.col1 as a, `user`.col2, `user`.id from `user` where `user`.id = 1 order by `user`.col1 asc", "Table": "`user`", "Values": [ "INT64(1)" @@ -579,7 +579,7 @@ "Sharded": true }, "FieldQuery": "select `user`.col1 as a, `user`.col2, `user`.id from `user` where 1 != 1", - "Query": "select `user`.col1 as a, `user`.col2, `user`.id from `user` where `user`.id = 1 order by a asc", + "Query": "select `user`.col1 as a, `user`.col2, `user`.id from `user` where `user`.id = 1 order by `user`.col1 asc", "Table": "`user`", "Values": [ "INT64(1)" @@ -1079,7 +1079,7 @@ "Sharded": true }, "FieldQuery": "select col from `user` as route1 where 1 != 1", - "Query": "select col from `user` as route1 where id = 1 order by col asc", + "Query": "select col from `user` as route1 where id = 1 order by route1.col asc", "Table": "`user`", "Values": [ "INT64(1)" @@ -1365,7 +1365,7 @@ }, "FieldQuery": "select id as foo, weight_string(id) from music where 1 != 1", "OrderBy": "(0|1) ASC", - "Query": "select id as foo, weight_string(id) from music order by foo asc", + "Query": "select id as foo, weight_string(id) from music order by music.id asc", "ResultColumns": 1, "Table": "music" }, @@ -1389,7 +1389,7 @@ }, "FieldQuery": "select id as foo, id2 as id, weight_string(id2) from music where 1 != 1", "OrderBy": "(1|2) ASC", - "Query": "select id as foo, id2 as id, weight_string(id2) from music order by id asc", + "Query": "select id as foo, id2 as id, weight_string(id2) from music order by music.id2 asc", "ResultColumns": 2, "Table": "music" }, @@ -1419,7 
+1419,7 @@ }, "FieldQuery": "select `name`, weight_string(`name`) from `user` where 1 != 1", "OrderBy": "(0|1) ASC", - "Query": "select `name`, weight_string(`name`) from `user` order by `name` asc", + "Query": "select `name`, weight_string(`name`) from `user` order by `user`.`name` asc", "Table": "`user`" }, { @@ -1606,7 +1606,7 @@ }, "FieldQuery": "select `name`, weight_string(`name`) from `user` where 1 != 1", "OrderBy": "(0|1) ASC", - "Query": "select `name`, weight_string(`name`) from `user` order by `name` asc", + "Query": "select `name`, weight_string(`name`) from `user` order by `user`.`name` asc", "Table": "`user`" }, { @@ -1645,7 +1645,7 @@ }, "FieldQuery": "select id, id, weight_string(id) from `user` where 1 != 1", "OrderBy": "(0|2) ASC", - "Query": "select id, id, weight_string(id) from `user` order by id asc", + "Query": "select id, id, weight_string(id) from `user` order by `user`.id asc", "ResultColumns": 2, "Table": "`user`" }, @@ -1902,9 +1902,9 @@ "Name": "user", "Sharded": true }, - "FieldQuery": "select `user`.col1 as a, a collate utf8_general_ci, weight_string(a collate utf8_general_ci) from `user` where 1 != 1", + "FieldQuery": "select `user`.col1 as a, `user`.col1 collate utf8_general_ci, weight_string(`user`.col1 collate utf8_general_ci) from `user` where 1 != 1", "OrderBy": "(1|2) ASC", - "Query": "select `user`.col1 as a, a collate utf8_general_ci, weight_string(a collate utf8_general_ci) from `user` order by a collate utf8_general_ci asc", + "Query": "select `user`.col1 as a, `user`.col1 collate utf8_general_ci, weight_string(`user`.col1 collate utf8_general_ci) from `user` order by `user`.col1 collate utf8_general_ci asc", "ResultColumns": 1, "Table": "`user`" }, @@ -1950,9 +1950,9 @@ "Name": "user", "Sharded": true }, - "FieldQuery": "select `user`.col1 as a, a collate utf8_general_ci, weight_string(a collate utf8_general_ci) from `user` where 1 != 1", + "FieldQuery": "select `user`.col1 as a, `user`.col1 collate utf8_general_ci, 
weight_string(`user`.col1 collate utf8_general_ci) from `user` where 1 != 1", "OrderBy": "(1|2) ASC", - "Query": "select `user`.col1 as a, a collate utf8_general_ci, weight_string(a collate utf8_general_ci) from `user` order by a collate utf8_general_ci asc", + "Query": "select `user`.col1 as a, `user`.col1 collate utf8_general_ci, weight_string(`user`.col1 collate utf8_general_ci) from `user` order by `user`.col1 collate utf8_general_ci asc", "ResultColumns": 1, "Table": "`user`" }, @@ -2095,7 +2095,7 @@ }, "FieldQuery": "select col from `user` where 1 != 1 group by col", "OrderBy": "0 ASC", - "Query": "select col from `user` where id between :vtg1 and :vtg2 group by col order by col asc", + "Query": "select col from `user` where id between :vtg1 and :vtg2 group by col order by `user`.col asc", "Table": "`user`" } ] @@ -2126,7 +2126,7 @@ }, "FieldQuery": "select foo, col, weight_string(foo) from `user` where 1 != 1 group by col, foo, weight_string(foo)", "OrderBy": "1 ASC, (0|2) ASC", - "Query": "select foo, col, weight_string(foo) from `user` where id between :vtg1 and :vtg2 group by col, foo, weight_string(foo) order by col asc, foo asc", + "Query": "select foo, col, weight_string(foo) from `user` where id between :vtg1 and :vtg2 group by col, foo, weight_string(foo) order by `user`.col asc, foo asc", "Table": "`user`" } ] @@ -2174,5 +2174,170 @@ "user.user" ] } + }, + { + "comment": "DISTINCT on an unsupported collation should fall back on weightstrings", + "query": "select distinct textcol2 from user", + "plan": { + "QueryType": "SELECT", + "Original": "select distinct textcol2 from user", + "Instructions": { + "OperatorType": "Distinct", + "Collations": [ + "(0:1): " + ], + "ResultColumns": 1, + "Inputs": [ + { + "OperatorType": "Route", + "Variant": "Scatter", + "Keyspace": { + "Name": "user", + "Sharded": true + }, + "FieldQuery": "select textcol2, weight_string(textcol2) from `user` where 1 != 1", + "Query": "select distinct textcol2, 
weight_string(textcol2) from `user`", + "Table": "`user`" + } + ] + }, + "TablesUsed": [ + "user.user" + ] + } + }, + { + "comment": "ORDER BY literal works fine even when the columns have the same name", + "query": "select a.id, b.id from user as a, user_extra as b union all select 1, 2 order by 1", + "plan": { + "QueryType": "SELECT", + "Original": "select a.id, b.id from user as a, user_extra as b union all select 1, 2 order by 1", + "Instructions": { + "OperatorType": "Sort", + "Variant": "Memory", + "OrderBy": "(0|2) ASC", + "ResultColumns": 2, + "Inputs": [ + { + "OperatorType": "Concatenate", + "Inputs": [ + { + "OperatorType": "Join", + "Variant": "Join", + "JoinColumnIndexes": "L:0,R:0,L:1", + "TableName": "`user`_user_extra", + "Inputs": [ + { + "OperatorType": "Route", + "Variant": "Scatter", + "Keyspace": { + "Name": "user", + "Sharded": true + }, + "FieldQuery": "select a.id, weight_string(a.id) from `user` as a where 1 != 1", + "Query": "select a.id, weight_string(a.id) from `user` as a", + "Table": "`user`" + }, + { + "OperatorType": "Route", + "Variant": "Scatter", + "Keyspace": { + "Name": "user", + "Sharded": true + }, + "FieldQuery": "select b.id from user_extra as b where 1 != 1", + "Query": "select b.id from user_extra as b", + "Table": "user_extra" + } + ] + }, + { + "OperatorType": "Route", + "Variant": "Reference", + "Keyspace": { + "Name": "main", + "Sharded": false + }, + "FieldQuery": "select 1, 2, weight_string(1) from dual where 1 != 1", + "Query": "select 1, 2, weight_string(1) from dual", + "Table": "dual" + } + ] + } + ] + }, + "TablesUsed": [ + "main.dual", + "user.user", + "user.user_extra" + ] + } + }, + { + "comment": "ORDER BY literal works fine even when the columns have the same name", + "query": "select a.id, b.id from user as a, user_extra as b union all select 1, 2 order by 2", + "plan": { + "QueryType": "SELECT", + "Original": "select a.id, b.id from user as a, user_extra as b union all select 1, 2 order by 2", + 
"Instructions": { + "OperatorType": "Sort", + "Variant": "Memory", + "OrderBy": "(1|2) ASC", + "ResultColumns": 2, + "Inputs": [ + { + "OperatorType": "Concatenate", + "Inputs": [ + { + "OperatorType": "Join", + "Variant": "Join", + "JoinColumnIndexes": "L:0,R:0,R:1", + "TableName": "`user`_user_extra", + "Inputs": [ + { + "OperatorType": "Route", + "Variant": "Scatter", + "Keyspace": { + "Name": "user", + "Sharded": true + }, + "FieldQuery": "select a.id from `user` as a where 1 != 1", + "Query": "select a.id from `user` as a", + "Table": "`user`" + }, + { + "OperatorType": "Route", + "Variant": "Scatter", + "Keyspace": { + "Name": "user", + "Sharded": true + }, + "FieldQuery": "select b.id, weight_string(b.id) from user_extra as b where 1 != 1", + "Query": "select b.id, weight_string(b.id) from user_extra as b", + "Table": "user_extra" + } + ] + }, + { + "OperatorType": "Route", + "Variant": "Reference", + "Keyspace": { + "Name": "main", + "Sharded": false + }, + "FieldQuery": "select 1, 2, weight_string(2) from dual where 1 != 1", + "Query": "select 1, 2, weight_string(2) from dual", + "Table": "dual" + } + ] + } + ] + }, + "TablesUsed": [ + "main.dual", + "user.user", + "user.user_extra" + ] + } } ] \ No newline at end of file diff --git a/go/vt/vtgate/planbuilder/testdata/select_cases.json b/go/vt/vtgate/planbuilder/testdata/select_cases.json index 5ff46689a91..01976ba6ca7 100644 --- a/go/vt/vtgate/planbuilder/testdata/select_cases.json +++ b/go/vt/vtgate/planbuilder/testdata/select_cases.json @@ -388,8 +388,8 @@ "Name": "user", "Sharded": true }, - "FieldQuery": "select a.user_id, a.col1, a.col2 from authoritative as a where 1 != 1", - "Query": "select a.user_id, a.col1, a.col2 from authoritative as a", + "FieldQuery": "select user_id, col1, col2 from authoritative as a where 1 != 1", + "Query": "select user_id, col1, col2 from authoritative as a", "Table": "authoritative" }, "TablesUsed": [ @@ -1000,7 +1000,7 @@ }, "FieldQuery": "select user_id, 
weight_string(user_id) from music where 1 != 1", "OrderBy": "(0|1) ASC", - "Query": "select user_id, weight_string(user_id) from music order by user_id asc limit :__upper_limit", + "Query": "select user_id, weight_string(user_id) from music order by music.user_id asc limit :__upper_limit", "ResultColumns": 1, "Table": "music" } @@ -1129,7 +1129,7 @@ "Sharded": true }, "FieldQuery": "select user0_.col as col0_ from `user` as user0_ where 1 != 1", - "Query": "select user0_.col as col0_ from `user` as user0_ where id = 1 order by col0_ desc limit 3", + "Query": "select user0_.col as col0_ from `user` as user0_ where id = 1 order by user0_.col desc limit 3", "Table": "`user`", "Values": [ "INT64(1)" @@ -1328,8 +1328,8 @@ "Name": "user", "Sharded": true }, - "FieldQuery": "select t.id1 from (select `user`.id as id1 from `user` where 1 != 1) as t where 1 != 1", - "Query": "select t.id1 from (select `user`.id as id1 from `user`) as t", + "FieldQuery": "select id1 from (select `user`.id as id1 from `user` where 1 != 1) as t where 1 != 1", + "Query": "select id1 from (select `user`.id as id1 from `user`) as t", "Table": "`user`" }, { @@ -1339,8 +1339,8 @@ "Name": "user", "Sharded": true }, - "FieldQuery": "select t.id2 from (select user_extra.id as id2 from user_extra where 1 != 1) as t where 1 != 1", - "Query": "select t.id2 from (select user_extra.id as id2 from user_extra) as t", + "FieldQuery": "select id2 from (select user_extra.id as id2 from user_extra where 1 != 1) as t where 1 != 1", + "Query": "select id2 from (select user_extra.id as id2 from user_extra) as t", "Table": "user_extra" } ] @@ -1428,8 +1428,8 @@ "Name": "main", "Sharded": false }, - "FieldQuery": "select a.col1, a.col2 from (select col1, col2 from unsharded where 1 != 1 union select col1, col2 from unsharded where 1 != 1) as a where 1 != 1", - "Query": "select a.col1, a.col2 from (select col1, col2 from unsharded where id = 1 union select col1, col2 from unsharded where id = 3) as a", + "FieldQuery": 
"select * from (select col1, col2 from unsharded where 1 != 1 union select col1, col2 from unsharded where 1 != 1) as a where 1 != 1", + "Query": "select * from (select col1, col2 from unsharded where id = 1 union select col1, col2 from unsharded where id = 3) as a", "Table": "unsharded" }, "TablesUsed": [ @@ -1573,6 +1573,28 @@ ] } }, + { + "comment": "routing table on music", + "query": "select * from second_user.bar where id > 2", + "plan": { + "QueryType": "SELECT", + "Original": "select * from second_user.bar where id > 2", + "Instructions": { + "OperatorType": "Route", + "Variant": "Scatter", + "Keyspace": { + "Name": "user", + "Sharded": true + }, + "FieldQuery": "select * from music as bar where 1 != 1", + "Query": "select * from music as bar where id > 2", + "Table": "music" + }, + "TablesUsed": [ + "user.music" + ] + } + }, { "comment": "testing SingleRow Projection", "query": "select 42", @@ -1784,7 +1806,7 @@ }, "FieldQuery": "select user_id, count(id), weight_string(user_id) from music where 1 != 1 group by user_id", "OrderBy": "(0|2) ASC", - "Query": "select user_id, count(id), weight_string(user_id) from music group by user_id having count(user_id) = 1 order by user_id asc limit :__upper_limit", + "Query": "select user_id, count(id), weight_string(user_id) from music group by user_id having count(user_id) = 1 order by music.user_id asc limit :__upper_limit", "ResultColumns": 2, "Table": "music" } @@ -2020,77 +2042,6 @@ ] } }, - { - "comment": "select (select col from user limit 1) as a from user join user_extra order by a", - "query": "select (select col from user limit 1) as a from user join user_extra order by a", - "plan": { - "QueryType": "SELECT", - "Original": "select (select col from user limit 1) as a from user join user_extra order by a", - "Instructions": { - "OperatorType": "Join", - "Variant": "Join", - "JoinColumnIndexes": "L:0", - "TableName": "`user`_user_extra", - "Inputs": [ - { - "OperatorType": "UncorrelatedSubquery", - "Variant": 
"PulloutValue", - "PulloutVars": [ - "__sq1" - ], - "Inputs": [ - { - "InputName": "SubQuery", - "OperatorType": "Limit", - "Count": "INT64(1)", - "Inputs": [ - { - "OperatorType": "Route", - "Variant": "Scatter", - "Keyspace": { - "Name": "user", - "Sharded": true - }, - "FieldQuery": "select col from `user` where 1 != 1", - "Query": "select col from `user` limit :__upper_limit", - "Table": "`user`" - } - ] - }, - { - "InputName": "Outer", - "OperatorType": "Route", - "Variant": "Scatter", - "Keyspace": { - "Name": "user", - "Sharded": true - }, - "FieldQuery": "select :__sq1 as a, weight_string(:__sq1) from `user` where 1 != 1", - "OrderBy": "(0|1) ASC", - "Query": "select :__sq1 as a, weight_string(:__sq1) from `user` order by a asc", - "Table": "`user`" - } - ] - }, - { - "OperatorType": "Route", - "Variant": "Scatter", - "Keyspace": { - "Name": "user", - "Sharded": true - }, - "FieldQuery": "select 1 from user_extra where 1 != 1", - "Query": "select 1 from user_extra", - "Table": "user_extra" - } - ] - }, - "TablesUsed": [ - "user.user", - "user.user_extra" - ] - } - }, { "comment": "select t.a from (select (select col from user limit 1) as a from user join user_extra) t", "query": "select t.a from (select (select col from user limit 1) as a from user join user_extra) t", @@ -2314,7 +2265,7 @@ }, "FieldQuery": "select col, `user`.id from `user` where 1 != 1", "OrderBy": "0 ASC", - "Query": "select col, `user`.id from `user` order by col asc", + "Query": "select col, `user`.id from `user` order by `user`.col asc", "Table": "`user`" }, { @@ -2615,7 +2566,7 @@ "Sharded": false }, "FieldQuery": "select 1 from (select col, count(*) as a from unsharded where 1 != 1 group by col) as f left join unsharded as u on f.col = u.id where 1 != 1", - "Query": "select 1 from (select col, count(*) as a from unsharded group by col having count(*) > 0 limit 0, 12) as f left join unsharded as u on f.col = u.id", + "Query": "select 1 from (select col, count(*) as a from unsharded 
group by col having a > 0 limit 0, 12) as f left join unsharded as u on f.col = u.id", "Table": "unsharded" }, "TablesUsed": [ @@ -2740,7 +2691,7 @@ }, "FieldQuery": "select id, weight_string(id) from `user` where 1 != 1", "OrderBy": "(0|1) ASC", - "Query": "select id, weight_string(id) from `user` order by id asc limit :__upper_limit", + "Query": "select id, weight_string(id) from `user` order by `user`.id asc limit :__upper_limit", "Table": "`user`" } ] @@ -2753,8 +2704,8 @@ "Name": "user", "Sharded": true }, - "FieldQuery": "select :__sq1 as `(select id from ``user`` order by id asc limit 1)` from user_extra where 1 != 1", - "Query": "select :__sq1 as `(select id from ``user`` order by id asc limit 1)` from user_extra", + "FieldQuery": "select :__sq1 as `(select id from ``user`` order by ``user``.id asc limit 1)` from user_extra where 1 != 1", + "Query": "select :__sq1 as `(select id from ``user`` order by ``user``.id asc limit 1)` from user_extra", "Table": "user_extra" } ] @@ -3230,7 +3181,7 @@ }, "FieldQuery": "select id, `name`, weight_string(id) from `user` where 1 != 1", "OrderBy": "(0|2) ASC", - "Query": "select id, `name`, weight_string(id) from `user` where `name` = 'aa' order by id asc limit :__upper_limit", + "Query": "select id, `name`, weight_string(id) from `user` where `name` = 'aa' order by `user`.id asc limit :__upper_limit", "ResultColumns": 2, "Table": "`user`" } @@ -3381,7 +3332,7 @@ "Sharded": true }, "FieldQuery": "select music.id from music where 1 != 1", - "Query": "select music.id from music where music.id in (select _inner.id from (select music.id from music where music.user_id in (1, 2, 3)) as _inner)", + "Query": "select music.id from music where music.id in (select id from (select music.id from music where music.user_id in (1, 2, 3)) as _inner)", "Table": "music", "Values": [ "(INT64(1), INT64(2), INT64(3))" @@ -3901,7 +3852,7 @@ "Sharded": true }, "FieldQuery": "select music.id from music where 1 != 1", - "Query": "select music.id 
from music where music.id in (select subquery_for_limit.id from (select subquery_for_limit.id from (select music.id from music where music.user_id = 5 limit 10) as subquery_for_limit) as subquery_for_limit)", + "Query": "select music.id from music where music.id in (select id from (select id from (select music.id from music where music.user_id = 5 limit 10) as subquery_for_limit) as subquery_for_limit)", "Table": "music", "Values": [ "INT64(5)" @@ -3927,7 +3878,7 @@ "Sharded": true }, "FieldQuery": "select music.id from music where 1 != 1", - "Query": "select music.id from music where music.id in (select subquery_for_limit.id from (select subquery_for_limit.id from (select music.id from music where music.user_id in (5) limit 10) as subquery_for_limit) as subquery_for_limit)", + "Query": "select music.id from music where music.id in (select id from (select id from (select music.id from music where music.user_id in (5) limit 10) as subquery_for_limit) as subquery_for_limit)", "Table": "music", "Values": [ "INT64(5)" @@ -3965,8 +3916,8 @@ "Name": "user", "Sharded": true }, - "FieldQuery": "select subquery_for_limit.id from (select subquery_for_limit.id from (select music.id from music where 1 != 1) as subquery_for_limit where 1 != 1) as subquery_for_limit where 1 != 1", - "Query": "select subquery_for_limit.id from (select subquery_for_limit.id from (select music.id from music where music.user_id in ::__vals) as subquery_for_limit limit :__upper_limit) as subquery_for_limit limit :__upper_limit", + "FieldQuery": "select id from (select id from (select music.id from music where 1 != 1) as subquery_for_limit where 1 != 1) as subquery_for_limit where 1 != 1", + "Query": "select id from (select id from (select music.id from music where music.user_id in ::__vals) as subquery_for_limit limit :__upper_limit) as subquery_for_limit limit :__upper_limit", "Table": "music", "Values": [ "(INT64(5), INT64(6))" @@ -4024,8 +3975,8 @@ "Name": "user", "Sharded": true }, - 
"FieldQuery": "select subquery_for_limit.id from (select subquery_for_limit.id from (select music.id from music where 1 != 1) as subquery_for_limit where 1 != 1) as subquery_for_limit where 1 != 1", - "Query": "select subquery_for_limit.id from (select subquery_for_limit.id from (select music.id from music) as subquery_for_limit limit :__upper_limit) as subquery_for_limit limit :__upper_limit", + "FieldQuery": "select id from (select id from (select music.id from music where 1 != 1) as subquery_for_limit where 1 != 1) as subquery_for_limit where 1 != 1", + "Query": "select id from (select id from (select music.id from music) as subquery_for_limit limit :__upper_limit) as subquery_for_limit limit :__upper_limit", "Table": "music" } ] @@ -4832,7 +4783,7 @@ }, "FieldQuery": "select u.foo, weight_string(u.foo) from `user` as u where 1 != 1", "OrderBy": "(0|1) ASC", - "Query": "select u.foo, weight_string(u.foo) from `user` as u order by foo asc", + "Query": "select u.foo, weight_string(u.foo) from `user` as u order by u.foo asc", "Table": "`user`" }, { diff --git a/go/vt/vtgate/planbuilder/testdata/sysschema_default.json b/go/vt/vtgate/planbuilder/testdata/sysschema_default.json index 1d25f0f60af..cb633955f22 100644 --- a/go/vt/vtgate/planbuilder/testdata/sysschema_default.json +++ b/go/vt/vtgate/planbuilder/testdata/sysschema_default.json @@ -77,8 +77,8 @@ "Name": "main", "Sharded": false }, - "FieldQuery": "select x.`1` from (select 1 from information_schema.schemata where 1 != 1) as x where 1 != 1", - "Query": "select x.`1` from (select 1 from information_schema.schemata where schema_name = :__vtschemaname /* VARCHAR */ limit 1) as x", + "FieldQuery": "select `1` from (select 1 from information_schema.schemata where 1 != 1) as x where 1 != 1", + "Query": "select `1` from (select 1 from information_schema.schemata where schema_name = :__vtschemaname /* VARCHAR */ limit 1) as x", "SysTableTableSchema": "[VARCHAR(\"MyDatabase\")]", "Table": 
"information_schema.schemata" } diff --git a/go/vt/vtgate/planbuilder/testdata/table_aggr_cases.json b/go/vt/vtgate/planbuilder/testdata/table_aggr_cases.json index 38aef7ecde9..41c23a5affe 100644 --- a/go/vt/vtgate/planbuilder/testdata/table_aggr_cases.json +++ b/go/vt/vtgate/planbuilder/testdata/table_aggr_cases.json @@ -226,7 +226,7 @@ { "comment": "scatter aggregate group by invalid column number", "query": "select col from t_user group by 2", - "plan": "Unknown column '2' in 'group statement'" + "plan": "Unknown column '2' in 'group clause'" }, { "comment": "scatter aggregate with numbered order by columns", @@ -282,7 +282,7 @@ }, "FieldQuery": "select a, b, c, d, count(*), weight_string(a), weight_string(b), weight_string(c) from t_user_0 where 1 != 1 group by a, b, c, weight_string(a), weight_string(b), weight_string(c)", "OrderBy": "(0|5) ASC, (1|6) ASC, (2|7) ASC", - "Query": "select a, b, c, d, count(*), weight_string(a), weight_string(b), weight_string(c) from t_user group by a, b, c, weight_string(a), weight_string(b), weight_string(c) order by a asc, b asc, c asc", + "Query": "select a, b, c, d, count(*), weight_string(a), weight_string(b), weight_string(c) from t_user group by a, b, c, weight_string(a), weight_string(b), weight_string(c) order by t_user.a asc, t_user.b asc, t_user.c asc", "Table": "t_user" } ] @@ -314,7 +314,7 @@ }, "FieldQuery": "select a, b, c, d, count(*), weight_string(d), weight_string(b), weight_string(a), weight_string(c) from t_user_0 where 1 != 1 group by d, b, a, c, weight_string(d), weight_string(b), weight_string(a), weight_string(c)", "OrderBy": "(3|5) ASC, (1|6) ASC, (0|7) ASC, (2|8) ASC", - "Query": "select a, b, c, d, count(*), weight_string(d), weight_string(b), weight_string(a), weight_string(c) from t_user group by d, b, a, c, weight_string(d), weight_string(b), weight_string(a), weight_string(c) order by d asc, b asc, a asc, c asc", + "Query": "select a, b, c, d, count(*), weight_string(d), weight_string(b), 
weight_string(a), weight_string(c) from t_user group by d, b, a, c, weight_string(d), weight_string(b), weight_string(a), weight_string(c) order by t_user.d asc, t_user.b asc, t_user.a asc, t_user.c asc", "Table": "t_user" } ] @@ -346,7 +346,7 @@ }, "FieldQuery": "select a, b, c, d, count(*), weight_string(d), weight_string(b), weight_string(a), weight_string(c) from t_user_0 where 1 != 1 group by d, b, a, c, weight_string(d), weight_string(b), weight_string(a), weight_string(c)", "OrderBy": "(3|5) ASC, (1|6) ASC, (0|7) ASC, (2|8) ASC", - "Query": "select a, b, c, d, count(*), weight_string(d), weight_string(b), weight_string(a), weight_string(c) from t_user group by d, b, a, c, weight_string(d), weight_string(b), weight_string(a), weight_string(c) order by d asc, b asc, a asc, c asc", + "Query": "select a, b, c, d, count(*), weight_string(d), weight_string(b), weight_string(a), weight_string(c) from t_user group by d, b, a, c, weight_string(d), weight_string(b), weight_string(a), weight_string(c) order by t_user.d asc, t_user.b asc, t_user.a asc, t_user.c asc", "Table": "t_user" } ] @@ -378,7 +378,7 @@ }, "FieldQuery": "select a, b, c, count(*), weight_string(a), weight_string(c), weight_string(b) from t_user_0 where 1 != 1 group by a, c, b, weight_string(a), weight_string(c), weight_string(b)", "OrderBy": "(0|4) DESC, (2|5) DESC, (1|6) ASC", - "Query": "select a, b, c, count(*), weight_string(a), weight_string(c), weight_string(b) from t_user group by a, c, b, weight_string(a), weight_string(c), weight_string(b) order by a desc, c desc, b asc", + "Query": "select a, b, c, count(*), weight_string(a), weight_string(c), weight_string(b) from t_user group by a, c, b, weight_string(a), weight_string(c), weight_string(b) order by a desc, c desc, t_user.b asc", "Table": "t_user" } ] @@ -451,8 +451,8 @@ "Name": "user", "Sharded": true }, - "FieldQuery": "select t_user_0.col1 as a, a collate utf8_general_ci, weight_string(a collate utf8_general_ci) from t_user_0 where 1 
!= 1 group by a collate utf8_general_ci, weight_string(a collate utf8_general_ci)", - "Query": "select t_user.col1 as a, a collate utf8_general_ci, weight_string(a collate utf8_general_ci) from t_user where t_user.id = 5 group by a collate utf8_general_ci, weight_string(a collate utf8_general_ci) order by a collate utf8_general_ci asc", + "FieldQuery": "select t_user_0.col1 as a, t_user_0.col1 collate utf8_general_ci, weight_string(t_user_0.col1 collate utf8_general_ci) from t_user_0 where 1 != 1 group by t_user_0.col1 collate utf8_general_ci, weight_string(t_user_0.col1 collate utf8_general_ci)", + "Query": "select t_user.col1 as a, t_user.col1 collate utf8_general_ci, weight_string(t_user.col1 collate utf8_general_ci) from t_user where t_user.id = 5 group by t_user.col1 collate utf8_general_ci, weight_string(t_user.col1 collate utf8_general_ci) order by t_user.col1 collate utf8_general_ci asc", "Table": "t_user", "Values": [ "INT64(5)" @@ -501,7 +501,7 @@ { "comment": "Group by out of range column number (code is duplicated from symab).", "query": "select id from t_user group by 2", - "plan": "Unknown column '2' in 'group statement'" + "plan": "Unknown column '2' in 'group clause'" }, { "comment": "aggregate query with order by aggregate column along with NULL", @@ -592,9 +592,9 @@ "Name": "user", "Sharded": true }, - "FieldQuery": "select lower(textcol1) as v, count(*), weight_string(lower(textcol1)) from t_user_0 where 1 != 1 group by v, weight_string(lower(textcol1))", + "FieldQuery": "select lower(textcol1) as v, count(*), weight_string(lower(textcol1)) from t_user_0 where 1 != 1 group by lower(textcol1), weight_string(lower(textcol1))", "OrderBy": "(0|2) ASC", - "Query": "select lower(textcol1) as v, count(*), weight_string(lower(textcol1)) from t_user group by v, weight_string(lower(textcol1)) order by v asc", + "Query": "select lower(textcol1) as v, count(*), weight_string(lower(textcol1)) from t_user group by lower(textcol1), 
weight_string(lower(textcol1)) order by lower(textcol1) asc", "Table": "t_user" } ] @@ -624,9 +624,9 @@ "Name": "user", "Sharded": true }, - "FieldQuery": "select char_length(texcol1) as a, count(*), weight_string(char_length(texcol1)) from t_user_0 where 1 != 1 group by a, weight_string(char_length(texcol1))", + "FieldQuery": "select char_length(texcol1) as a, count(*), weight_string(char_length(texcol1)) from t_user_0 where 1 != 1 group by char_length(texcol1), weight_string(char_length(texcol1))", "OrderBy": "(0|2) ASC", - "Query": "select char_length(texcol1) as a, count(*), weight_string(char_length(texcol1)) from t_user group by a, weight_string(char_length(texcol1)) order by a asc", + "Query": "select char_length(texcol1) as a, count(*), weight_string(char_length(texcol1)) from t_user group by char_length(texcol1), weight_string(char_length(texcol1)) order by char_length(t_user.texcol1) asc", "Table": "t_user" } ] @@ -656,9 +656,9 @@ "Name": "user", "Sharded": true }, - "FieldQuery": "select ascii(val1) as a, count(*), weight_string(ascii(val1)) from t_user_0 where 1 != 1 group by a, weight_string(ascii(val1))", + "FieldQuery": "select ascii(val1) as a, count(*), weight_string(ascii(val1)) from t_user_0 where 1 != 1 group by ascii(val1), weight_string(ascii(val1))", "OrderBy": "(0|2) ASC", - "Query": "select ascii(val1) as a, count(*), weight_string(ascii(val1)) from t_user group by a, weight_string(ascii(val1)) order by a asc", + "Query": "select ascii(val1) as a, count(*), weight_string(ascii(val1)) from t_user group by ascii(val1), weight_string(ascii(val1)) order by ascii(val1) asc", "Table": "t_user" } ] @@ -3129,7 +3129,8 @@ "Instructions": { "OperatorType": "Aggregate", "Variant": "Scalar", - "Aggregates": "min(0 COLLATE latin1_swedish_ci) AS min(textcol1), max(1 COLLATE latin1_swedish_ci) AS max(textcol2), sum_distinct(2 COLLATE latin1_swedish_ci) AS sum(distinct textcol1), count_distinct(3 COLLATE latin1_swedish_ci) AS count(distinct textcol1)", + 
"Aggregates": "min(0 COLLATE latin1_swedish_ci) AS min(textcol1), max(1|4) AS max(textcol2), sum_distinct(2 COLLATE latin1_swedish_ci) AS sum(distinct textcol1), count_distinct(3 COLLATE latin1_swedish_ci) AS count(distinct textcol1)", + "ResultColumns": 4, "Inputs": [ { "OperatorType": "TableRoute", @@ -3138,9 +3139,9 @@ "Name": "user", "Sharded": true }, - "FieldQuery": "select min(textcol1), max(textcol2), textcol1, textcol1 from t_user_0 where 1 != 1 group by textcol1", + "FieldQuery": "select min(textcol1), max(textcol2), textcol1, textcol1, weight_string(textcol2) from t_user_0 where 1 != 1 group by textcol1, weight_string(textcol2)", "OrderBy": "2 ASC COLLATE latin1_swedish_ci", - "Query": "select min(textcol1), max(textcol2), textcol1, textcol1 from t_user group by textcol1 order by textcol1 asc", + "Query": "select min(textcol1), max(textcol2), textcol1, textcol1, weight_string(textcol2) from t_user group by textcol1, weight_string(textcol2) order by textcol1 asc", "Table": "t_user" } ] @@ -3159,8 +3160,9 @@ "Instructions": { "OperatorType": "Aggregate", "Variant": "Ordered", - "Aggregates": "min(1 COLLATE latin1_swedish_ci) AS min(textcol1), max(2 COLLATE latin1_swedish_ci) AS max(textcol2), sum_distinct(3 COLLATE latin1_swedish_ci) AS sum(distinct textcol1), count_distinct(4 COLLATE latin1_swedish_ci) AS count(distinct textcol1)", + "Aggregates": "min(1 COLLATE latin1_swedish_ci) AS min(textcol1), max(2|5) AS max(textcol2), sum_distinct(3 COLLATE latin1_swedish_ci) AS sum(distinct textcol1), count_distinct(4 COLLATE latin1_swedish_ci) AS count(distinct textcol1)", "GroupBy": "0", + "ResultColumns": 5, "Inputs": [ { "OperatorType": "TableRoute", @@ -3169,9 +3171,9 @@ "Name": "user", "Sharded": true }, - "FieldQuery": "select col, min(textcol1), max(textcol2), textcol1, textcol1 from t_user_0 where 1 != 1 group by col, textcol1", + "FieldQuery": "select col, min(textcol1), max(textcol2), textcol1, textcol1, weight_string(textcol2) from t_user_0 where 1 != 
1 group by col, textcol1, weight_string(textcol2)", "OrderBy": "0 ASC, 3 ASC COLLATE latin1_swedish_ci", - "Query": "select col, min(textcol1), max(textcol2), textcol1, textcol1 from t_user group by col, textcol1 order by col asc, textcol1 asc", + "Query": "select col, min(textcol1), max(textcol2), textcol1, textcol1, weight_string(textcol2) from t_user group by col, textcol1, weight_string(textcol2) order by col asc, textcol1 asc", "Table": "t_user" } ] @@ -3657,7 +3659,7 @@ "Original": "select foo, sum(foo) as fooSum, sum(bar) as barSum from t_user group by foo having fooSum+sum(bar) = 42", "Instructions": { "OperatorType": "Filter", - "Predicate": "sum(foo) + sum(bar) = 42", + "Predicate": "sum(t_user.foo) + sum(bar) = 42", "ResultColumns": 3, "Inputs": [ { @@ -3827,7 +3829,6 @@ "user.t_user_extra" ] } - }, { "comment": "Cannot have more than one aggr(distinct...", @@ -4786,7 +4787,7 @@ "Original": "select foo, sum(foo) as fooSum, sum(bar) as barSum from t_user where id=123 group by foo having fooSum+sum(bar) = 42", "Instructions": { "OperatorType": "Filter", - "Predicate": "sum(foo) + sum(bar) = 42", + "Predicate": "sum(t_user.foo) + sum(bar) = 42", "ResultColumns": 3, "Inputs": [ { @@ -6271,15 +6272,15 @@ } }, { - "comment":"correlated subquery in exists clause with an ordering", - "query":"select col, id from t_user where exists(select user_id from t_user_extra where user_id = 3 and user_id < t_user.id) order by id", - "plan":{ - "QueryType":"SELECT", - "Original":"select col, id from t_user where exists(select user_id from t_user_extra where user_id = 3 and user_id < t_user.id) order by id", - "Instructions":{ - "OperatorType":"SemiJoin", - "JoinVars":{ - "t_user_id":1 + "comment": "correlated subquery in exists clause with an ordering", + "query": "select col, id from t_user where exists(select user_id from t_user_extra where user_id = 3 and user_id < t_user.id) order by id", + "plan": { + "QueryType": "SELECT", + "Original": "select col, id from t_user 
where exists(select user_id from t_user_extra where user_id = 3 and user_id < t_user.id) order by id", + "Instructions": { + "OperatorType": "SemiJoin", + "JoinVars": { + "t_user_id": 1 }, "TableName": "t_user_t_user_extra", "Inputs": [ @@ -6293,7 +6294,7 @@ }, "FieldQuery": "select col, id, weight_string(id) from t_user_0 where 1 != 1", "OrderBy": "(1|2) ASC", - "Query": "select col, id, weight_string(id) from t_user order by id asc", + "Query": "select col, id, weight_string(id) from t_user order by t_user.id asc", "Table": "t_user" }, { @@ -6403,9 +6404,9 @@ "Name": "user", "Sharded": true }, - "FieldQuery": "select t_user_0.id, weight_string(t_user_0.id) from t_user_0 where 1 != 1 group by id, weight_string(t_user_0.id)", + "FieldQuery": "select t_user_0.id, weight_string(id) from t_user_0 where 1 != 1 group by id, weight_string(id)", "OrderBy": "(0|1) ASC, 1 ASC", - "Query": "select t_user.id, weight_string(t_user.id) from t_user group by id, weight_string(t_user.id) order by id asc, weight_string(t_user.id) asc", + "Query": "select t_user.id, weight_string(id) from t_user group by id, weight_string(id) order by id asc, weight_string(id) asc", "Table": "t_user" } ] @@ -6817,7 +6818,7 @@ "Sharded": true }, "FieldQuery": "select col from ref where 1 != 1", - "Query": "select col from ref order by col asc", + "Query": "select col from ref order by ref.col asc", "Table": "ref" }, "TablesUsed": [ diff --git a/go/vt/vtgate/planbuilder/testdata/table_filter_cases.json b/go/vt/vtgate/planbuilder/testdata/table_filter_cases.json index 3d23f75a390..b0a6a9142c4 100644 --- a/go/vt/vtgate/planbuilder/testdata/table_filter_cases.json +++ b/go/vt/vtgate/planbuilder/testdata/table_filter_cases.json @@ -542,7 +542,7 @@ "Sharded": true }, "FieldQuery": "select a + 2 as a from t_user_0 where 1 != 1", - "Query": "select a + 2 as a from t_user where a + 2 = 42", + "Query": "select a + 2 as a from t_user where t_user.a + 2 = 42", "Table": "t_user" }, "TablesUsed": [ diff --git 
a/go/vt/vtgate/planbuilder/testdata/table_memory_sort_cases.json b/go/vt/vtgate/planbuilder/testdata/table_memory_sort_cases.json index 0fa3baccfcb..3a9cc8ac690 100644 --- a/go/vt/vtgate/planbuilder/testdata/table_memory_sort_cases.json +++ b/go/vt/vtgate/planbuilder/testdata/table_memory_sort_cases.json @@ -24,9 +24,9 @@ "Name": "user", "Sharded": true }, - "FieldQuery": "select a, b, count(*), weight_string(a), weight_string(b) from t_user_0 where 1 != 1 group by a, weight_string(a)", + "FieldQuery": "select a, b, count(*), weight_string(a), weight_string(t_user_0.b) from t_user_0 where 1 != 1 group by a, weight_string(a)", "OrderBy": "(0|3) ASC", - "Query": "select a, b, count(*), weight_string(a), weight_string(b) from t_user group by a, weight_string(a) order by a asc", + "Query": "select a, b, count(*), weight_string(a), weight_string(t_user.b) from t_user group by a, weight_string(a) order by a asc", "Table": "t_user" } ] @@ -102,9 +102,9 @@ "Name": "user", "Sharded": true }, - "FieldQuery": "select a, b, count(*) as k, weight_string(a), weight_string(b) from t_user_0 where 1 != 1 group by a, weight_string(a)", + "FieldQuery": "select a, b, count(*) as k, weight_string(a), weight_string(t_user_0.b) from t_user_0 where 1 != 1 group by a, weight_string(a)", "OrderBy": "(0|3) ASC", - "Query": "select a, b, count(*) as k, weight_string(a), weight_string(b) from t_user group by a, weight_string(a) order by a asc", + "Query": "select a, b, count(*) as k, weight_string(a), weight_string(t_user.b) from t_user group by a, weight_string(a) order by a asc", "Table": "t_user" } ] @@ -251,9 +251,9 @@ "Name": "user", "Sharded": true }, - "FieldQuery": "select a, convert(a, binary), weight_string(convert(a, binary)) from t_user_0 where 1 != 1", + "FieldQuery": "select a, convert(t_user_0.a, binary), weight_string(convert(t_user_0.a, binary)) from t_user_0 where 1 != 1", "OrderBy": "(1|2) DESC", - "Query": "select a, convert(a, binary), weight_string(convert(a, binary)) 
from t_user order by convert(a, binary) desc", + "Query": "select a, convert(t_user.a, binary), weight_string(convert(t_user.a, binary)) from t_user order by convert(t_user.a, binary) desc", "ResultColumns": 1, "Table": "t_user" }, @@ -277,7 +277,7 @@ }, "FieldQuery": "select id, intcol from t_user_0 where 1 != 1", "OrderBy": "1 ASC", - "Query": "select id, intcol from t_user order by intcol asc", + "Query": "select id, intcol from t_user order by t_user.intcol asc", "Table": "t_user" }, "TablesUsed": [ @@ -334,9 +334,9 @@ "Name": "user", "Sharded": true }, - "FieldQuery": "select a, b, count(*), weight_string(a), weight_string(b) from t_user_0 where 1 != 1 group by a, weight_string(a)", + "FieldQuery": "select a, b, count(*), weight_string(a), weight_string(t_user_0.b) from t_user_0 where 1 != 1 group by a, weight_string(a)", "OrderBy": "(0|3) ASC", - "Query": "select a, b, count(*), weight_string(a), weight_string(b) from t_user where id = 1024 group by a, weight_string(a) order by a asc", + "Query": "select a, b, count(*), weight_string(a), weight_string(t_user.b) from t_user where id = 1024 group by a, weight_string(a) order by a asc", "Table": "t_user", "Values": [ "INT64(1024)" @@ -378,8 +378,8 @@ "Name": "user", "Sharded": true }, - "FieldQuery": "select a, b, count(*), weight_string(a), weight_string(b) from t_user_0 where 1 != 1 group by a, weight_string(a)", - "Query": "select a, b, count(*), weight_string(a), weight_string(b) from t_user where col = 1024 group by a, weight_string(a) order by a asc", + "FieldQuery": "select a, b, count(*), weight_string(a), weight_string(t_user_0.b) from t_user_0 where 1 != 1 group by a, weight_string(a)", + "Query": "select a, b, count(*), weight_string(a), weight_string(t_user.b) from t_user where col = 1024 group by a, weight_string(a) order by a asc", "Table": "t_user", "TableValues": [ "INT64(1024)" @@ -408,7 +408,7 @@ "Sharded": true }, "FieldQuery": "select a, b, count(*) from t_user_0 where 1 != 1 group by a", - 
"Query": "select a, b, count(*) from t_user where col = 1024 and id = 100865 group by a order by b asc", + "Query": "select a, b, count(*) from t_user where col = 1024 and id = 100865 group by a order by t_user.b asc", "Table": "t_user", "TableValues": [ "INT64(1024)" @@ -567,9 +567,9 @@ "Name": "user", "Sharded": true }, - "FieldQuery": "select u.a, convert(a, binary), weight_string(convert(a, binary)) from t_user_0 as u where 1 != 1", + "FieldQuery": "select u.a, convert(u.a, binary), weight_string(convert(u.a, binary)) from t_user_0 as u where 1 != 1", "OrderBy": "(1|2) DESC", - "Query": "select u.a, convert(a, binary), weight_string(convert(a, binary)) from t_user as u order by convert(a, binary) desc", + "Query": "select u.a, convert(u.a, binary), weight_string(convert(u.a, binary)) from t_user as u order by convert(u.a, binary) desc", "Table": "t_user" }, { diff --git a/go/vt/vtgate/planbuilder/testdata/table_postprocess_cases.json b/go/vt/vtgate/planbuilder/testdata/table_postprocess_cases.json index 8f81c04cb63..9870c473549 100644 --- a/go/vt/vtgate/planbuilder/testdata/table_postprocess_cases.json +++ b/go/vt/vtgate/planbuilder/testdata/table_postprocess_cases.json @@ -70,7 +70,7 @@ "Sharded": true }, "FieldQuery": "select col from t_user_0 where 1 != 1", - "Query": "select col from t_user order by col asc", + "Query": "select col from t_user order by t_user.col asc", "OrderBy": "0 ASC", "Table": "t_user" }, @@ -93,7 +93,7 @@ "Sharded": true }, "FieldQuery": "select id, weight_string(id) from t_user_0 where 1 != 1", - "Query": "select id, weight_string(id) from t_user order by id asc", + "Query": "select id, weight_string(id) from t_user order by t_user.id asc", "OrderBy": "(0|1) ASC", "ResultColumns": 1, "Table": "t_user" @@ -164,7 +164,7 @@ "Sharded": true }, "FieldQuery": "select id as foo, weight_string(id) from t_user_0 where 1 != 1", - "Query": "select id as foo, weight_string(id) from t_user order by foo asc", + "Query": "select id as foo, 
weight_string(id) from t_user order by id asc", "OrderBy": "(0|1) ASC", "ResultColumns": 1, "Table": "t_user" @@ -494,7 +494,7 @@ "Sharded": true }, "FieldQuery": "select col from t_user_0 where 1 != 1", - "Query": "select col from t_user where id = 1 and col = 6 order by col asc", + "Query": "select col from t_user where id = 1 and col = 6 order by t_user.col asc", "Values": [ "INT64(1)" ], @@ -523,7 +523,7 @@ "Sharded": true }, "FieldQuery": "select col as foo from t_user_0 where 1 != 1", - "Query": "select col as foo from t_user order by foo asc", + "Query": "select col as foo from t_user order by t_user.col asc", "OrderBy": "0 ASC", "Table": "t_user" }, @@ -546,7 +546,7 @@ "Sharded": true }, "FieldQuery": "select col as foo, col2 as col, weight_string(col2) from t_user_0 where 1 != 1", - "Query": "select col as foo, col2 as col, weight_string(col2) from t_user order by col asc", + "Query": "select col as foo, col2 as col, weight_string(col2) from t_user order by t_user.col2 asc", "OrderBy": "(1|2) ASC", "ResultColumns": 2, "Table": "t_user" @@ -570,7 +570,7 @@ "Sharded": true }, "FieldQuery": "select col, col from t_user_0 where 1 != 1", - "Query": "select col, col from t_user order by col asc", + "Query": "select col, col from t_user order by t_user.col asc", "OrderBy": "0 ASC", "Table": "t_user" }, @@ -616,8 +616,8 @@ "Name": "user", "Sharded": true }, - "FieldQuery": "select t_user_0.col1 as a, a collate utf8_general_ci, weight_string(a collate utf8_general_ci) from t_user_0 where 1 != 1", - "Query": "select t_user.col1 as a, a collate utf8_general_ci, weight_string(a collate utf8_general_ci) from t_user order by a collate utf8_general_ci asc", + "FieldQuery": "select t_user_0.col1 as a, t_user_0.col1 collate utf8_general_ci, weight_string(t_user_0.col1 collate utf8_general_ci) from t_user_0 where 1 != 1", + "Query": "select t_user.col1 as a, t_user.col1 collate utf8_general_ci, weight_string(t_user.col1 collate utf8_general_ci) from t_user order by 
t_user.col1 collate utf8_general_ci asc", "OrderBy": "(1|2) ASC", "ResultColumns": 1, "Table": "t_user" @@ -1032,7 +1032,7 @@ }, "OrderBy": "(0|3) ASC", "FieldQuery": "select t_user_0.col1 as a, t_user_0.col2, t_user_0.id, weight_string(t_user_0.col1) from t_user_0 where 1 != 1", - "Query": "select t_user.col1 as a, t_user.col2, t_user.id, weight_string(t_user.col1) from t_user where t_user.id = 1 order by a asc", + "Query": "select t_user.col1 as a, t_user.col2, t_user.id, weight_string(t_user.col1) from t_user where t_user.id = 1 order by t_user.col1 asc", "Table": "t_user", "Values": [ "INT64(1)" @@ -1086,7 +1086,7 @@ }, "OrderBy": "(0|3) ASC", "FieldQuery": "select t_user_0.col1 as a, t_user_0.col2, t_user_0.id, weight_string(t_user_0.col1) from t_user_0 where 1 != 1", - "Query": "select t_user.col1 as a, t_user.col2, t_user.id, weight_string(t_user.col1) from t_user where t_user.id = 1 order by a asc", + "Query": "select t_user.col1 as a, t_user.col2, t_user.id, weight_string(t_user.col1) from t_user where t_user.id = 1 order by t_user.col1 asc", "Table": "t_user", "Values": [ "INT64(1)" @@ -1238,7 +1238,7 @@ }, "FieldQuery": "select `name`, weight_string(`name`) from t_user_0 where 1 != 1", "OrderBy": "(0|1) ASC", - "Query": "select `name`, weight_string(`name`) from t_user order by `name` asc", + "Query": "select `name`, weight_string(`name`) from t_user order by t_user.`name` asc", "Table": "t_user" }, { @@ -1288,7 +1288,7 @@ }, "FieldQuery": "select `name`, weight_string(`name`) from t_user_0 where 1 != 1", "OrderBy": "(0|1) ASC", - "Query": "select `name`, weight_string(`name`) from t_user order by `name` asc", + "Query": "select `name`, weight_string(`name`) from t_user order by t_user.`name` asc", "Table": "t_user" }, { @@ -1506,7 +1506,7 @@ "Sharded": true }, "FieldQuery": "select id from t_user_0 where 1 != 1", - "Query": "select id from t_user where id in ::__vals and :__sq_has_values", + "Query": "select id from t_user where t_user.id in ::__vals 
and :__sq_has_values", "Table": "t_user", "Values": [ "::__sq1" @@ -1636,7 +1636,7 @@ }, "FieldQuery": "select a, textcol1, b, weight_string(a), weight_string(b) from t_user_0 where 1 != 1", "OrderBy": "(0|3) ASC, 1 ASC COLLATE latin1_swedish_ci, (2|4) ASC", - "Query": "select a, textcol1, b, weight_string(a), weight_string(b) from t_user order by a asc, textcol1 asc, b asc", + "Query": "select a, textcol1, b, weight_string(a), weight_string(b) from t_user order by t_user.a asc, t_user.textcol1 asc, t_user.b asc", "ResultColumns": 3, "Table": "t_user" }, @@ -1660,7 +1660,7 @@ }, "FieldQuery": "select a, t_user_0.textcol1, b, weight_string(a), weight_string(b) from t_user_0 where 1 != 1", "OrderBy": "(0|3) ASC, 1 ASC COLLATE latin1_swedish_ci, (2|4) ASC", - "Query": "select a, t_user.textcol1, b, weight_string(a), weight_string(b) from t_user order by a asc, textcol1 asc, b asc", + "Query": "select a, t_user.textcol1, b, weight_string(a), weight_string(b) from t_user order by t_user.a asc, t_user.textcol1 asc, t_user.b asc", "ResultColumns": 3, "Table": "t_user" }, @@ -1682,9 +1682,9 @@ "Name": "user", "Sharded": true }, - "FieldQuery": "select a, textcol1, b, textcol2, weight_string(a), weight_string(b) from t_user_0 where 1 != 1", - "OrderBy": "(0|4) ASC, 1 ASC COLLATE latin1_swedish_ci, (2|5) ASC, 3 ASC COLLATE latin1_swedish_ci", - "Query": "select a, textcol1, b, textcol2, weight_string(a), weight_string(b) from t_user order by a asc, textcol1 asc, b asc, textcol2 asc", + "FieldQuery": "select a, textcol1, b, textcol2, weight_string(a), weight_string(b), weight_string(textcol2) from t_user_0 where 1 != 1", + "OrderBy": "(0|4) ASC, 1 ASC COLLATE latin1_swedish_ci, (2|5) ASC, (3|6) ASC COLLATE ", + "Query": "select a, textcol1, b, textcol2, weight_string(a), weight_string(b), weight_string(textcol2) from t_user order by t_user.a asc, t_user.textcol1 asc, t_user.b asc, t_user.textcol2 asc", "ResultColumns": 4, "Table": "t_user" }, @@ -1708,7 +1708,7 @@ }, 
"FieldQuery": "select id as foo, weight_string(id) from t_music_0 where 1 != 1", "OrderBy": "(0|1) ASC", - "Query": "select id as foo, weight_string(id) from t_music order by foo asc", + "Query": "select id as foo, weight_string(id) from t_music order by id asc", "ResultColumns": 1, "Table": "t_music" }, @@ -1753,7 +1753,7 @@ }, "FieldQuery": "select col from t_user_0 where 1 != 1", "OrderBy": "0 ASC", - "Query": "select col from t_user where col in ::__sq1 and :__sq_has_values order by col asc", + "Query": "select col from t_user where col in ::__sq1 and :__sq_has_values order by t_user.col asc", "Table": "t_user", "TableValues": [ "::__sq1" @@ -2046,7 +2046,7 @@ "Sharded": true }, "FieldQuery": "select col1 from t_user_0 where 1 != 1", - "Query": "select col1 from t_user where id = 123 and col2 = 2 and col = 1 and col1 = 3", + "Query": "select col1 from t_user where id = 123 and col2 = 2 and col = 1 and t_user.col1 = 3", "Table": "t_user", "TableValues": [ "INT64(1)" @@ -2138,7 +2138,7 @@ "Sharded": true }, "FieldQuery": "select id from t_user_0 where 1 != 1", - "Query": "select id from t_user where id in ::__vals and :__sq_has_values", + "Query": "select id from t_user where t_user.id in ::__vals and :__sq_has_values", "Table": "t_user", "Values": [ "::__sq1" @@ -2207,7 +2207,7 @@ }, "FieldQuery": "select col from t_user_0 where 1 != 1 group by col", "OrderBy": "0 ASC", - "Query": "select col from t_user where id between :vtg1 and :vtg2 group by col order by col asc", + "Query": "select col from t_user where id between :vtg1 and :vtg2 group by col order by t_user.col asc", "Table": "t_user" } ] @@ -2238,7 +2238,7 @@ }, "FieldQuery": "select foo, col, weight_string(foo) from t_user_0 where 1 != 1 group by col, foo, weight_string(foo)", "OrderBy": "1 ASC, (0|2) ASC", - "Query": "select foo, col, weight_string(foo) from t_user where id between :vtg1 and :vtg2 group by col, foo, weight_string(foo) order by col asc, foo asc", + "Query": "select foo, col, 
weight_string(foo) from t_user where id between :vtg1 and :vtg2 group by col, foo, weight_string(foo) order by t_user.col asc, foo asc", "Table": "t_user" } ] diff --git a/go/vt/vtgate/planbuilder/testdata/table_postprocess_subquery_cases.json b/go/vt/vtgate/planbuilder/testdata/table_postprocess_subquery_cases.json index 8725553511a..7c79896125d 100644 --- a/go/vt/vtgate/planbuilder/testdata/table_postprocess_subquery_cases.json +++ b/go/vt/vtgate/planbuilder/testdata/table_postprocess_subquery_cases.json @@ -34,7 +34,7 @@ "Sharded": true }, "FieldQuery": "select id from t_user_0 where 1 != 1", - "Query": "select id from t_user where id in ::__vals and :__sq_has_values", + "Query": "select id from t_user where t_user.id in ::__vals and :__sq_has_values", "Table": "t_user", "Values": [ "::__sq1" @@ -84,7 +84,7 @@ }, "FieldQuery": "select col from t_user_0 where 1 != 1", "OrderBy": "0 ASC", - "Query": "select col from t_user where col in ::__sq1 and :__sq_has_values order by col asc", + "Query": "select col from t_user where col in ::__sq1 and :__sq_has_values order by t_user.col asc", "TableValues": [ "::__sq1" ], diff --git a/go/vt/vtgate/planbuilder/testdata/table_select_cases.json b/go/vt/vtgate/planbuilder/testdata/table_select_cases.json index 5ba7df9439f..3536fc649e0 100644 --- a/go/vt/vtgate/planbuilder/testdata/table_select_cases.json +++ b/go/vt/vtgate/planbuilder/testdata/table_select_cases.json @@ -144,8 +144,8 @@ "Name": "user", "Sharded": true }, - "FieldQuery": "select a.user_id, a.col1, a.col2 from t_authoritative_0 as a where 1 != 1", - "Query": "select a.user_id, a.col1, a.col2 from t_authoritative as a", + "FieldQuery": "select user_id, col1, col2 from t_authoritative_0 as a where 1 != 1", + "Query": "select user_id, col1, col2 from t_authoritative as a", "Table": "t_authoritative" }, "TablesUsed": [ @@ -173,7 +173,7 @@ }, "FieldQuery": "select user_id, weight_string(user_id) from t_music_0 where 1 != 1", "OrderBy": "(0|1) ASC", - "Query": "select 
user_id, weight_string(user_id) from t_music order by user_id asc limit :__upper_limit", + "Query": "select user_id, weight_string(user_id) from t_music order by t_music.user_id asc limit :__upper_limit", "ResultColumns": 1, "Table": "t_music" } @@ -314,7 +314,7 @@ "Sharded": true }, "FieldQuery": "select user0_.col as col0_ from t_user_0 as user0_ where 1 != 1", - "Query": "select user0_.col as col0_ from t_user as user0_ where id = 1 and col = 12 order by col0_ desc limit 3", + "Query": "select user0_.col as col0_ from t_user as user0_ where id = 1 and col = 12 order by user0_.col desc limit 3", "Table": "t_user", "TableValues": [ "INT64(12)" @@ -381,7 +381,7 @@ }, "FieldQuery": "select user0_.col as col0_ from t_user_0 as user0_ where 1 != 1", "OrderBy": "0 DESC", - "Query": "select user0_.col as col0_ from t_user as user0_ where id = 1 order by col0_ desc limit :__upper_limit", + "Query": "select user0_.col as col0_ from t_user as user0_ where id = 1 order by user0_.col desc limit :__upper_limit", "Table": "t_user", "Values": [ "INT64(1)" @@ -1419,7 +1419,7 @@ }, "FieldQuery": "select u.foo, weight_string(u.foo) from t_user_0 as u where 1 != 1", "OrderBy": "(0|1) ASC", - "Query": "select u.foo, weight_string(u.foo) from t_user as u order by foo asc", + "Query": "select u.foo, weight_string(u.foo) from t_user as u order by u.foo asc", "Table": "t_user" }, { diff --git a/go/vt/vtgate/planbuilder/testdata/table_subquery_select_cases.json b/go/vt/vtgate/planbuilder/testdata/table_subquery_select_cases.json index 57a7c9a8844..2ab6f4e030e 100644 --- a/go/vt/vtgate/planbuilder/testdata/table_subquery_select_cases.json +++ b/go/vt/vtgate/planbuilder/testdata/table_subquery_select_cases.json @@ -12,42 +12,48 @@ "TableName": "t_user_t_user_extra", "Inputs": [ { - "OperatorType": "UncorrelatedSubquery", - "Variant": "PulloutValue", - "PulloutVars": [ - "__sq1" - ], + "OperatorType": "Sort", + "Variant": "Memory", + "OrderBy": "(0|1) ASC", "Inputs": [ { - "InputName": 
"SubQuery", - "OperatorType": "Limit", - "Count": "INT64(1)", + "OperatorType": "UncorrelatedSubquery", + "Variant": "PulloutValue", + "PulloutVars": [ + "__sq1" + ], "Inputs": [ { + "InputName": "SubQuery", + "OperatorType": "Limit", + "Count": "INT64(1)", + "Inputs": [ + { + "OperatorType": "TableRoute", + "Variant": "Scatter-Scatter", + "Keyspace": { + "Name": "user", + "Sharded": true + }, + "FieldQuery": "select col from t_user_0 where 1 != 1", + "Query": "select col from t_user limit :__upper_limit", + "Table": "t_user" + } + ] + }, + { + "InputName": "Outer", "OperatorType": "TableRoute", "Variant": "Scatter-Scatter", "Keyspace": { "Name": "user", "Sharded": true }, - "FieldQuery": "select col from t_user_0 where 1 != 1", - "Query": "select col from t_user limit :__upper_limit", + "FieldQuery": "select :__sq1 as a, weight_string(:__sq1) from t_user_0 where 1 != 1", + "Query": "select :__sq1 as a, weight_string(:__sq1) from t_user", "Table": "t_user" } ] - }, - { - "InputName": "Outer", - "OperatorType": "TableRoute", - "Variant": "Scatter-Scatter", - "Keyspace": { - "Name": "user", - "Sharded": true - }, - "FieldQuery": "select :__sq1 as a, weight_string(:__sq1) from t_user_0 where 1 != 1", - "OrderBy": "(0|1) ASC", - "Query": "select :__sq1 as a, weight_string(:__sq1) from t_user order by a asc", - "Table": "t_user" } ] }, @@ -97,7 +103,7 @@ }, "FieldQuery": "select id, weight_string(id) from t_user_0 where 1 != 1", "OrderBy": "(0|1) ASC", - "Query": "select id, weight_string(id) from t_user order by id asc limit :__upper_limit", + "Query": "select id, weight_string(id) from t_user order by t_user.id asc limit :__upper_limit", "Table": "t_user" } ] @@ -110,8 +116,8 @@ "Name": "user", "Sharded": true }, - "FieldQuery": "select :__sq1 as `(select id from t_user order by id asc limit 1)` from t_user_extra_0 where 1 != 1", - "Query": "select :__sq1 as `(select id from t_user order by id asc limit 1)` from t_user_extra", + "FieldQuery": "select :__sq1 as 
`(select id from t_user order by t_user.id asc limit 1)` from t_user_extra_0 where 1 != 1", + "Query": "select :__sq1 as `(select id from t_user order by t_user.id asc limit 1)` from t_user_extra", "Table": "t_user_extra" } ] diff --git a/go/vt/vtgate/planbuilder/testdata/table_union_cases.json b/go/vt/vtgate/planbuilder/testdata/table_union_cases.json index 1d4f8522ad9..8f9af20e538 100644 --- a/go/vt/vtgate/planbuilder/testdata/table_union_cases.json +++ b/go/vt/vtgate/planbuilder/testdata/table_union_cases.json @@ -126,7 +126,7 @@ "FieldQuery": "select id, weight_string(id) from t_user_0 where 1 != 1", "OrderBy": "(0|1) DESC", "ResultColumns": 1, - "Query": "select id, weight_string(id) from t_user order by id desc limit :__upper_limit", + "Query": "select id, weight_string(id) from t_user order by t_user.id desc limit :__upper_limit", "Table": "t_user" } ] @@ -145,7 +145,7 @@ "FieldQuery": "select id, weight_string(id) from t_music_0 where 1 != 1", "OrderBy": "(0|1) DESC", "ResultColumns": 1, - "Query": "select id, weight_string(id) from t_music order by id desc limit :__upper_limit", + "Query": "select id, weight_string(id) from t_music order by t_music.id desc limit :__upper_limit", "Table": "t_music" } ] @@ -252,7 +252,7 @@ "FieldQuery": "select id, weight_string(id) from t_user_0 where 1 != 1", "OrderBy": "(0|1) ASC", "ResultColumns": 1, - "Query": "select id, weight_string(id) from t_user order by id asc limit :__upper_limit", + "Query": "select id, weight_string(id) from t_user order by t_user.id asc limit :__upper_limit", "Table": "t_user" } ] @@ -271,7 +271,7 @@ "FieldQuery": "select id, weight_string(id) from t_music_0 where 1 != 1", "OrderBy": "(0|1) DESC", "ResultColumns": 1, - "Query": "select id, weight_string(id) from t_music order by id desc limit :__upper_limit", + "Query": "select id, weight_string(id) from t_music order by t_music.id desc limit :__upper_limit", "Table": "t_music" } ] @@ -447,7 +447,7 @@ }, "FieldQuery": "select id, 
weight_string(id) from t_user_0 where 1 != 1", "OrderBy": "(0|1) ASC", - "Query": "select id, weight_string(id) from t_user order by id asc limit :__upper_limit", + "Query": "select id, weight_string(id) from t_user order by t_user.id asc limit :__upper_limit", "Table": "t_user" } ] @@ -465,7 +465,7 @@ }, "FieldQuery": "select id, weight_string(id) from t_user_0 where 1 != 1", "OrderBy": "(0|1) DESC", - "Query": "select id, weight_string(id) from t_user order by id desc limit :__upper_limit", + "Query": "select id, weight_string(id) from t_user order by t_user.id desc limit :__upper_limit", "Table": "t_user" } ] @@ -604,7 +604,7 @@ { "OperatorType": "Aggregate", "Variant": "Ordered", - "Aggregates": "any_value(1) AS weight_string(id)", + "Aggregates": "any_value(1) AS weight_string(t_user.id)", "GroupBy": "(0|1)", "ResultColumns": 1, "Inputs": [ @@ -615,9 +615,9 @@ "Name": "user", "Sharded": true }, - "FieldQuery": "select id, weight_string(id) from t_user_0 where 1 != 1 group by id", + "FieldQuery": "select id, weight_string(t_user_0.id) from t_user_0 where 1 != 1 group by id", "OrderBy": "(0|1) DESC", - "Query": "select id, weight_string(id) from t_user group by id order by id desc limit :__upper_limit", + "Query": "select id, weight_string(t_user.id) from t_user group by id order by t_user.id desc limit :__upper_limit", "Table": "t_user" } ] @@ -1398,61 +1398,7 @@ { "comment": "UNION with repeating column on the LHS", "query": "select col, col, col from t_user union select col1, col1, col1 from t_authoritative", - "plan": { - "QueryType": "SELECT", - "Original": "select col, col, col from t_user union select col1, col1, col1 from t_authoritative", - "Instructions": { - "OperatorType": "Distinct", - "Collations": [ - "0: latin1_swedish_ci", - "1: latin1_swedish_ci", - "2: latin1_swedish_ci" - ], - "Inputs": [ - { - "OperatorType": "Distinct", - "Collations": [ - "0", - "1", - "2" - ], - "Inputs": [ - { - "OperatorType": "Concatenate", - "Inputs": [ - { - 
"OperatorType": "TableRoute", - "Variant": "Scatter-Scatter", - "Keyspace": { - "Name": "user", - "Sharded": true - }, - "FieldQuery": "select col, col, col from t_user_0 where 1 != 1", - "Query": "select distinct col, col, col from t_user", - "Table": "t_user" - }, - { - "OperatorType": "TableRoute", - "Variant": "Scatter-Scatter", - "Keyspace": { - "Name": "user", - "Sharded": true - }, - "FieldQuery": "select col1, col1, col1 from t_authoritative_0 where 1 != 1", - "Query": "select distinct col1, col1, col1 from t_authoritative", - "Table": "t_authoritative" - } - ] - } - ] - } - ] - }, - "TablesUsed": [ - "user.t_authoritative", - "user.t_user" - ] - } + "plan": "Duplicate column name 'col'" }, { "comment": "union all splitTable and not splitTable", diff --git a/go/vt/vtgate/planbuilder/testdata/tpcc_cases.json b/go/vt/vtgate/planbuilder/testdata/tpcc_cases.json index feaae4ec013..c58cdb95675 100644 --- a/go/vt/vtgate/planbuilder/testdata/tpcc_cases.json +++ b/go/vt/vtgate/planbuilder/testdata/tpcc_cases.json @@ -556,7 +556,7 @@ "Sharded": true }, "FieldQuery": "select c_balance, c_first, c_middle, c_id from customer1 where 1 != 1", - "Query": "select c_balance, c_first, c_middle, c_id from customer1 where c_w_id = 840 and c_d_id = 1 and c_last = 'test' order by c_first asc", + "Query": "select c_balance, c_first, c_middle, c_id from customer1 where c_w_id = 840 and c_d_id = 1 and c_last = 'test' order by customer1.c_first asc", "Table": "customer1", "Values": [ "INT64(840)" @@ -608,7 +608,7 @@ "Sharded": true }, "FieldQuery": "select o_id, o_carrier_id, o_entry_d from orders1 where 1 != 1", - "Query": "select o_id, o_carrier_id, o_entry_d from orders1 where o_w_id = 9894 and o_d_id = 3 and o_c_id = 159 order by o_id desc", + "Query": "select o_id, o_carrier_id, o_entry_d from orders1 where o_w_id = 9894 and o_d_id = 3 and o_c_id = 159 order by orders1.o_id desc", "Table": "orders1", "Values": [ "INT64(9894)" @@ -660,7 +660,7 @@ "Sharded": true }, "FieldQuery": 
"select no_o_id from new_orders1 where 1 != 1", - "Query": "select no_o_id from new_orders1 where no_d_id = 689 and no_w_id = 15 order by no_o_id asc limit 1 for update", + "Query": "select no_o_id from new_orders1 where no_d_id = 689 and no_w_id = 15 order by new_orders1.no_o_id asc limit 1 for update", "Table": "new_orders1", "Values": [ "INT64(15)" diff --git a/go/vt/vtgate/planbuilder/testdata/tpch_cases.json b/go/vt/vtgate/planbuilder/testdata/tpch_cases.json index 0bf0f487c55..2d52ceba246 100644 --- a/go/vt/vtgate/planbuilder/testdata/tpch_cases.json +++ b/go/vt/vtgate/planbuilder/testdata/tpch_cases.json @@ -170,7 +170,7 @@ }, "FieldQuery": "select o_orderpriority, count(*) as order_count, o_orderkey, weight_string(o_orderpriority) from orders where 1 != 1 group by o_orderpriority, o_orderkey, weight_string(o_orderpriority)", "OrderBy": "(0|3) ASC", - "Query": "select o_orderpriority, count(*) as order_count, o_orderkey, weight_string(o_orderpriority) from orders where o_orderdate >= date('1993-07-01') and o_orderdate < date('1993-07-01') + interval '3' month group by o_orderpriority, o_orderkey, weight_string(o_orderpriority) order by o_orderpriority asc", + "Query": "select o_orderpriority, count(*) as order_count, o_orderkey, weight_string(o_orderpriority) from orders where o_orderdate >= date('1993-07-01') and o_orderdate < date('1993-07-01') + interval '3' month group by o_orderpriority, o_orderkey, weight_string(o_orderpriority) order by orders.o_orderpriority asc", "Table": "orders" }, { @@ -590,9 +590,9 @@ "Name": "main", "Sharded": true }, - "FieldQuery": "select sum(volume) as revenue, l_year, shipping.`orders.o_custkey`, shipping.`n1.n_name`, shipping.`lineitem.l_suppkey`, shipping.`lineitem.l_orderkey`, weight_string(l_year), supp_nation, weight_string(supp_nation), cust_nation, weight_string(cust_nation) from (select extract(year from l_shipdate) as l_year, l_extendedprice * (1 - l_discount) as volume, orders.o_custkey as `orders.o_custkey`, 
lineitem.l_suppkey as `lineitem.l_suppkey`, lineitem.l_orderkey as `lineitem.l_orderkey` from lineitem where 1 != 1) as shipping where 1 != 1 group by l_year, shipping.`orders.o_custkey`, shipping.`n1.n_name`, shipping.`lineitem.l_suppkey`, shipping.`lineitem.l_orderkey`, weight_string(l_year)", + "FieldQuery": "select sum(volume) as revenue, l_year, shipping.`orders.o_custkey`, shipping.`n1.n_name`, shipping.`lineitem.l_suppkey`, shipping.`lineitem.l_orderkey`, weight_string(l_year), shipping.supp_nation, weight_string(shipping.supp_nation), shipping.cust_nation, weight_string(shipping.cust_nation) from (select extract(year from l_shipdate) as l_year, l_extendedprice * (1 - l_discount) as volume, orders.o_custkey as `orders.o_custkey`, lineitem.l_suppkey as `lineitem.l_suppkey`, lineitem.l_orderkey as `lineitem.l_orderkey` from lineitem where 1 != 1) as shipping where 1 != 1 group by l_year, shipping.`orders.o_custkey`, shipping.`n1.n_name`, shipping.`lineitem.l_suppkey`, shipping.`lineitem.l_orderkey`, weight_string(l_year)", "OrderBy": "(7|8) ASC, (9|10) ASC, (1|6) ASC", - "Query": "select sum(volume) as revenue, l_year, shipping.`orders.o_custkey`, shipping.`n1.n_name`, shipping.`lineitem.l_suppkey`, shipping.`lineitem.l_orderkey`, weight_string(l_year), supp_nation, weight_string(supp_nation), cust_nation, weight_string(cust_nation) from (select extract(year from l_shipdate) as l_year, l_extendedprice * (1 - l_discount) as volume, orders.o_custkey as `orders.o_custkey`, lineitem.l_suppkey as `lineitem.l_suppkey`, lineitem.l_orderkey as `lineitem.l_orderkey` from lineitem where l_shipdate between date('1995-01-01') and date('1996-12-31')) as shipping group by l_year, shipping.`orders.o_custkey`, shipping.`n1.n_name`, shipping.`lineitem.l_suppkey`, shipping.`lineitem.l_orderkey`, weight_string(l_year) order by supp_nation asc, cust_nation asc, l_year asc", + "Query": "select sum(volume) as revenue, l_year, shipping.`orders.o_custkey`, shipping.`n1.n_name`, 
shipping.`lineitem.l_suppkey`, shipping.`lineitem.l_orderkey`, weight_string(l_year), shipping.supp_nation, weight_string(shipping.supp_nation), shipping.cust_nation, weight_string(shipping.cust_nation) from (select extract(year from l_shipdate) as l_year, l_extendedprice * (1 - l_discount) as volume, orders.o_custkey as `orders.o_custkey`, lineitem.l_suppkey as `lineitem.l_suppkey`, lineitem.l_orderkey as `lineitem.l_orderkey` from lineitem where l_shipdate between date('1995-01-01') and date('1996-12-31')) as shipping group by l_year, shipping.`orders.o_custkey`, shipping.`n1.n_name`, shipping.`lineitem.l_suppkey`, shipping.`lineitem.l_orderkey`, weight_string(l_year) order by shipping.supp_nation asc, shipping.cust_nation asc, shipping.l_year asc", "Table": "lineitem" }, { @@ -1477,7 +1477,7 @@ }, "FieldQuery": "select s_suppkey, s_name, s_address, s_phone, total_revenue, weight_string(s_suppkey) from supplier, revenue0 where 1 != 1", "OrderBy": "(0|5) ASC", - "Query": "select s_suppkey, s_name, s_address, s_phone, total_revenue, weight_string(s_suppkey) from supplier, revenue0 where s_suppkey = supplier_no and total_revenue = :__sq1 order by s_suppkey asc", + "Query": "select s_suppkey, s_name, s_address, s_phone, total_revenue, weight_string(s_suppkey) from supplier, revenue0 where s_suppkey = supplier_no and total_revenue = :__sq1 order by supplier.s_suppkey asc", "ResultColumns": 5, "Table": "revenue0, supplier" } diff --git a/go/vt/vtgate/planbuilder/testdata/union_cases.json b/go/vt/vtgate/planbuilder/testdata/union_cases.json index 61a1df35a24..ca29c5646ec 100644 --- a/go/vt/vtgate/planbuilder/testdata/union_cases.json +++ b/go/vt/vtgate/planbuilder/testdata/union_cases.json @@ -128,7 +128,7 @@ }, "FieldQuery": "select id, weight_string(id) from `user` where 1 != 1", "OrderBy": "(0|1) DESC", - "Query": "select id, weight_string(id) from `user` order by id desc limit :__upper_limit", + "Query": "select id, weight_string(id) from `user` order by `user`.id 
desc limit :__upper_limit", "Table": "`user`" } ] @@ -146,7 +146,7 @@ }, "FieldQuery": "select id, weight_string(id) from music where 1 != 1", "OrderBy": "(0|1) DESC", - "Query": "select id, weight_string(id) from music order by id desc limit :__upper_limit", + "Query": "select id, weight_string(id) from music order by music.id desc limit :__upper_limit", "Table": "music" } ] @@ -258,7 +258,7 @@ }, "FieldQuery": "select id, weight_string(id) from `user` where 1 != 1", "OrderBy": "(0|1) ASC", - "Query": "select id, weight_string(id) from `user` order by id asc limit :__upper_limit", + "Query": "select id, weight_string(id) from `user` order by `user`.id asc limit :__upper_limit", "Table": "`user`" } ] @@ -276,7 +276,7 @@ }, "FieldQuery": "select id, weight_string(id) from music where 1 != 1", "OrderBy": "(0|1) DESC", - "Query": "select id, weight_string(id) from music order by id desc limit :__upper_limit", + "Query": "select id, weight_string(id) from music order by music.id desc limit :__upper_limit", "Table": "music" } ] @@ -373,8 +373,9 @@ "Instructions": { "OperatorType": "Distinct", "Collations": [ - "0" + "(0:1)" ], + "ResultColumns": 1, "Inputs": [ { "OperatorType": "Route", @@ -383,8 +384,8 @@ "Name": "user", "Sharded": true }, - "FieldQuery": "select id from `user` where 1 != 1 union select id from music where 1 != 1 union select 1 from dual where 1 != 1", - "Query": "select id from `user` union select id from music union select 1 from dual", + "FieldQuery": "select id, weight_string(id) from (select id from `user` where 1 != 1 union select id from music where 1 != 1 union select 1 from dual where 1 != 1) as dt where 1 != 1", + "Query": "select id, weight_string(id) from (select id from `user` union select id from music union select 1 from dual) as dt", "Table": "`user`, dual, music" } ] @@ -522,8 +523,9 @@ "Instructions": { "OperatorType": "Distinct", "Collations": [ - "0" + "(0:1)" ], + "ResultColumns": 1, "Inputs": [ { "OperatorType": "Route", @@ -532,8 
+534,8 @@ "Name": "user", "Sharded": true }, - "FieldQuery": "select 1 from dual where 1 != 1 union select null from dual where 1 != 1 union select 1.0 from dual where 1 != 1 union select '1' from dual where 1 != 1 union select 2 from dual where 1 != 1 union select 2.0 from `user` where 1 != 1", - "Query": "select 1 from dual union select null from dual union select 1.0 from dual union select '1' from dual union select 2 from dual union select 2.0 from `user`", + "FieldQuery": "select `1`, weight_string(`1`) from (select 1 from dual where 1 != 1 union select null from dual where 1 != 1 union select 1.0 from dual where 1 != 1 union select '1' from dual where 1 != 1 union select 2 from dual where 1 != 1 union select 2.0 from `user` where 1 != 1) as dt where 1 != 1", + "Query": "select `1`, weight_string(`1`) from (select 1 from dual union select null from dual union select 1.0 from dual union select '1' from dual union select 2 from dual union select 2.0 from `user`) as dt", "Table": "`user`, dual" } ] @@ -841,8 +843,9 @@ { "OperatorType": "Distinct", "Collations": [ - "0" + "(0:1)" ], + "ResultColumns": 1, "Inputs": [ { "OperatorType": "Route", @@ -851,8 +854,8 @@ "Name": "user", "Sharded": true }, - "FieldQuery": "select id from `user` where 1 != 1 union select 3 from dual where 1 != 1", - "Query": "select id from `user` union select 3 from dual limit :__upper_limit", + "FieldQuery": "select id, weight_string(id) from (select id from `user` where 1 != 1 union select 3 from dual where 1 != 1) as dt where 1 != 1", + "Query": "select id, weight_string(id) from (select id from `user` union select 3 from dual limit :__upper_limit) as dt", "Table": "`user`, dual" } ] @@ -973,7 +976,7 @@ }, "FieldQuery": "select id, weight_string(id) from `user` where 1 != 1", "OrderBy": "(0|1) ASC", - "Query": "select id, weight_string(id) from `user` order by id asc limit :__upper_limit", + "Query": "select id, weight_string(id) from `user` order by `user`.id asc limit :__upper_limit", 
"Table": "`user`" } ] @@ -991,7 +994,7 @@ }, "FieldQuery": "select id, weight_string(id) from `user` where 1 != 1", "OrderBy": "(0|1) DESC", - "Query": "select id, weight_string(id) from `user` order by id desc limit :__upper_limit", + "Query": "select id, weight_string(id) from `user` order by `user`.id desc limit :__upper_limit", "Table": "`user`" } ] @@ -1057,7 +1060,7 @@ { "OperatorType": "Distinct", "Collations": [ - "0", + "(0:1)", "1" ], "Inputs": [ @@ -1209,8 +1212,8 @@ "Name": "user", "Sharded": true }, - "FieldQuery": "select X.`name`, X.foo from (select `name`, id as foo from `user` where 1 != 1 union select 'extra', user_id from user_extra where 1 != 1) as X where 1 != 1", - "Query": "select X.`name`, X.foo from (select `name`, id as foo from `user` where id = 3 union select 'extra', user_id from user_extra where user_id = 3) as X", + "FieldQuery": "select `name`, foo from (select `name`, id as foo from `user` where 1 != 1 union select 'extra', user_id from user_extra where 1 != 1) as X where 1 != 1", + "Query": "select `name`, foo from (select `name`, id as foo from `user` where id = 3 union select 'extra', user_id from user_extra where user_id = 3) as X", "Table": "`user`, user_extra", "Values": [ "INT64(3)" @@ -1260,8 +1263,8 @@ "Name": "main", "Sharded": false }, - "FieldQuery": "select kcu.CONSTRAINT_CATALOG, kcu.CONSTRAINT_SCHEMA, kcu.CONSTRAINT_NAME, kcu.TABLE_CATALOG, kcu.TABLE_SCHEMA, kcu.TABLE_NAME, kcu.COLUMN_NAME, kcu.ORDINAL_POSITION, kcu.POSITION_IN_UNIQUE_CONSTRAINT, kcu.REFERENCED_TABLE_SCHEMA, kcu.REFERENCED_TABLE_NAME, kcu.REFERENCED_COLUMN_NAME from information_schema.key_column_usage as kcu where 1 != 1", - "Query": "select distinct kcu.CONSTRAINT_CATALOG, kcu.CONSTRAINT_SCHEMA, kcu.CONSTRAINT_NAME, kcu.TABLE_CATALOG, kcu.TABLE_SCHEMA, kcu.TABLE_NAME, kcu.COLUMN_NAME, kcu.ORDINAL_POSITION, kcu.POSITION_IN_UNIQUE_CONSTRAINT, kcu.REFERENCED_TABLE_SCHEMA, kcu.REFERENCED_TABLE_NAME, kcu.REFERENCED_COLUMN_NAME from 
information_schema.key_column_usage as kcu where kcu.table_schema = :__vtschemaname /* VARCHAR */ and kcu.table_name = :kcu_table_name /* VARCHAR */", + "FieldQuery": "select CONSTRAINT_CATALOG, CONSTRAINT_SCHEMA, CONSTRAINT_NAME, TABLE_CATALOG, TABLE_SCHEMA, TABLE_NAME, COLUMN_NAME, ORDINAL_POSITION, POSITION_IN_UNIQUE_CONSTRAINT, REFERENCED_TABLE_SCHEMA, REFERENCED_TABLE_NAME, REFERENCED_COLUMN_NAME from information_schema.key_column_usage as kcu where 1 != 1", + "Query": "select distinct CONSTRAINT_CATALOG, CONSTRAINT_SCHEMA, CONSTRAINT_NAME, TABLE_CATALOG, TABLE_SCHEMA, TABLE_NAME, COLUMN_NAME, ORDINAL_POSITION, POSITION_IN_UNIQUE_CONSTRAINT, REFERENCED_TABLE_SCHEMA, REFERENCED_TABLE_NAME, REFERENCED_COLUMN_NAME from information_schema.key_column_usage as kcu where kcu.table_schema = :__vtschemaname /* VARCHAR */ and kcu.table_name = :kcu_table_name /* VARCHAR */", "SysTableTableName": "[kcu_table_name:VARCHAR(\"user_extra\")]", "SysTableTableSchema": "[VARCHAR(\"user\")]", "Table": "information_schema.key_column_usage" @@ -1273,8 +1276,8 @@ "Name": "main", "Sharded": false }, - "FieldQuery": "select kcu.CONSTRAINT_CATALOG, kcu.CONSTRAINT_SCHEMA, kcu.CONSTRAINT_NAME, kcu.TABLE_CATALOG, kcu.TABLE_SCHEMA, kcu.TABLE_NAME, kcu.COLUMN_NAME, kcu.ORDINAL_POSITION, kcu.POSITION_IN_UNIQUE_CONSTRAINT, kcu.REFERENCED_TABLE_SCHEMA, kcu.REFERENCED_TABLE_NAME, kcu.REFERENCED_COLUMN_NAME from information_schema.key_column_usage as kcu where 1 != 1", - "Query": "select distinct kcu.CONSTRAINT_CATALOG, kcu.CONSTRAINT_SCHEMA, kcu.CONSTRAINT_NAME, kcu.TABLE_CATALOG, kcu.TABLE_SCHEMA, kcu.TABLE_NAME, kcu.COLUMN_NAME, kcu.ORDINAL_POSITION, kcu.POSITION_IN_UNIQUE_CONSTRAINT, kcu.REFERENCED_TABLE_SCHEMA, kcu.REFERENCED_TABLE_NAME, kcu.REFERENCED_COLUMN_NAME from information_schema.key_column_usage as kcu where kcu.table_schema = :__vtschemaname /* VARCHAR */ and kcu.table_name = :kcu_table_name1 /* VARCHAR */", + "FieldQuery": "select CONSTRAINT_CATALOG, CONSTRAINT_SCHEMA, 
CONSTRAINT_NAME, TABLE_CATALOG, TABLE_SCHEMA, TABLE_NAME, COLUMN_NAME, ORDINAL_POSITION, POSITION_IN_UNIQUE_CONSTRAINT, REFERENCED_TABLE_SCHEMA, REFERENCED_TABLE_NAME, REFERENCED_COLUMN_NAME from information_schema.key_column_usage as kcu where 1 != 1", + "Query": "select distinct CONSTRAINT_CATALOG, CONSTRAINT_SCHEMA, CONSTRAINT_NAME, TABLE_CATALOG, TABLE_SCHEMA, TABLE_NAME, COLUMN_NAME, ORDINAL_POSITION, POSITION_IN_UNIQUE_CONSTRAINT, REFERENCED_TABLE_SCHEMA, REFERENCED_TABLE_NAME, REFERENCED_COLUMN_NAME from information_schema.key_column_usage as kcu where kcu.table_schema = :__vtschemaname /* VARCHAR */ and kcu.table_name = :kcu_table_name1 /* VARCHAR */", "SysTableTableName": "[kcu_table_name1:VARCHAR(\"music\")]", "SysTableTableSchema": "[VARCHAR(\"user\")]", "Table": "information_schema.key_column_usage" @@ -1492,5 +1495,190 @@ "user.user" ] } + }, + { + "comment": "Select literals from table union Select literals from table", + "query": "SELECT 1 from user UNION SELECT 2 from user", + "plan": { + "QueryType": "SELECT", + "Original": "SELECT 1 from user UNION SELECT 2 from user", + "Instructions": { + "OperatorType": "Distinct", + "Collations": [ + "0" + ], + "Inputs": [ + { + "OperatorType": "Route", + "Variant": "Scatter", + "Keyspace": { + "Name": "user", + "Sharded": true + }, + "FieldQuery": "select 1 from `user` where 1 != 1 union select 2 from `user` where 1 != 1", + "Query": "select 1 from `user` union select 2 from `user`", + "Table": "`user`" + } + ] + }, + "TablesUsed": [ + "user.user" + ] + } + }, + { + "comment": "Select column from table union Select literals from table", + "query": "select col1 from user union select 3 from user", + "plan": { + "QueryType": "SELECT", + "Original": "select col1 from user union select 3 from user", + "Instructions": { + "OperatorType": "Distinct", + "Collations": [ + "(0:1)" + ], + "ResultColumns": 1, + "Inputs": [ + { + "OperatorType": "Route", + "Variant": "Scatter", + "Keyspace": { + "Name": "user", + 
"Sharded": true + }, + "FieldQuery": "select col1, weight_string(col1) from (select col1 from `user` where 1 != 1 union select 3 from `user` where 1 != 1) as dt where 1 != 1", + "Query": "select col1, weight_string(col1) from (select col1 from `user` union select 3 from `user`) as dt", + "Table": "`user`" + } + ] + }, + "TablesUsed": [ + "user.user" + ] + } + }, + { + "comment": "Select literals from table union Select column from table", + "query": "select 3 from user union select col1 from user", + "plan": { + "QueryType": "SELECT", + "Original": "select 3 from user union select col1 from user", + "Instructions": { + "OperatorType": "Distinct", + "Collations": [ + "(0:1)" + ], + "ResultColumns": 1, + "Inputs": [ + { + "OperatorType": "Route", + "Variant": "Scatter", + "Keyspace": { + "Name": "user", + "Sharded": true + }, + "FieldQuery": "select `3`, weight_string(`3`) from (select 3 from `user` where 1 != 1 union select col1 from `user` where 1 != 1) as dt where 1 != 1", + "Query": "select `3`, weight_string(`3`) from (select 3 from `user` union select col1 from `user`) as dt", + "Table": "`user`" + } + ] + }, + "TablesUsed": [ + "user.user" + ] + } + }, + { + "comment": "Select literals from table union Select now() from table", + "query": "select 3 from user union select now() from user", + "plan": { + "QueryType": "SELECT", + "Original": "select 3 from user union select now() from user", + "Instructions": { + "OperatorType": "Distinct", + "Collations": [ + "(0:1)" + ], + "ResultColumns": 1, + "Inputs": [ + { + "OperatorType": "Route", + "Variant": "Scatter", + "Keyspace": { + "Name": "user", + "Sharded": true + }, + "FieldQuery": "select `3`, weight_string(`3`) from (select 3 from `user` where 1 != 1 union select now() from `user` where 1 != 1) as dt where 1 != 1", + "Query": "select `3`, weight_string(`3`) from (select 3 from `user` union select now() from `user`) as dt", + "Table": "`user`" + } + ] + }, + "TablesUsed": [ + "user.user" + ] + } + }, + { + 
"comment": "Select now() from table union Select literals from table", + "query": "select now() from user union select 3 from user", + "plan": { + "QueryType": "SELECT", + "Original": "select now() from user union select 3 from user", + "Instructions": { + "OperatorType": "Distinct", + "Collations": [ + "(0:1)" + ], + "ResultColumns": 1, + "Inputs": [ + { + "OperatorType": "Route", + "Variant": "Scatter", + "Keyspace": { + "Name": "user", + "Sharded": true + }, + "FieldQuery": "select `now()`, weight_string(`now()`) from (select now() from `user` where 1 != 1 union select 3 from `user` where 1 != 1) as dt where 1 != 1", + "Query": "select `now()`, weight_string(`now()`) from (select now() from `user` union select 3 from `user`) as dt", + "Table": "`user`" + } + ] + }, + "TablesUsed": [ + "user.user" + ] + } + }, + { + "comment": "Select now() from table union Select column from table", + "query": "select now() from user union select id from user", + "plan": { + "QueryType": "SELECT", + "Original": "select now() from user union select id from user", + "Instructions": { + "OperatorType": "Distinct", + "Collations": [ + "(0:1)" + ], + "ResultColumns": 1, + "Inputs": [ + { + "OperatorType": "Route", + "Variant": "Scatter", + "Keyspace": { + "Name": "user", + "Sharded": true + }, + "FieldQuery": "select `now()`, weight_string(`now()`) from (select now() from `user` where 1 != 1 union select id from `user` where 1 != 1) as dt where 1 != 1", + "Query": "select `now()`, weight_string(`now()`) from (select now() from `user` union select id from `user`) as dt", + "Table": "`user`" + } + ] + }, + "TablesUsed": [ + "user.user" + ] + } } ] \ No newline at end of file diff --git a/go/vt/vtgate/planbuilder/testdata/unsupported_cases.json b/go/vt/vtgate/planbuilder/testdata/unsupported_cases.json index 4d6ef878693..f2cbbd76ede 100644 --- a/go/vt/vtgate/planbuilder/testdata/unsupported_cases.json +++ b/go/vt/vtgate/planbuilder/testdata/unsupported_cases.json @@ -67,7 +67,7 @@ { 
"comment": "Group by column number, used with non-aliased expression (duplicated code)", "query": "select * from user group by 1", - "plan": "cannot use column offsets in group statement when using `*`" + "plan": "cannot use column offsets in group clause when using `*`" }, { "comment": "Multi-value aggregates not supported", @@ -354,11 +354,6 @@ "query": "select id2 from user uu where id in (select id from user where id = uu.id and user.col in (select col from (select col, id, user_id from user_extra where user_id = 5) uu where uu.user_id = uu.id))", "plan": "VT12001: unsupported: correlated subquery is only supported for EXISTS" }, - { - "comment": "rewrite of 'order by 2' that becomes 'order by id', leading to ambiguous binding.", - "query": "select a.id, b.id from user as a, user_extra as b union select 1, 2 order by 2", - "plan": "Column 'id' in field list is ambiguous" - }, { "comment": "unsupported with clause in delete statement", "query": "with x as (select * from user) delete from x", diff --git a/go/vt/vtgate/planbuilder/testdata/vschemas/schema.json b/go/vt/vtgate/planbuilder/testdata/vschemas/schema.json index 14af2f3ca0c..00fbafc12ed 100644 --- a/go/vt/vtgate/planbuilder/testdata/vschemas/schema.json +++ b/go/vt/vtgate/planbuilder/testdata/vschemas/schema.json @@ -25,6 +25,12 @@ "user.user" ] }, + { + "from_table": "second_user.bar", + "to_tables": [ + "user.music" + ] + }, { "from_table": "primary_redirect@primary", "to_tables": [ @@ -489,8 +495,7 @@ "sharded": true, "vindexes": { "hash_dup": { - "type": "hash_test", - "owner": "user" + "type": "hash_test" } }, "tables": { diff --git a/go/vt/vtgate/semantics/analyzer.go b/go/vt/vtgate/semantics/analyzer.go index 1cb457f6882..01af4b186a8 100644 --- a/go/vt/vtgate/semantics/analyzer.go +++ b/go/vt/vtgate/semantics/analyzer.go @@ -25,47 +25,64 @@ import ( // analyzer controls the flow of the analysis. 
// It starts the tree walking and controls which part of the analysis sees which parts of the tree type analyzer struct { - scoper *scoper - tables *tableCollector - binder *binder - typer *typer - rewriter *earlyRewriter - sig QuerySignature + scoper *scoper + earlyTables *earlyTableCollector + tables *tableCollector + binder *binder + typer *typer + rewriter *earlyRewriter + sig QuerySignature + si SchemaInformation + currentDb string + recheck bool err error inProjection int - projErr error - unshardedErr error - warning string + projErr error + unshardedErr error + warning string + singleUnshardedKeyspace bool + fullAnalysis bool } // newAnalyzer create the semantic analyzer -func newAnalyzer(dbName string, si SchemaInformation) *analyzer { +func newAnalyzer(dbName string, si SchemaInformation, fullAnalysis bool) *analyzer { // TODO dependencies between these components are a little tangled. We should try to clean up s := newScoper() a := &analyzer{ - scoper: s, - tables: newTableCollector(s, si, dbName), - typer: newTyper(), + scoper: s, + earlyTables: newEarlyTableCollector(si, dbName), + typer: newTyper(), + si: si, + currentDb: dbName, + fullAnalysis: fullAnalysis, } s.org = a - a.tables.org = a + return a +} - b := newBinder(s, a, a.tables, a.typer) - a.binder = b +func (a *analyzer) lateInit() { + a.tables = a.earlyTables.newTableCollector(a.scoper, a) + a.binder = newBinder(a.scoper, a, a.tables, a.typer) + a.scoper.binder = a.binder a.rewriter = &earlyRewriter{ - scoper: s, - binder: b, + scoper: a.scoper, + binder: a.binder, expandedColumns: map[sqlparser.TableName][]*sqlparser.ColName{}, + aliasMapCache: map[*sqlparser.Select]map[string]exprContainer{}, + reAnalyze: a.reAnalyze, + tables: a.tables, } - s.binder = b - return a } // Analyze analyzes the parsed query. 
func Analyze(statement sqlparser.Statement, currentDb string, si SchemaInformation) (*SemTable, error) { - analyzer := newAnalyzer(currentDb, newSchemaInfo(si)) + return analyseAndGetSemTable(statement, currentDb, si, false) +} + +func analyseAndGetSemTable(statement sqlparser.Statement, currentDb string, si SchemaInformation, fullAnalysis bool) (*SemTable, error) { + analyzer := newAnalyzer(currentDb, newSchemaInfo(si), fullAnalysis) // Analysis for initial scope err := analyzer.analyze(statement) @@ -74,14 +91,12 @@ func Analyze(statement sqlparser.Statement, currentDb string, si SchemaInformati } // Creation of the semantic table - semTable := analyzer.newSemTable(statement, si.ConnCollation()) - - return semTable, nil + return analyzer.newSemTable(statement, si.ConnCollation()) } // AnalyzeStrict analyzes the parsed query, and fails the analysis for any possible errors func AnalyzeStrict(statement sqlparser.Statement, currentDb string, si SchemaInformation) (*SemTable, error) { - st, err := Analyze(statement, currentDb, si) + st, err := analyseAndGetSemTable(statement, currentDb, si, true) if err != nil { return nil, err } @@ -96,12 +111,36 @@ func AnalyzeStrict(statement sqlparser.Statement, currentDb string, si SchemaInf return st, nil } -func (a *analyzer) newSemTable(statement sqlparser.Statement, coll collations.ID) *SemTable { +func (a *analyzer) newSemTable( + statement sqlparser.Statement, + coll collations.ID, +) (*SemTable, error) { var comments *sqlparser.ParsedComments commentedStmt, isCommented := statement.(sqlparser.Commented) if isCommented { comments = commentedStmt.GetParsedComments() } + + if a.singleUnshardedKeyspace { + return &SemTable{ + Tables: a.earlyTables.Tables, + Comments: comments, + Warning: a.warning, + Collation: coll, + ExprTypes: map[sqlparser.Expr]Type{}, + NotSingleRouteErr: a.projErr, + NotUnshardedErr: a.unshardedErr, + Recursive: ExprDependencies{}, + Direct: ExprDependencies{}, + ColumnEqualities: 
map[columnName][]sqlparser.Expr{}, + ExpandedColumns: map[sqlparser.TableName][]*sqlparser.ColName{}, + columns: map[*sqlparser.Union]sqlparser.SelectExprs{}, + comparator: nil, + StatementIDs: a.scoper.statementIDs, + QuerySignature: QuerySignature{}, + }, nil + } + columns := map[*sqlparser.Union]sqlparser.SelectExprs{} for union, info := range a.tables.unionInfo { columns[union] = info.exprs @@ -122,7 +161,7 @@ func (a *analyzer) newSemTable(statement sqlparser.Statement, coll collations.ID columns: columns, StatementIDs: a.scoper.statementIDs, QuerySignature: a.sig, - } + }, nil } func (a *analyzer) setError(err error) { @@ -176,10 +215,6 @@ func (a *analyzer) analyzeUp(cursor *sqlparser.Cursor) bool { return false } - if err := a.scoper.up(cursor); err != nil { - a.setError(err) - return false - } if err := a.tables.up(cursor); err != nil { a.setError(err) return false @@ -195,9 +230,17 @@ func (a *analyzer) analyzeUp(cursor *sqlparser.Cursor) bool { return false } - if err := a.rewriter.up(cursor); err != nil { + if !a.recheck { + // no need to run the rewriter on rechecking + if err := a.rewriter.up(cursor); err != nil { + a.setError(err) + return true + } + } + + if err := a.scoper.up(cursor); err != nil { a.setError(err) - return true + return false } a.leaveProjection(cursor) @@ -279,10 +322,55 @@ func (a *analyzer) depsForExpr(expr sqlparser.Expr) (direct, recursive TableSet, } func (a *analyzer) analyze(statement sqlparser.Statement) error { + _ = sqlparser.Rewrite(statement, nil, a.earlyUp) + if a.err != nil { + return a.err + } + + if a.canShortCut(statement) { + return nil + } + + a.lateInit() + + return a.lateAnalyze(statement) +} + +func (a *analyzer) lateAnalyze(statement sqlparser.SQLNode) error { _ = sqlparser.Rewrite(statement, a.analyzeDown, a.analyzeUp) return a.err } +func (a *analyzer) reAnalyze(statement sqlparser.SQLNode) error { + a.recheck = true + defer func() { + a.recheck = false + }() + return a.lateAnalyze(statement) +} + +// 
canShortCut checks if we are dealing with a single unsharded keyspace and no tables that have managed foreign keys +// if so, we can stop the analyzer early +func (a *analyzer) canShortCut(statement sqlparser.Statement) bool { + if a.fullAnalysis { + return false + } + ks, _ := singleUnshardedKeyspace(a.earlyTables.Tables) + if ks == nil { + return false + } + + a.singleUnshardedKeyspace = !sqlparser.IsDMLStatement(statement) + return a.singleUnshardedKeyspace +} + +// earlyUp collects tables in the query, so we can check +// if this a single unsharded query we are dealing with +func (a *analyzer) earlyUp(cursor *sqlparser.Cursor) bool { + a.earlyTables.up(cursor) + return true +} + func (a *analyzer) shouldContinue() bool { return a.err == nil } @@ -328,6 +416,10 @@ type ShardedError struct { Inner error } +func (p ShardedError) Unwrap() error { + return p.Inner +} + func (p ShardedError) Error() string { return p.Inner.Error() } diff --git a/go/vt/vtgate/semantics/analyzer_test.go b/go/vt/vtgate/semantics/analyzer_test.go index 21222da2263..5062819a88b 100644 --- a/go/vt/vtgate/semantics/analyzer_test.go +++ b/go/vt/vtgate/semantics/analyzer_test.go @@ -28,7 +28,7 @@ import ( "vitess.io/vitess/go/vt/vtgate/vindexes" ) -var T0 TableSet +var NoTables TableSet var ( // Just here to make outputs more readable @@ -586,7 +586,7 @@ func TestOrderByBindingTable(t *testing.T) { TS0, }, { "select 1 as c from tabl order by c", - T0, + NoTables, }, { "select name, name from t1, t2 order by name", TS1, @@ -664,7 +664,7 @@ func TestGroupByBinding(t *testing.T) { TS0, }, { "select 1 as c from tabl group by c", - T0, + NoTables, }, { "select t1.id from t1, t2 group by id", TS0, @@ -676,7 +676,10 @@ func TestGroupByBinding(t *testing.T) { TS1, }, { "select a.id from t as a, t1 group by id", - TS0, + // since we have authoritative info on t1, we know that it does have an `id` column, + // and we are missing column info for `t`, we just assume this is coming from t1. 
+ // we really need schema tracking here + TS1, }, { "select a.id from t, t1 as a group by id", TS1, @@ -694,44 +697,47 @@ func TestGroupByBinding(t *testing.T) { func TestHavingBinding(t *testing.T) { tcases := []struct { - sql string - deps TableSet + sql, err string + deps TableSet }{{ - "select col from tabl having col = 1", - TS0, + sql: "select col from tabl having col = 1", + deps: TS0, }, { - "select col from tabl having tabl.col = 1", - TS0, + sql: "select col from tabl having tabl.col = 1", + deps: TS0, }, { - "select col from tabl having d.tabl.col = 1", - TS0, + sql: "select col from tabl having d.tabl.col = 1", + deps: TS0, }, { - "select tabl.col as x from tabl having x = 1", - TS0, + sql: "select tabl.col as x from tabl having col = 1", + deps: TS0, }, { - "select tabl.col as x from tabl having col", - TS0, + sql: "select tabl.col as x from tabl having x = 1", + deps: TS0, }, { - "select col from tabl having 1 = 1", - T0, + sql: "select tabl.col as x from tabl having col", + deps: TS0, }, { - "select col as c from tabl having c = 1", - TS0, + sql: "select col from tabl having 1 = 1", + deps: NoTables, }, { - "select 1 as c from tabl having c = 1", - T0, + sql: "select col as c from tabl having c = 1", + deps: TS0, }, { - "select t1.id from t1, t2 having id = 1", - TS0, + sql: "select 1 as c from tabl having c = 1", + deps: NoTables, }, { - "select t.id from t, t1 having id = 1", - TS0, + sql: "select t1.id from t1, t2 having id = 1", + deps: TS0, }, { - "select t.id, count(*) as a from t, t1 group by t.id having a = 1", - MergeTableSets(TS0, TS1), + sql: "select t.id from t, t1 having id = 1", + deps: TS0, }, { - "select t.id, sum(t2.name) as a from t, t2 group by t.id having a = 1", - TS1, + sql: "select t.id, count(*) as a from t, t1 group by t.id having a = 1", + deps: MergeTableSets(TS0, TS1), + }, { + sql: "select t.id, sum(t2.name) as a from t, t2 group by t.id having a = 1", + deps: TS1, }, { sql: "select u2.a, u1.a from u1, u2 having u2.a = 
2", deps: TS1, @@ -877,109 +883,6 @@ func TestUnionWithOrderBy(t *testing.T) { assert.Equal(t, TS1, d2) } -func TestScopingWDerivedTables(t *testing.T) { - queries := []struct { - query string - errorMessage string - recursiveExpectation TableSet - expectation TableSet - }{ - { - query: "select id from (select x as id from user) as t", - recursiveExpectation: TS0, - expectation: TS1, - }, { - query: "select id from (select foo as id from user) as t", - recursiveExpectation: TS0, - expectation: TS1, - }, { - query: "select id from (select foo as id from (select x as foo from user) as c) as t", - recursiveExpectation: TS0, - expectation: TS2, - }, { - query: "select t.id from (select foo as id from user) as t", - recursiveExpectation: TS0, - expectation: TS1, - }, { - query: "select t.id2 from (select foo as id from user) as t", - errorMessage: "column 't.id2' not found", - }, { - query: "select id from (select 42 as id) as t", - recursiveExpectation: T0, - expectation: TS1, - }, { - query: "select t.id from (select 42 as id) as t", - recursiveExpectation: T0, - expectation: TS1, - }, { - query: "select ks.t.id from (select 42 as id) as t", - errorMessage: "column 'ks.t.id' not found", - }, { - query: "select * from (select id, id from user) as t", - errorMessage: "Duplicate column name 'id'", - }, { - query: "select t.baz = 1 from (select id as baz from user) as t", - expectation: TS1, - recursiveExpectation: TS0, - }, { - query: "select t.id from (select * from user, music) as t", - expectation: TS2, - recursiveExpectation: MergeTableSets(TS0, TS1), - }, { - query: "select t.id from (select * from user, music) as t order by t.id", - expectation: TS2, - recursiveExpectation: MergeTableSets(TS0, TS1), - }, { - query: "select t.id from (select * from user) as t join user as u on t.id = u.id", - expectation: TS1, - recursiveExpectation: TS0, - }, { - query: "select t.col1 from t3 ua join (select t1.id, t1.col1 from t1 join t2) as t", - expectation: TS3, - 
recursiveExpectation: TS1, - }, { - query: "select uu.test from (select id from t1) uu", - errorMessage: "column 'uu.test' not found", - }, { - query: "select uu.id from (select id as col from t1) uu", - errorMessage: "column 'uu.id' not found", - }, { - query: "select uu.id from (select id as col from t1) uu", - errorMessage: "column 'uu.id' not found", - }, { - query: "select uu.id from (select id from t1) as uu where exists (select * from t2 as uu where uu.id = uu.uid)", - expectation: TS1, - recursiveExpectation: TS0, - }, { - query: "select 1 from user uu where exists (select 1 from user where exists (select 1 from (select 1 from t1) uu where uu.user_id = uu.id))", - expectation: T0, - recursiveExpectation: T0, - }} - for _, query := range queries { - t.Run(query.query, func(t *testing.T) { - parse, err := sqlparser.Parse(query.query) - require.NoError(t, err) - st, err := Analyze(parse, "user", &FakeSI{ - Tables: map[string]*vindexes.Table{ - "t": {Name: sqlparser.NewIdentifierCS("t")}, - }, - }) - - switch { - case query.errorMessage != "" && err != nil: - require.EqualError(t, err, query.errorMessage) - case query.errorMessage != "": - require.EqualError(t, st.NotUnshardedErr, query.errorMessage) - default: - require.NoError(t, err) - sel := parse.(*sqlparser.Select) - assert.Equal(t, query.recursiveExpectation, st.RecursiveDeps(extract(sel, 0)), "RecursiveDeps") - assert.Equal(t, query.expectation, st.DirectDeps(extract(sel, 0)), "DirectDeps") - } - }) - } -} - func TestJoinPredicateDependencies(t *testing.T) { // create table t() // create table t1(id bigint) @@ -995,15 +898,15 @@ func TestJoinPredicateDependencies(t *testing.T) { directExpect: MergeTableSets(TS0, TS1), }, { query: "select 1 from (select * from t1) x join t2 on x.id = t2.uid", - recursiveExpect: MergeTableSets(TS0, TS2), + recursiveExpect: MergeTableSets(TS0, TS1), directExpect: MergeTableSets(TS1, TS2), }, { query: "select 1 from (select id from t1) x join t2 on x.id = t2.uid", - 
recursiveExpect: MergeTableSets(TS0, TS2), + recursiveExpect: MergeTableSets(TS0, TS1), directExpect: MergeTableSets(TS1, TS2), }, { query: "select 1 from (select id from t1 union select id from t) x join t2 on x.id = t2.uid", - recursiveExpect: MergeTableSets(TS0, TS1, TS3), + recursiveExpect: MergeTableSets(TS0, TS1, TS2), directExpect: MergeTableSets(TS2, TS3), }} for _, query := range queries { @@ -1022,107 +925,6 @@ func TestJoinPredicateDependencies(t *testing.T) { } } -func TestDerivedTablesOrderClause(t *testing.T) { - queries := []struct { - query string - recursiveExpectation TableSet - expectation TableSet - }{{ - query: "select 1 from (select id from user) as t order by id", - recursiveExpectation: TS0, - expectation: TS1, - }, { - query: "select id from (select id from user) as t order by id", - recursiveExpectation: TS0, - expectation: TS1, - }, { - query: "select id from (select id from user) as t order by t.id", - recursiveExpectation: TS0, - expectation: TS1, - }, { - query: "select id as foo from (select id from user) as t order by foo", - recursiveExpectation: TS0, - expectation: TS1, - }, { - query: "select bar from (select id as bar from user) as t order by bar", - recursiveExpectation: TS0, - expectation: TS1, - }, { - query: "select bar as foo from (select id as bar from user) as t order by bar", - recursiveExpectation: TS0, - expectation: TS1, - }, { - query: "select bar as foo from (select id as bar from user) as t order by foo", - recursiveExpectation: TS0, - expectation: TS1, - }, { - query: "select bar as foo from (select id as bar, oo from user) as t order by oo", - recursiveExpectation: TS0, - expectation: TS1, - }, { - query: "select bar as foo from (select id, oo from user) as t(bar,oo) order by bar", - recursiveExpectation: TS0, - expectation: TS1, - }} - si := &FakeSI{Tables: map[string]*vindexes.Table{"t": {Name: sqlparser.NewIdentifierCS("t")}}} - for _, query := range queries { - t.Run(query.query, func(t *testing.T) { - parse, 
err := sqlparser.Parse(query.query) - require.NoError(t, err) - - st, err := Analyze(parse, "user", si) - require.NoError(t, err) - - sel := parse.(*sqlparser.Select) - assert.Equal(t, query.recursiveExpectation, st.RecursiveDeps(sel.OrderBy[0].Expr), "RecursiveDeps") - assert.Equal(t, query.expectation, st.DirectDeps(sel.OrderBy[0].Expr), "DirectDeps") - - }) - } -} - -func TestScopingWComplexDerivedTables(t *testing.T) { - queries := []struct { - query string - errorMessage string - rightExpectation TableSet - leftExpectation TableSet - }{ - { - query: "select 1 from user uu where exists (select 1 from user where exists (select 1 from (select 1 from t1) uu where uu.user_id = uu.id))", - rightExpectation: TS0, - leftExpectation: TS0, - }, - { - query: "select 1 from user.user uu where exists (select 1 from user.user as uu where exists (select 1 from (select 1 from user.t1) uu where uu.user_id = uu.id))", - rightExpectation: TS1, - leftExpectation: TS1, - }, - } - for _, query := range queries { - t.Run(query.query, func(t *testing.T) { - parse, err := sqlparser.Parse(query.query) - require.NoError(t, err) - st, err := Analyze(parse, "user", &FakeSI{ - Tables: map[string]*vindexes.Table{ - "t": {Name: sqlparser.NewIdentifierCS("t")}, - }, - }) - if query.errorMessage != "" { - require.EqualError(t, err, query.errorMessage) - } else { - require.NoError(t, err) - sel := parse.(*sqlparser.Select) - comparisonExpr := sel.Where.Expr.(*sqlparser.ExistsExpr).Subquery.Select.(*sqlparser.Select).Where.Expr.(*sqlparser.ExistsExpr).Subquery.Select.(*sqlparser.Select).Where.Expr.(*sqlparser.ComparisonExpr) - left := comparisonExpr.Left - right := comparisonExpr.Right - assert.Equal(t, query.leftExpectation, st.RecursiveDeps(left), "Left RecursiveDeps") - assert.Equal(t, query.rightExpectation, st.RecursiveDeps(right), "Right RecursiveDeps") - } - }) - } -} - func TestScopingWVindexTables(t *testing.T) { queries := []struct { query string @@ -1242,36 +1044,6 @@ func 
BenchmarkAnalyzeSubQueries(b *testing.B) { } } -func BenchmarkAnalyzeDerivedTableQueries(b *testing.B) { - queries := []string{ - "select id from (select x as id from user) as t", - "select id from (select foo as id from user) as t", - "select id from (select foo as id from (select x as foo from user) as c) as t", - "select t.id from (select foo as id from user) as t", - "select t.id2 from (select foo as id from user) as t", - "select id from (select 42 as id) as t", - "select t.id from (select 42 as id) as t", - "select ks.t.id from (select 42 as id) as t", - "select * from (select id, id from user) as t", - "select t.baz = 1 from (select id as baz from user) as t", - "select t.id from (select * from user, music) as t", - "select t.id from (select * from user, music) as t order by t.id", - "select t.id from (select * from user) as t join user as u on t.id = u.id", - "select t.col1 from t3 ua join (select t1.id, t1.col1 from t1 join t2) as t", - "select uu.id from (select id from t1) as uu where exists (select * from t2 as uu where uu.id = uu.uid)", - "select 1 from user uu where exists (select 1 from user where exists (select 1 from (select 1 from t1) uu where uu.user_id = uu.id))", - } - - for i := 0; i < b.N; i++ { - for _, query := range queries { - parse, err := sqlparser.Parse(query) - require.NoError(b, err) - - _, _ = Analyze(parse, "d", fakeSchemaInfo()) - } - } -} - func BenchmarkAnalyzeHavingQueries(b *testing.B) { queries := []string{ "select col from tabl having col = 1", @@ -1364,43 +1136,30 @@ func TestSingleUnshardedKeyspace(t *testing.T) { tests := []struct { query string unsharded *vindexes.Keyspace - tables []*vindexes.Table }{ { query: "select 1 from t, t1", unsharded: nil, // both tables are unsharded, but from different keyspaces - tables: nil, }, { query: "select 1 from t2", unsharded: nil, - tables: nil, }, { query: "select 1 from t, t2", unsharded: nil, - tables: nil, }, { query: "select 1 from t as A, t as B", - unsharded: ks1, - tables: 
[]*vindexes.Table{ - {Keyspace: ks1, Name: sqlparser.NewIdentifierCS("t")}, - {Keyspace: ks1, Name: sqlparser.NewIdentifierCS("t")}, - }, + unsharded: unsharded, }, { query: "insert into t select * from t", - unsharded: ks1, - tables: []*vindexes.Table{ - {Keyspace: ks1, Name: sqlparser.NewIdentifierCS("t")}, - {Keyspace: ks1, Name: sqlparser.NewIdentifierCS("t")}, - }, + unsharded: unsharded, }, } for _, test := range tests { t.Run(test.query, func(t *testing.T) { _, semTable := parseAndAnalyze(t, test.query, "d") - queryIsUnsharded, tables := semTable.SingleUnshardedKeyspace() + queryIsUnsharded, _ := semTable.SingleUnshardedKeyspace() assert.Equal(t, test.unsharded, queryIsUnsharded) - assert.Equal(t, test.tables, tables) }) } } @@ -1481,13 +1240,13 @@ func TestScopingSubQueryJoinClause(t *testing.T) { } -var ks1 = &vindexes.Keyspace{ - Name: "ks1", +var unsharded = &vindexes.Keyspace{ + Name: "unsharded", Sharded: false, } var ks2 = &vindexes.Keyspace{ Name: "ks2", - Sharded: false, + Sharded: true, } var ks3 = &vindexes.Keyspace{ Name: "ks3", @@ -1498,24 +1257,52 @@ var ks3 = &vindexes.Keyspace{ // create table t1(id bigint) // create table t2(uid bigint, name varchar(255)) func fakeSchemaInfo() *FakeSI { - cols1 := []vindexes.Column{{ - Name: sqlparser.NewIdentifierCI("id"), - Type: querypb.Type_INT64, - }} - cols2 := []vindexes.Column{{ - Name: sqlparser.NewIdentifierCI("uid"), - Type: querypb.Type_INT64, - }, { - Name: sqlparser.NewIdentifierCI("name"), - Type: querypb.Type_VARCHAR, - }} - si := &FakeSI{ Tables: map[string]*vindexes.Table{ - "t": {Name: sqlparser.NewIdentifierCS("t"), Keyspace: ks1}, - "t1": {Name: sqlparser.NewIdentifierCS("t1"), Columns: cols1, ColumnListAuthoritative: true, Keyspace: ks2}, - "t2": {Name: sqlparser.NewIdentifierCS("t2"), Columns: cols2, ColumnListAuthoritative: true, Keyspace: ks3}, + "t": tableT(), + "t1": tableT1(), + "t2": tableT2(), }, } return si } + +func tableT() *vindexes.Table { + return &vindexes.Table{ + Name: 
sqlparser.NewIdentifierCS("t"), + Keyspace: unsharded, + } +} +func tableT1() *vindexes.Table { + return &vindexes.Table{ + Name: sqlparser.NewIdentifierCS("t1"), + Columns: []vindexes.Column{{ + Name: sqlparser.NewIdentifierCI("id"), + Type: querypb.Type_INT64, + }}, + ColumnListAuthoritative: true, + ColumnVindexes: []*vindexes.ColumnVindex{ + {Name: "id_vindex"}, + }, + Keyspace: ks2, + } +} +func tableT2() *vindexes.Table { + return &vindexes.Table{ + Name: sqlparser.NewIdentifierCS("t2"), + Columns: []vindexes.Column{{ + Name: sqlparser.NewIdentifierCI("uid"), + Type: querypb.Type_INT64, + }, { + Name: sqlparser.NewIdentifierCI("name"), + Type: querypb.Type_VARCHAR, + CollationName: "utf8_bin", + }, { + Name: sqlparser.NewIdentifierCI("textcol"), + Type: querypb.Type_VARCHAR, + CollationName: "big5_bin", + }}, + ColumnListAuthoritative: true, + Keyspace: ks3, + } +} diff --git a/go/vt/vtgate/semantics/binder.go b/go/vt/vtgate/semantics/binder.go index e3fed7e5a68..8657a0615a1 100644 --- a/go/vt/vtgate/semantics/binder.go +++ b/go/vt/vtgate/semantics/binder.go @@ -19,6 +19,8 @@ package semantics import ( "strings" + "vitess.io/vitess/go/vt/vterrors" + "vitess.io/vitess/go/vt/sqlparser" ) @@ -61,7 +63,7 @@ func (b *binder) up(cursor *sqlparser.Cursor) error { currScope := b.scoper.currentScope() for _, ident := range node.Using { name := sqlparser.NewColName(ident.String()) - deps, err := b.resolveColumn(name, currScope, true) + deps, err := b.resolveColumn(name, currScope, true, true) if err != nil { return err } @@ -69,7 +71,7 @@ func (b *binder) up(cursor *sqlparser.Cursor) error { } case *sqlparser.ColName: currentScope := b.scoper.currentScope() - deps, err := b.resolveColumn(node, currentScope, false) + deps, err := b.resolveColumn(node, currentScope, false, true) if err != nil { if deps.direct.IsEmpty() || !strings.HasSuffix(err.Error(), "is ambiguous") || @@ -155,7 +157,7 @@ func (b *binder) rewriteJoinUsingColName(deps dependency, node *sqlparser.ColNam 
Name: sqlparser.NewIdentifierCS(alias.String()), } } - deps, err = b.resolveColumn(node, currentScope, false) + deps, err = b.resolveColumn(node, currentScope, false, true) if err != nil { return dependency{}, err } @@ -196,29 +198,33 @@ func (b *binder) setSubQueryDependencies(subq *sqlparser.Subquery, currScope *sc b.direct[subq] = subqDirectDeps.KeepOnly(tablesToKeep) } -func (b *binder) resolveColumn(colName *sqlparser.ColName, current *scope, allowMulti bool) (dependency, error) { +func (b *binder) resolveColumn(colName *sqlparser.ColName, current *scope, allowMulti, singleTableFallBack bool) (dependency, error) { + if !current.stmtScope && current.inGroupBy { + return b.resolveColInGroupBy(colName, current, allowMulti, singleTableFallBack) + } + if !current.stmtScope && current.inHaving && !current.inHavingAggr { + return b.resolveColumnInHaving(colName, current, allowMulti) + } + var thisDeps dependencies first := true var tableName *sqlparser.TableName + for current != nil { var err error thisDeps, err = b.resolveColumnInScope(current, colName, allowMulti) if err != nil { - err = makeAmbiguousError(colName, err) - if thisDeps == nil { - return dependency{}, err - } + return dependency{}, makeAmbiguousError(colName, err) } if !thisDeps.empty() { - deps, thisErr := thisDeps.get() - if thisErr != nil { - err = makeAmbiguousError(colName, thisErr) - } - return deps, err - } else if err != nil { - return dependency{}, err + deps, err := thisDeps.get() + return deps, makeAmbiguousError(colName, err) } - if current.parent == nil && len(current.tables) == 1 && first && colName.Qualifier.IsEmpty() { + if current.parent == nil && + len(current.tables) == 1 && + first && + colName.Qualifier.IsEmpty() && + singleTableFallBack { // if this is the top scope, and we still haven't been able to find a match, we know we are about to fail // we can check this last scope and see if there is a single table. 
if there is just one table in the scope // we assume that the column is meant to come from this table. @@ -236,10 +242,150 @@ func (b *binder) resolveColumn(colName *sqlparser.ColName, current *scope, allow return dependency{}, ShardedError{&ColumnNotFoundError{Column: colName, Table: tableName}} } +func isColumnNotFound(err error) bool { + switch err := err.(type) { + case *ColumnNotFoundError: + return true + case ShardedError: + return isColumnNotFound(err.Inner) + default: + return false + } +} + +func (b *binder) resolveColumnInHaving(colName *sqlparser.ColName, current *scope, allowMulti bool) (dependency, error) { + if current.inHavingAggr { + // when inside an aggregation, we'll search the FROM clause before the SELECT expressions + deps, err := b.resolveColumn(colName, current.parent, allowMulti, true) + if !deps.direct.IsEmpty() || (err != nil && !isColumnNotFound(err)) { + return deps, err + } + } + + // Here we are searching among the SELECT expressions for a match + thisDeps, err := b.resolveColumnInScope(current, colName, allowMulti) + if err != nil { + return dependency{}, makeAmbiguousError(colName, err) + } + + if !thisDeps.empty() { + // we found something! let's return it + deps, err := thisDeps.get() + if err != nil { + err = makeAmbiguousError(colName, err) + } + return deps, err + } + + notFoundErr := &ColumnNotFoundClauseError{Column: colName.Name.String(), Clause: "having clause"} + if current.inHavingAggr { + // if we are inside an aggregation, we've already looked everywhere. now it's time to give up + return dependency{}, notFoundErr + } + + // Now we'll search the FROM clause, but with a twist. 
If we find it in the FROM clause, the column must also + // exist as a standalone expression in the SELECT list + deps, err := b.resolveColumn(colName, current.parent, allowMulti, true) + if deps.direct.IsEmpty() { + return dependency{}, notFoundErr + } + + sel := current.stmt.(*sqlparser.Select) // we can be sure of this, since HAVING doesn't exist on UNION + if selDeps := b.searchInSelectExpressions(colName, deps, sel); !selDeps.direct.IsEmpty() { + return selDeps, nil + } + + if !current.inHavingAggr && len(sel.GroupBy) == 0 { + // if we are not inside an aggregation, and there is no GROUP BY, we consider the FROM clause before failing + if !deps.direct.IsEmpty() || (err != nil && !isColumnNotFound(err)) { + return deps, err + } + } + + return dependency{}, notFoundErr +} + +// searchInSelectExpressions searches for the ColName among the SELECT and GROUP BY expressions +// It used dependency information to match the columns +func (b *binder) searchInSelectExpressions(colName *sqlparser.ColName, deps dependency, stmt *sqlparser.Select) dependency { + for _, selectExpr := range stmt.SelectExprs { + ae, ok := selectExpr.(*sqlparser.AliasedExpr) + if !ok { + continue + } + selectCol, ok := ae.Expr.(*sqlparser.ColName) + if !ok || !selectCol.Name.Equal(colName.Name) { + continue + } + + _, direct, _ := b.org.depsForExpr(selectCol) + if deps.direct == direct { + // we have found the ColName in the SELECT expressions, so it's safe to use here + direct, recursive, typ := b.org.depsForExpr(ae.Expr) + return dependency{certain: true, direct: direct, recursive: recursive, typ: typ} + } + } + + for _, gb := range stmt.GroupBy { + selectCol, ok := gb.(*sqlparser.ColName) + if !ok || !selectCol.Name.Equal(colName.Name) { + continue + } + + _, direct, _ := b.org.depsForExpr(selectCol) + if deps.direct == direct { + // we have found the ColName in the GROUP BY expressions, so it's safe to use here + direct, recursive, typ := b.org.depsForExpr(gb) + return dependency{certain: 
true, direct: direct, recursive: recursive, typ: typ} + } + } + return dependency{} +} + +// resolveColInGroupBy handles the special rules we have when binding on the GROUP BY column +func (b *binder) resolveColInGroupBy( + colName *sqlparser.ColName, + current *scope, + allowMulti bool, + singleTableFallBack bool, +) (dependency, error) { + if current.parent == nil { + return dependency{}, vterrors.VT13001("did not expect this to be the last scope") + } + // if we are in GROUP BY, we have to search the FROM clause before we search the SELECT expressions + deps, firstErr := b.resolveColumn(colName, current.parent, allowMulti, false) + if firstErr == nil { + return deps, nil + } + + // either we didn't find the column on a table, or it was ambiguous. + // in either case, next step is to search the SELECT expressions + if !colName.Qualifier.IsEmpty() { + // if the col name has a qualifier, none of the SELECT expressions are going to match + return dependency{}, nil + } + vtbl, ok := current.tables[0].(*vTableInfo) + if !ok { + return dependency{}, vterrors.VT13001("expected the table info to be a *vTableInfo") + } + + dependencies, err := vtbl.dependenciesInGroupBy(colName.Name.String(), b.org) + if err != nil { + return dependency{}, err + } + if dependencies.empty() { + if isColumnNotFound(firstErr) { + return dependency{}, &ColumnNotFoundClauseError{Column: colName.Name.String(), Clause: "group statement"} + } + return deps, firstErr + } + return dependencies.get() +} + func (b *binder) resolveColumnInScope(current *scope, expr *sqlparser.ColName, allowMulti bool) (dependencies, error) { var deps dependencies = ¬hing{} for _, table := range current.tables { - if !expr.Qualifier.IsEmpty() && !table.matches(expr.Qualifier) { + if !expr.Qualifier.IsEmpty() && !table.matches(expr.Qualifier) && !current.isUnion { continue } thisDeps, err := table.dependencies(expr.Name.String(), b.org) diff --git a/go/vt/vtgate/semantics/dependencies.go 
b/go/vt/vtgate/semantics/dependencies.go index 8e5a481e17d..4560a159c13 100644 --- a/go/vt/vtgate/semantics/dependencies.go +++ b/go/vt/vtgate/semantics/dependencies.go @@ -31,6 +31,7 @@ type ( merge(other dependencies, allowMulti bool) dependencies } dependency struct { + certain bool direct TableSet recursive TableSet typ *Type @@ -51,6 +52,7 @@ var ambigousErr = vterrors.Errorf(vtrpcpb.Code_INVALID_ARGUMENT, "ambiguous") func createCertain(direct TableSet, recursive TableSet, qt *Type) *certain { c := &certain{ dependency: dependency{ + certain: true, direct: direct, recursive: recursive, }, @@ -64,6 +66,7 @@ func createCertain(direct TableSet, recursive TableSet, qt *Type) *certain { func createUncertain(direct TableSet, recursive TableSet) *uncertain { return &uncertain{ dependency: dependency{ + certain: false, direct: direct, recursive: recursive, }, @@ -130,7 +133,7 @@ func (n *nothing) empty() bool { } func (n *nothing) get() (dependency, error) { - return dependency{}, nil + return dependency{certain: true}, nil } func (n *nothing) merge(d dependencies, _ bool) dependencies { diff --git a/go/vt/vtgate/semantics/derived_test.go b/go/vt/vtgate/semantics/derived_test.go new file mode 100644 index 00000000000..509c9925fb1 --- /dev/null +++ b/go/vt/vtgate/semantics/derived_test.go @@ -0,0 +1,265 @@ +/* +Copyright 2024 The Vitess Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package semantics + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "vitess.io/vitess/go/vt/sqlparser" + "vitess.io/vitess/go/vt/vtgate/vindexes" +) + +func TestScopingWDerivedTables(t *testing.T) { + queries := []struct { + query string + errorMessage string + recursiveDeps TableSet + directDeps TableSet + }{ + { + query: "select id from (select x as id from user) as t", + recursiveDeps: TS0, + directDeps: TS1, + }, { + query: "select id from (select foo as id from user) as t", + recursiveDeps: TS0, + directDeps: TS1, + }, { + query: "select id from (select foo as id from (select x as foo from user) as c) as t", + recursiveDeps: TS0, + directDeps: TS2, + }, { + query: "select t.id from (select foo as id from user) as t", + recursiveDeps: TS0, + directDeps: TS1, + }, { + query: "select t.id2 from (select foo as id from user) as t", + errorMessage: "column 't.id2' not found", + }, { + query: "select id from (select 42 as id) as t", + recursiveDeps: NoTables, + directDeps: TS1, + }, { + query: "select t.id from (select 42 as id) as t", + recursiveDeps: NoTables, + directDeps: TS1, + }, { + query: "select ks.t.id from (select 42 as id) as t", + errorMessage: "column 'ks.t.id' not found", + }, { + query: "select * from (select id, id from user) as t", + errorMessage: "Duplicate column name 'id'", + }, { + query: "select t.baz = 1 from (select id as baz from user) as t", + directDeps: TS1, + recursiveDeps: TS0, + }, { + query: "select t.id from (select * from user, music) as t", + directDeps: TS2, + recursiveDeps: MergeTableSets(TS0, TS1), + }, { + query: "select t.id from (select * from user, music) as t order by t.id", + directDeps: TS2, + recursiveDeps: MergeTableSets(TS0, TS1), + }, { + query: "select t.id from (select * from user) as t join user as u on t.id = u.id", + directDeps: TS2, + recursiveDeps: TS0, + }, { + query: "select t.col1 from t3 ua join (select t1.id, t1.col1 from t1 join t2) as t", 
+ directDeps: TS3, + recursiveDeps: TS1, + }, { + query: "select uu.test from (select id from t1) uu", + errorMessage: "column 'uu.test' not found", + }, { + query: "select uu.id from (select id as col from t1) uu", + errorMessage: "column 'uu.id' not found", + }, { + query: "select uu.id from (select id as col from t1) uu", + errorMessage: "column 'uu.id' not found", + }, { + query: "select uu.id from (select id from t1) as uu where exists (select * from t2 as uu where uu.id = uu.uid)", + directDeps: TS2, + recursiveDeps: TS0, + }, { + query: "select 1 from user uu where exists (select 1 from user where exists (select 1 from (select 1 from t1) uu where uu.user_id = uu.id))", + directDeps: NoTables, + recursiveDeps: NoTables, + }, { + query: "select uu.count from (select count(*) as `count` from t1) uu", + directDeps: TS1, + recursiveDeps: TS0, + }} + for _, query := range queries { + t.Run(query.query, func(t *testing.T) { + parse, err := sqlparser.Parse(query.query) + require.NoError(t, err) + st, err := Analyze(parse, "user", &FakeSI{ + Tables: map[string]*vindexes.Table{ + "t": {Name: sqlparser.NewIdentifierCS("t"), Keyspace: ks2}, + }, + }) + + switch { + case query.errorMessage != "" && err != nil: + require.EqualError(t, err, query.errorMessage) + case query.errorMessage != "": + require.EqualError(t, st.NotUnshardedErr, query.errorMessage) + default: + require.NoError(t, err) + sel := parse.(*sqlparser.Select) + assert.Equal(t, query.recursiveDeps, st.RecursiveDeps(extract(sel, 0)), "RecursiveDeps") + assert.Equal(t, query.directDeps, st.DirectDeps(extract(sel, 0)), "DirectDeps") + } + }) + } +} + +func TestDerivedTablesOrderClause(t *testing.T) { + queries := []struct { + query string + recursiveExpectation TableSet + expectation TableSet + }{{ + query: "select 1 from (select id from user) as t order by id", + recursiveExpectation: TS0, + expectation: TS1, + }, { + query: "select id from (select id from user) as t order by id", + recursiveExpectation: TS0, 
+ expectation: TS1, + }, { + query: "select id from (select id from user) as t order by t.id", + recursiveExpectation: TS0, + expectation: TS1, + }, { + query: "select id as foo from (select id from user) as t order by foo", + recursiveExpectation: TS0, + expectation: TS1, + }, { + query: "select bar from (select id as bar from user) as t order by bar", + recursiveExpectation: TS0, + expectation: TS1, + }, { + query: "select bar as foo from (select id as bar from user) as t order by bar", + recursiveExpectation: TS0, + expectation: TS1, + }, { + query: "select bar as foo from (select id as bar from user) as t order by foo", + recursiveExpectation: TS0, + expectation: TS1, + }, { + query: "select bar as foo from (select id as bar, oo from user) as t order by oo", + recursiveExpectation: TS0, + expectation: TS1, + }, { + query: "select bar as foo from (select id, oo from user) as t(bar,oo) order by bar", + recursiveExpectation: TS0, + expectation: TS1, + }} + si := &FakeSI{Tables: map[string]*vindexes.Table{"t": {Name: sqlparser.NewIdentifierCS("t")}}} + for _, query := range queries { + t.Run(query.query, func(t *testing.T) { + parse, err := sqlparser.Parse(query.query) + require.NoError(t, err) + + st, err := Analyze(parse, "user", si) + require.NoError(t, err) + + sel := parse.(*sqlparser.Select) + assert.Equal(t, query.recursiveExpectation, st.RecursiveDeps(sel.OrderBy[0].Expr), "RecursiveDeps") + assert.Equal(t, query.expectation, st.DirectDeps(sel.OrderBy[0].Expr), "DirectDeps") + + }) + } +} + +func TestScopingWComplexDerivedTables(t *testing.T) { + queries := []struct { + query string + errorMessage string + rightExpectation TableSet + leftExpectation TableSet + }{ + { + query: "select 1 from user uu where exists (select 1 from user where exists (select 1 from (select 1 from t1) uu where uu.user_id = uu.id))", + rightExpectation: TS0, + leftExpectation: TS0, + }, + { + query: "select 1 from user.user uu where exists (select 1 from user.user as uu where exists 
(select 1 from (select 1 from user.t1) uu where uu.user_id = uu.id))", + rightExpectation: TS1, + leftExpectation: TS1, + }, + } + for _, query := range queries { + t.Run(query.query, func(t *testing.T) { + parse, err := sqlparser.Parse(query.query) + require.NoError(t, err) + st, err := Analyze(parse, "user", &FakeSI{ + Tables: map[string]*vindexes.Table{ + "t": {Name: sqlparser.NewIdentifierCS("t")}, + }, + }) + if query.errorMessage != "" { + require.EqualError(t, err, query.errorMessage) + } else { + require.NoError(t, err) + sel := parse.(*sqlparser.Select) + comparisonExpr := sel.Where.Expr.(*sqlparser.ExistsExpr).Subquery.Select.(*sqlparser.Select).Where.Expr.(*sqlparser.ExistsExpr).Subquery.Select.(*sqlparser.Select).Where.Expr.(*sqlparser.ComparisonExpr) + left := comparisonExpr.Left + right := comparisonExpr.Right + assert.Equal(t, query.leftExpectation, st.RecursiveDeps(left), "Left RecursiveDeps") + assert.Equal(t, query.rightExpectation, st.RecursiveDeps(right), "Right RecursiveDeps") + } + }) + } +} + +func BenchmarkAnalyzeDerivedTableQueries(b *testing.B) { + queries := []string{ + "select id from (select x as id from user) as t", + "select id from (select foo as id from user) as t", + "select id from (select foo as id from (select x as foo from user) as c) as t", + "select t.id from (select foo as id from user) as t", + "select t.id2 from (select foo as id from user) as t", + "select id from (select 42 as id) as t", + "select t.id from (select 42 as id) as t", + "select ks.t.id from (select 42 as id) as t", + "select * from (select id, id from user) as t", + "select t.baz = 1 from (select id as baz from user) as t", + "select t.id from (select * from user, music) as t", + "select t.id from (select * from user, music) as t order by t.id", + "select t.id from (select * from user) as t join user as u on t.id = u.id", + "select t.col1 from t3 ua join (select t1.id, t1.col1 from t1 join t2) as t", + "select uu.id from (select id from t1) as uu where 
exists (select * from t2 as uu where uu.id = uu.uid)", + "select 1 from user uu where exists (select 1 from user where exists (select 1 from (select 1 from t1) uu where uu.user_id = uu.id))", + } + + for i := 0; i < b.N; i++ { + for _, query := range queries { + parse, err := sqlparser.Parse(query) + require.NoError(b, err) + + _, _ = Analyze(parse, "d", fakeSchemaInfo()) + } + } +} diff --git a/go/vt/vtgate/semantics/early_rewriter.go b/go/vt/vtgate/semantics/early_rewriter.go index d11d12023c4..ce47005c1d8 100644 --- a/go/vt/vtgate/semantics/early_rewriter.go +++ b/go/vt/vtgate/semantics/early_rewriter.go @@ -33,81 +33,105 @@ type earlyRewriter struct { clause string warning string expandedColumns map[sqlparser.TableName][]*sqlparser.ColName + aliasMapCache map[*sqlparser.Select]map[string]exprContainer + tables *tableCollector + + // reAnalyze is used when we are running in the late stage, after the other parts of semantic analysis + // have happened, and we are introducing or changing the AST. 
We invoke it so all parts of the query have been + // typed, scoped and bound correctly + reAnalyze func(n sqlparser.SQLNode) error } func (r *earlyRewriter) down(cursor *sqlparser.Cursor) error { switch node := cursor.Node().(type) { - case *sqlparser.Where: - handleWhereClause(node, cursor.Parent()) case sqlparser.SelectExprs: - return handleSelectExprs(r, cursor, node) + return r.handleSelectExprs(cursor, node) case *sqlparser.JoinTableExpr: - handleJoinTableExpr(r, node) - case sqlparser.OrderBy: - handleOrderBy(r, cursor, node) + r.handleJoinTableExprDown(node) case *sqlparser.OrExpr: rewriteOrExpr(cursor, node) case *sqlparser.NotExpr: rewriteNotExpr(cursor, node) - case sqlparser.GroupBy: - r.clause = "group statement" - case *sqlparser.Literal: - return handleLiteral(r, cursor, node) - case *sqlparser.CollateExpr: - return handleCollateExpr(r, node) case *sqlparser.ComparisonExpr: return handleComparisonExpr(cursor, node) } return nil } -func rewriteNotExpr(cursor *sqlparser.Cursor, node *sqlparser.NotExpr) { - cmp, ok := node.Expr.(*sqlparser.ComparisonExpr) - if !ok { - return +func (r *earlyRewriter) up(cursor *sqlparser.Cursor) error { + // this rewriting is done in the `up` phase, because we need the scope to have been + // filled in with the available tables + switch node := cursor.Node().(type) { + case *sqlparser.JoinTableExpr: + return r.handleJoinTableExprUp(node) + case sqlparser.GroupBy: + r.clause = "group clause" + iter := &exprIterator{ + node: node, + idx: -1, + } + return r.handleGroupBy(cursor.Parent(), iter) + case sqlparser.OrderBy: + r.clause = "order clause" + iter := &orderByIterator{ + node: node, + idx: -1, + r: r, + } + return r.handleOrderBy(cursor.Parent(), iter) + case *sqlparser.Where: + if node.Type == sqlparser.HavingClause { + return r.handleHavingClause(node, cursor.Parent()) + } + } - cmp.Operator = sqlparser.Inverse(cmp.Operator) - cursor.Replace(cmp) + return nil } -func (r *earlyRewriter) up(cursor *sqlparser.Cursor) 
error { - // this rewriting is done in the `up` phase, because we need the scope to have been - // filled in with the available tables - node, ok := cursor.Node().(*sqlparser.JoinTableExpr) - if !ok || len(node.Condition.Using) == 0 { +func (r *earlyRewriter) handleJoinTableExprUp(join *sqlparser.JoinTableExpr) error { + if len(join.Condition.Using) == 0 { return nil } - err := rewriteJoinUsing(r.binder, node) + err := rewriteJoinUsing(r.binder, join) if err != nil { return err } + return r.reAnalyze(join.Condition.On) +} - // since the binder has already been over the join, we need to invoke it again so it - // can bind columns to the right tables - sqlparser.Rewrite(node.Condition.On, nil, func(cursor *sqlparser.Cursor) bool { - innerErr := r.binder.up(cursor) - if innerErr == nil { - return true - } +func rewriteNotExpr(cursor *sqlparser.Cursor, node *sqlparser.NotExpr) { + cmp, ok := node.Expr.(*sqlparser.ComparisonExpr) + if !ok { + return + } - err = innerErr - return false - }) - return err + // There is no inverse operator for NullSafeEqualOp. + // There doesn't exist a null safe non-equality. + if cmp.Operator == sqlparser.NullSafeEqualOp { + return + } + cmp.Operator = sqlparser.Inverse(cmp.Operator) + cursor.Replace(cmp) } -// handleWhereClause processes WHERE clauses, specifically the HAVING clause. -func handleWhereClause(node *sqlparser.Where, parent sqlparser.SQLNode) { - if node.Type != sqlparser.HavingClause { - return +// handleHavingClause processes the HAVING clause +func (r *earlyRewriter) handleHavingClause(node *sqlparser.Where, parent sqlparser.SQLNode) error { + sel, ok := parent.(*sqlparser.Select) + if !ok { + return nil + } + expr, err := r.rewriteAliasesInHaving(node.Expr, sel) + if err != nil { + return err } - rewriteHavingAndOrderBy(node, parent) + node.Expr = expr + return r.reAnalyze(expr) } // handleSelectExprs expands * in SELECT expressions. 
-func handleSelectExprs(r *earlyRewriter, cursor *sqlparser.Cursor, node sqlparser.SelectExprs) error { +func (r *earlyRewriter) handleSelectExprs(cursor *sqlparser.Cursor, node sqlparser.SelectExprs) error { _, isSel := cursor.Parent().(*sqlparser.Select) if !isSel { return nil @@ -115,8 +139,8 @@ func handleSelectExprs(r *earlyRewriter, cursor *sqlparser.Cursor, node sqlparse return r.expandStar(cursor, node) } -// handleJoinTableExpr processes JOIN table expressions and handles the Straight Join type. -func handleJoinTableExpr(r *earlyRewriter, node *sqlparser.JoinTableExpr) { +// handleJoinTableExprDown processes JOIN table expressions and handles the Straight Join type. +func (r *earlyRewriter) handleJoinTableExprDown(node *sqlparser.JoinTableExpr) { if node.Join != sqlparser.StraightJoinType { return } @@ -124,93 +148,220 @@ func handleJoinTableExpr(r *earlyRewriter, node *sqlparser.JoinTableExpr) { r.warning = "straight join is converted to normal join" } -// handleOrderBy processes the ORDER BY clause. -func handleOrderBy(r *earlyRewriter, cursor *sqlparser.Cursor, node sqlparser.OrderBy) { - r.clause = "order clause" - rewriteHavingAndOrderBy(node, cursor.Parent()) +type orderByIterator struct { + node sqlparser.OrderBy + idx int + r *earlyRewriter } -// rewriteOrExpr rewrites OR expressions when the right side is FALSE. -func rewriteOrExpr(cursor *sqlparser.Cursor, node *sqlparser.OrExpr) { - newNode := rewriteOrFalse(*node) - if newNode != nil { - cursor.ReplaceAndRevisit(newNode) +func (it *orderByIterator) next() sqlparser.Expr { + it.idx++ + + if it.idx >= len(it.node) { + return nil } + + return it.node[it.idx].Expr } -// handleLiteral processes literals within the context of ORDER BY expressions. 
-func handleLiteral(r *earlyRewriter, cursor *sqlparser.Cursor, node *sqlparser.Literal) error { - newNode, err := r.rewriteOrderByExpr(node) - if err != nil { - return err - } - if newNode != nil { - cursor.Replace(newNode) +func (it *orderByIterator) replace(e sqlparser.Expr) error { + if it.idx >= len(it.node) { + return vterrors.VT13001("went past the last item") } + it.node[it.idx].Expr = e return nil } -// handleCollateExpr processes COLLATE expressions. -func handleCollateExpr(r *earlyRewriter, node *sqlparser.CollateExpr) error { - lit, ok := node.Expr.(*sqlparser.Literal) - if !ok { +type exprIterator struct { + node []sqlparser.Expr + idx int +} + +func (it *exprIterator) next() sqlparser.Expr { + it.idx++ + + if it.idx >= len(it.node) { return nil } - newNode, err := r.rewriteOrderByExpr(lit) + + return it.node[it.idx] +} + +func (it *exprIterator) replace(e sqlparser.Expr) error { + if it.idx >= len(it.node) { + return vterrors.VT13001("went past the last item") + } + it.node[it.idx] = e + return nil +} + +type iterator interface { + next() sqlparser.Expr + replace(e sqlparser.Expr) error +} + +func (r *earlyRewriter) replaceLiteralsInOrderBy(e sqlparser.Expr, iter iterator) (bool, error) { + lit := getIntLiteral(e) + if lit == nil { + return false, nil + } + + newExpr, recheck, err := r.rewriteOrderByLiteral(lit) + if err != nil { + return false, err + } + + if getIntLiteral(newExpr) == nil { + coll, ok := e.(*sqlparser.CollateExpr) + if ok { + coll.Expr = newExpr + newExpr = coll + } + } else { + // the expression is still a literal int. that means that we don't really need to sort by it. 
+ // we'll just replace the number with a string instead, just like mysql would do in this situation + // mysql> explain select 1 as foo from user group by 1; + // + // mysql> show warnings; + // +-------+------+-----------------------------------------------------------------+ + // | Level | Code | Message | + // +-------+------+-----------------------------------------------------------------+ + // | Note | 1003 | /* select#1 */ select 1 AS `foo` from `test`.`user` group by '' | + // +-------+------+-----------------------------------------------------------------+ + newExpr = sqlparser.NewStrLiteral("") + } + + err = iter.replace(newExpr) if err != nil { - return err + return false, err } - if newNode != nil { - node.Expr = newNode + if recheck { + err = r.reAnalyze(newExpr) } - return nil + if err != nil { + return false, err + } + return true, nil } -// handleComparisonExpr processes Comparison expressions, specifically for tuples with equal length and EqualOp operator. -func handleComparisonExpr(cursor *sqlparser.Cursor, node *sqlparser.ComparisonExpr) error { - lft, lftOK := node.Left.(sqlparser.ValTuple) - rgt, rgtOK := node.Right.(sqlparser.ValTuple) - if !lftOK || !rgtOK || len(lft) != len(rgt) || node.Operator != sqlparser.EqualOp { +func (r *earlyRewriter) replaceLiteralsInGroupBy(e sqlparser.Expr) (sqlparser.Expr, error) { + lit := getIntLiteral(e) + if lit == nil { + return nil, nil + } + + newExpr, err := r.rewriteGroupByExpr(lit) + if err != nil { + return nil, err + } + + if getIntLiteral(newExpr) == nil { + coll, ok := e.(*sqlparser.CollateExpr) + if ok { + coll.Expr = newExpr + newExpr = coll + } + } else { + // the expression is still a literal int. that means that we don't really need to sort by it. 
+ // we'll just replace the number with a string instead, just like mysql would do in this situation + // mysql> explain select 1 as foo from user group by 1; + // + // mysql> show warnings; + // +-------+------+-----------------------------------------------------------------+ + // | Level | Code | Message | + // +-------+------+-----------------------------------------------------------------+ + // | Note | 1003 | /* select#1 */ select 1 AS `foo` from `test`.`user` group by '' | + // +-------+------+-----------------------------------------------------------------+ + newExpr = sqlparser.NewStrLiteral("") + } + + return newExpr, nil +} + +func getIntLiteral(e sqlparser.Expr) *sqlparser.Literal { + var lit *sqlparser.Literal + switch node := e.(type) { + case *sqlparser.Literal: + lit = node + case *sqlparser.CollateExpr: + expr, ok := node.Expr.(*sqlparser.Literal) + if !ok { + return nil + } + lit = expr + default: return nil } - var predicates []sqlparser.Expr - for i, l := range lft { - r := rgt[i] - predicates = append(predicates, &sqlparser.ComparisonExpr{ - Operator: sqlparser.EqualOp, - Left: l, - Right: r, - Escape: node.Escape, - }) + if lit.Type != sqlparser.IntVal { + return nil } - cursor.Replace(sqlparser.AndExpressions(predicates...)) - return nil + return lit } -func (r *earlyRewriter) expandStar(cursor *sqlparser.Cursor, node sqlparser.SelectExprs) error { - currentScope := r.scoper.currentScope() - var selExprs sqlparser.SelectExprs - changed := false - for _, selectExpr := range node { - starExpr, isStarExpr := selectExpr.(*sqlparser.StarExpr) - if !isStarExpr { - selExprs = append(selExprs, selectExpr) +// handleOrderBy processes the ORDER BY clause. 
+func (r *earlyRewriter) handleOrderBy(parent sqlparser.SQLNode, iter iterator) error { + stmt, ok := parent.(sqlparser.SelectStatement) + if !ok { + return nil + } + + sel := sqlparser.GetFirstSelect(stmt) + for e := iter.next(); e != nil; e = iter.next() { + lit, err := r.replaceLiteralsInOrderBy(e, iter) + if err != nil { + return err + } + if lit { continue } - starExpanded, colNames, err := r.expandTableColumns(starExpr, currentScope.tables, r.binder.usingJoinInfo, r.scoper.org) + + expr, err := r.rewriteAliasesInOrderBy(e, sel) if err != nil { return err } - if !starExpanded || colNames == nil { - selExprs = append(selExprs, selectExpr) - continue + + if err = iter.replace(expr); err != nil { + return err + } + + if err = r.reAnalyze(expr); err != nil { + return err } - selExprs = append(selExprs, colNames...) - changed = true } - if changed { - cursor.ReplaceAndRevisit(selExprs) + + return nil +} + +// handleGroupBy processes the GROUP BY clause. +func (r *earlyRewriter) handleGroupBy(parent sqlparser.SQLNode, iter iterator) error { + stmt, ok := parent.(sqlparser.SelectStatement) + if !ok { + return nil + } + + sel := sqlparser.GetFirstSelect(stmt) + for e := iter.next(); e != nil; e = iter.next() { + expr, err := r.replaceLiteralsInGroupBy(e) + if err != nil { + return err + } + if expr == nil { + expr, err = r.rewriteAliasesInGroupBy(e, sel) + if err != nil { + return err + } + + } + err = iter.replace(expr) + if err != nil { + return err + } + + if err = r.reAnalyze(expr); err != nil { + return err + } } + return nil } @@ -220,88 +371,372 @@ func (r *earlyRewriter) expandStar(cursor *sqlparser.Cursor, node sqlparser.Sele // in SELECT points to that expression, not any table column. // - However, if the aliased expression is an aggregation and the column identifier in // the HAVING/ORDER BY clause is inside an aggregation function, the rule does not apply. 
-func rewriteHavingAndOrderBy(node, parent sqlparser.SQLNode) { - sel, isSel := parent.(*sqlparser.Select) - if !isSel { - return +func (r *earlyRewriter) rewriteAliasesInGroupBy(node sqlparser.Expr, sel *sqlparser.Select) (expr sqlparser.Expr, err error) { + type ExprContainer struct { + expr sqlparser.Expr + ambiguous bool + } + + currentScope := r.scoper.currentScope() + aliases := r.getAliasMap(sel) + insideAggr := false + downF := func(node, _ sqlparser.SQLNode) bool { + switch node.(type) { + case *sqlparser.Subquery: + return false + case sqlparser.AggrFunc: + insideAggr = true + } + + return true } - sqlparser.SafeRewrite(node, avoidSubqueries, - func(cursor *sqlparser.Cursor) bool { - col, ok := cursor.Node().(*sqlparser.ColName) - if !ok || !col.Qualifier.IsEmpty() { + output := sqlparser.CopyOnRewrite(node, downF, func(cursor *sqlparser.CopyOnWriteCursor) { + switch col := cursor.Node().(type) { + case sqlparser.AggrFunc: + insideAggr = false + case *sqlparser.ColName: + if !col.Qualifier.IsEmpty() { // we are only interested in columns not qualified by table names - return true + break } - _, parentIsAggr := cursor.Parent().(sqlparser.AggrFunc) + item, found := aliases[col.Name.Lowered()] + if !found { + break + } - // Iterate through SELECT expressions. 
- for _, e := range sel.SelectExprs { - ae, ok := e.(*sqlparser.AliasedExpr) - if !ok || !ae.As.Equal(col.Name) { - // we are searching for aliased expressions that match the column we have found - continue - } + isColumnOnTable, sure := r.isColumnOnTable(col, currentScope) + if found && isColumnOnTable { + r.warning = fmt.Sprintf("Column '%s' in group statement is ambiguous", sqlparser.String(col)) + } - expr := ae.Expr - if parentIsAggr { - if _, aliasPointsToAggr := expr.(sqlparser.AggrFunc); aliasPointsToAggr { - return false - } - } + if isColumnOnTable && sure { + break + } - if isSafeToRewrite(expr) { - cursor.Replace(expr) - } + if !sure { + r.warning = "Missing table info, so not binding to anything on the FROM clause" } - return true - }) -} -func avoidSubqueries(node, _ sqlparser.SQLNode) bool { - _, isSubQ := node.(*sqlparser.Subquery) - return !isSubQ + if item.ambiguous { + err = &AmbiguousColumnError{Column: sqlparser.String(col)} + } else if insideAggr && sqlparser.ContainsAggregation(item.expr) { + err = &InvalidUseOfGroupFunction{} + } + if err != nil { + cursor.StopTreeWalk() + return + } + + cursor.Replace(sqlparser.CloneExpr(item.expr)) + } + }, nil) + + expr = output.(sqlparser.Expr) + return } -func isSafeToRewrite(e sqlparser.Expr) bool { - safeToRewrite := true - _ = sqlparser.Walk(func(node sqlparser.SQLNode) (kontinue bool, err error) { +func (r *earlyRewriter) rewriteAliasesInHaving(node sqlparser.Expr, sel *sqlparser.Select) (expr sqlparser.Expr, err error) { + currentScope := r.scoper.currentScope() + if currentScope.isUnion { + // It is not safe to rewrite order by clauses in unions. 
+ return node, nil + } + + aliases := r.getAliasMap(sel) + insideAggr := false + dontEnterSubquery := func(node, _ sqlparser.SQLNode) bool { switch node.(type) { + case *sqlparser.Subquery: + return false + case sqlparser.AggrFunc: + insideAggr = true + } + + return true + } + output := sqlparser.CopyOnRewrite(node, dontEnterSubquery, func(cursor *sqlparser.CopyOnWriteCursor) { + var col *sqlparser.ColName + + switch node := cursor.Node().(type) { + case sqlparser.AggrFunc: + insideAggr = false + return case *sqlparser.ColName: - safeToRewrite = false - return false, nil + col = node + default: + return + } + + if !col.Qualifier.IsEmpty() { + // we are only interested in columns not qualified by table names + return + } + + item, found := aliases[col.Name.Lowered()] + if insideAggr { + // inside aggregations, we want to first look for columns in the FROM clause + isColumnOnTable, sure := r.isColumnOnTable(col, currentScope) + if isColumnOnTable { + if found && sure { + r.warning = fmt.Sprintf("Column '%s' in having clause is ambiguous", sqlparser.String(col)) + } + return + } + } else if !found { + // if outside aggregations, we don't care about FROM columns + // if there is no matching alias, there is no rewriting needed + return + } + + // If we get here, it means we have found an alias and want to use it + if item.ambiguous { + err = &AmbiguousColumnError{Column: sqlparser.String(col)} + } else if insideAggr && sqlparser.ContainsAggregation(item.expr) { + err = &InvalidUseOfGroupFunction{} + } + if err != nil { + cursor.StopTreeWalk() + return + } + + newColName := sqlparser.CopyOnRewrite(item.expr, nil, r.fillInQualifiers, nil) + + cursor.Replace(newColName) + }, nil) + + expr = output.(sqlparser.Expr) + return +} + +// rewriteAliasesInOrderBy rewrites columns in the ORDER BY to use aliases +// from the SELECT expressions when applicable, following MySQL scoping rules: +// - A column identifier without a table qualifier that matches an alias introduced +// in 
SELECT points to that expression, not any table column. +// - However, if the aliased expression is an aggregation and the column identifier in +// the HAVING/ORDER BY clause is inside an aggregation function, the rule does not apply. +func (r *earlyRewriter) rewriteAliasesInOrderBy(node sqlparser.Expr, sel *sqlparser.Select) (expr sqlparser.Expr, err error) { + currentScope := r.scoper.currentScope() + if currentScope.isUnion { + // It is not safe to rewrite order by clauses in unions. + return node, nil + } + + aliases := r.getAliasMap(sel) + insideAggr := false + dontEnterSubquery := func(node, _ sqlparser.SQLNode) bool { + switch node.(type) { + case *sqlparser.Subquery: + return false case sqlparser.AggrFunc: - return false, nil + insideAggr = true } - return true, nil - }, e) - return safeToRewrite + + return true + } + output := sqlparser.CopyOnRewrite(node, dontEnterSubquery, func(cursor *sqlparser.CopyOnWriteCursor) { + var col *sqlparser.ColName + + switch node := cursor.Node().(type) { + case sqlparser.AggrFunc: + insideAggr = false + return + case *sqlparser.ColName: + col = node + default: + return + } + + if !col.Qualifier.IsEmpty() { + // we are only interested in columns not qualified by table names + return + } + + var item exprContainer + var found bool + + item, found = aliases[col.Name.Lowered()] + if !found { + // if there is no matching alias, there is no rewriting needed + return + } + isColumnOnTable, sure := r.isColumnOnTable(col, currentScope) + if found && isColumnOnTable && sure { + r.warning = fmt.Sprintf("Column '%s' in order by statement is ambiguous", sqlparser.String(col)) + } + + topLevel := col == node + if isColumnOnTable && sure && !topLevel { + // we only want to replace columns that are not coming from the table + return + } + + if !sure { + r.warning = "Missing table info, so not binding to anything on the FROM clause" + } + + if item.ambiguous { + err = &AmbiguousColumnError{Column: sqlparser.String(col)} + } else if 
insideAggr && sqlparser.ContainsAggregation(item.expr) { + err = &InvalidUseOfGroupFunction{} + } + if err != nil { + cursor.StopTreeWalk() + return + } + + newColName := sqlparser.CopyOnRewrite(item.expr, nil, r.fillInQualifiers, nil) + + cursor.Replace(newColName) + }, nil) + + expr = output.(sqlparser.Expr) + return } -func (r *earlyRewriter) rewriteOrderByExpr(node *sqlparser.Literal) (sqlparser.Expr, error) { - currScope, found := r.scoper.specialExprScopes[node] +// fillInQualifiers adds qualifiers to any columns we have rewritten +func (r *earlyRewriter) fillInQualifiers(cursor *sqlparser.CopyOnWriteCursor) { + col, ok := cursor.Node().(*sqlparser.ColName) + if !ok || !col.Qualifier.IsEmpty() { + return + } + ts, found := r.binder.direct[col] if !found { - return nil, nil + panic("uh oh") + } + tbl := r.tables.Tables[ts.TableOffset()] + tblName, err := tbl.Name() + if err != nil { + panic(err) + } + cursor.Replace(sqlparser.NewColNameWithQualifier(col.Name.String(), tblName)) +} + +func (r *earlyRewriter) isColumnOnTable(col *sqlparser.ColName, currentScope *scope) (isColumn bool, isCertain bool) { + if !currentScope.stmtScope && currentScope.parent != nil { + currentScope = currentScope.parent + } + deps, err := r.binder.resolveColumn(col, currentScope, false, false) + if err != nil { + return false, true + } + return true, deps.certain +} + +func (r *earlyRewriter) getAliasMap(sel *sqlparser.Select) (aliases map[string]exprContainer) { + var found bool + aliases, found = r.aliasMapCache[sel] + if found { + return + } + aliases = map[string]exprContainer{} + for _, e := range sel.SelectExprs { + ae, ok := e.(*sqlparser.AliasedExpr) + if !ok { + continue + } + + var alias string + + item := exprContainer{expr: ae.Expr} + if !ae.As.IsEmpty() { + alias = ae.As.Lowered() + } else if col, ok := ae.Expr.(*sqlparser.ColName); ok { + alias = col.Name.Lowered() + } + + if old, alreadyExists := aliases[alias]; alreadyExists && !sqlparser.Equals.Expr(old.expr, 
item.expr) { + item.ambiguous = true + } + + aliases[alias] = item + } + return aliases +} + +type exprContainer struct { + expr sqlparser.Expr + ambiguous bool +} + +func (r *earlyRewriter) rewriteOrderByLiteral(node *sqlparser.Literal) (expr sqlparser.Expr, needReAnalysis bool, err error) { + scope, found := r.scoper.specialExprScopes[node] + if !found { + return node, false, nil + } + num, err := strconv.Atoi(node.Val) + if err != nil { + return nil, false, vterrors.Errorf(vtrpcpb.Code_INVALID_ARGUMENT, "error parsing column number: %s", node.Val) + } + + stmt, isSel := scope.stmt.(*sqlparser.Select) + if !isSel { + return nil, false, vterrors.Errorf(vtrpcpb.Code_INTERNAL, "error invalid statement type, expect Select, got: %T", scope.stmt) + } + + if num < 1 || num > len(stmt.SelectExprs) { + return nil, false, vterrors.NewErrorf(vtrpcpb.Code_INVALID_ARGUMENT, vterrors.BadFieldError, "Unknown column '%d' in '%s'", num, r.clause) + } + + // We loop like this instead of directly accessing the offset, to make sure there are no unexpanded `*` before + for i := 0; i < num; i++ { + if _, ok := stmt.SelectExprs[i].(*sqlparser.AliasedExpr); !ok { + return nil, false, vterrors.Errorf(vtrpcpb.Code_UNIMPLEMENTED, "cannot use column offsets in %s when using `%s`", r.clause, sqlparser.String(stmt.SelectExprs[i])) + } + } + + colOffset := num - 1 + aliasedExpr, ok := stmt.SelectExprs[colOffset].(*sqlparser.AliasedExpr) + if !ok { + return nil, false, vterrors.Errorf(vtrpcpb.Code_INTERNAL, "don't know how to handle %s", sqlparser.String(node)) + } + + if scope.isUnion { + colName := sqlparser.NewColName(aliasedExpr.ColumnName()) + vtabl, ok := scope.tables[0].(*vTableInfo) + if !ok { + panic("BUG: not expected") + } + + // since column names can be ambiguous here, we want to do the binding by offset and not by column name + allColExprs := vtabl.cols[colOffset] + direct, recursive, typ := r.binder.org.depsForExpr(allColExprs) + r.binder.direct[colName] = direct + 
r.binder.recursive[colName] = recursive + if typ != nil { + r.binder.typer.exprTypes[colName] = *typ + } + + return colName, false, nil + } + + return realCloneOfColNames(aliasedExpr.Expr, false), true, nil +} + +func (r *earlyRewriter) rewriteGroupByExpr(node *sqlparser.Literal) (sqlparser.Expr, error) { + scope, found := r.scoper.specialExprScopes[node] + if !found { + return node, nil } num, err := strconv.Atoi(node.Val) if err != nil { return nil, vterrors.Errorf(vtrpcpb.Code_INVALID_ARGUMENT, "error parsing column number: %s", node.Val) } - stmt, isSel := currScope.stmt.(*sqlparser.Select) + + stmt, isSel := scope.stmt.(*sqlparser.Select) if !isSel { - return nil, vterrors.Errorf(vtrpcpb.Code_INTERNAL, "error invalid statement type, expect Select, got: %T", currScope.stmt) + return nil, vterrors.Errorf(vtrpcpb.Code_INTERNAL, "error invalid statement type, expect Select, got: %T", scope.stmt) } if num < 1 || num > len(stmt.SelectExprs) { return nil, vterrors.NewErrorf(vtrpcpb.Code_INVALID_ARGUMENT, vterrors.BadFieldError, "Unknown column '%d' in '%s'", num, r.clause) } + // We loop like this instead of directly accessing the offset, to make sure there are no unexpanded `*` before for i := 0; i < num; i++ { - expr := stmt.SelectExprs[i] - _, ok := expr.(*sqlparser.AliasedExpr) - if !ok { - return nil, vterrors.Errorf(vtrpcpb.Code_UNIMPLEMENTED, "cannot use column offsets in %s when using `%s`", r.clause, sqlparser.String(expr)) + if _, ok := stmt.SelectExprs[i].(*sqlparser.AliasedExpr); !ok { + return nil, vterrors.Errorf(vtrpcpb.Code_UNIMPLEMENTED, "cannot use column offsets in %s when using `%s`", r.clause, sqlparser.String(stmt.SelectExprs[i])) } } @@ -310,12 +745,68 @@ func (r *earlyRewriter) rewriteOrderByExpr(node *sqlparser.Literal) (sqlparser.E return nil, vterrors.Errorf(vtrpcpb.Code_INTERNAL, "don't know how to handle %s", sqlparser.String(node)) } - if !aliasedExpr.As.IsEmpty() { - return sqlparser.NewColName(aliasedExpr.As.String()), nil + if 
scope.isUnion { + colName := sqlparser.NewColName(aliasedExpr.ColumnName()) + return colName, nil + } + + return realCloneOfColNames(aliasedExpr.Expr, false), nil +} + +// rewriteOrExpr rewrites OR expressions when the right side is FALSE. +func rewriteOrExpr(cursor *sqlparser.Cursor, node *sqlparser.OrExpr) { + newNode := rewriteOrFalse(*node) + if newNode != nil { + cursor.ReplaceAndRevisit(newNode) + } +} + +// handleComparisonExpr processes Comparison expressions, specifically for tuples with equal length and EqualOp operator. +func handleComparisonExpr(cursor *sqlparser.Cursor, node *sqlparser.ComparisonExpr) error { + lft, lftOK := node.Left.(sqlparser.ValTuple) + rgt, rgtOK := node.Right.(sqlparser.ValTuple) + if !lftOK || !rgtOK || len(lft) != len(rgt) || node.Operator != sqlparser.EqualOp { + return nil + } + var predicates []sqlparser.Expr + for i, l := range lft { + r := rgt[i] + predicates = append(predicates, &sqlparser.ComparisonExpr{ + Operator: sqlparser.EqualOp, + Left: l, + Right: r, + Escape: node.Escape, + }) } + cursor.Replace(sqlparser.AndExpressions(predicates...)) + return nil +} - expr := realCloneOfColNames(aliasedExpr.Expr, currScope.isUnion) - return expr, nil +func (r *earlyRewriter) expandStar(cursor *sqlparser.Cursor, node sqlparser.SelectExprs) error { + currentScope := r.scoper.currentScope() + var selExprs sqlparser.SelectExprs + changed := false + for _, selectExpr := range node { + starExpr, isStarExpr := selectExpr.(*sqlparser.StarExpr) + if !isStarExpr { + selExprs = append(selExprs, selectExpr) + continue + } + starExpanded, colNames, err := r.expandTableColumns(starExpr, currentScope.tables, r.binder.usingJoinInfo, r.scoper.org) + if err != nil { + return err + } + if !starExpanded || colNames == nil { + selExprs = append(selExprs, selectExpr) + continue + } + selExprs = append(selExprs, colNames...) 
+ changed = true + } + if changed { + cursor.ReplaceAndRevisit(selExprs) + } + return nil } // realCloneOfColNames clones all the expressions including ColName. @@ -610,8 +1101,7 @@ type expanderState struct { // addColumn adds columns to the expander state. If we have vschema info about the query, // we also store which columns were expanded func (e *expanderState) addColumn(col ColumnInfo, tbl TableInfo, tblName sqlparser.TableName) { - tableAliased := !tbl.GetExpr().As.IsEmpty() - withQualifier := e.needsQualifier || tableAliased + withQualifier := e.needsQualifier var colName *sqlparser.ColName var alias sqlparser.IdentifierCI if withQualifier { diff --git a/go/vt/vtgate/semantics/early_rewriter_test.go b/go/vt/vtgate/semantics/early_rewriter_test.go index bd919fe9201..ba5510c5bf5 100644 --- a/go/vt/vtgate/semantics/early_rewriter_test.go +++ b/go/vt/vtgate/semantics/early_rewriter_test.go @@ -32,7 +32,7 @@ import ( func TestExpandStar(t *testing.T) { ks := &vindexes.Keyspace{ Name: "main", - Sharded: false, + Sharded: true, } schemaInfo := &FakeSI{ Tables: map[string]*vindexes.Table{ @@ -166,20 +166,20 @@ func TestExpandStar(t *testing.T) { expanded: "main.t1.a, main.t1.b, main.t1.c, main.t5.a", }, { sql: "select * from t1 join t5 using (b) having b = 12", - expSQL: "select t1.b as b, t1.a as a, t1.c as c, t5.a as a from t1 join t5 on t1.b = t5.b having b = 12", + expSQL: "select t1.b as b, t1.a as a, t1.c as c, t5.a as a from t1 join t5 on t1.b = t5.b having t1.b = 12", }, { - sql: "select 1 from t1 join t5 using (b) having b = 12", - expSQL: "select 1 from t1 join t5 on t1.b = t5.b having t1.b = 12", + sql: "select 1 from t1 join t5 using (b) where b = 12", + expSQL: "select 1 from t1 join t5 on t1.b = t5.b where t1.b = 12", }, { sql: "select * from (select 12) as t", - expSQL: "select t.`12` from (select 12 from dual) as t", + expSQL: "select `12` from (select 12 from dual) as t", }, { sql: "SELECT * FROM (SELECT *, 12 AS foo FROM t3) as results", expSQL: 
"select * from (select *, 12 as foo from t3) as results", }, { // if we are only star-expanding authoritative tables, we don't need to stop the expansion sql: "SELECT * FROM (SELECT t2.*, 12 AS foo FROM t3, t2) as results", - expSQL: "select results.c1, results.c2, results.foo from (select t2.c1 as c1, t2.c2 as c2, 12 as foo from t3, t2) as results", + expSQL: "select c1, c2, foo from (select t2.c1 as c1, t2.c2 as c2, 12 as foo from t3, t2) as results", }} for _, tcase := range tcases { t.Run(tcase.sql, func(t *testing.T) { @@ -300,33 +300,168 @@ func TestRewriteJoinUsingColumns(t *testing.T) { } -func TestOrderByGroupByLiteral(t *testing.T) { +func TestGroupByColumnName(t *testing.T) { schemaInfo := &FakeSI{ - Tables: map[string]*vindexes.Table{}, + Tables: map[string]*vindexes.Table{ + "t1": { + Name: sqlparser.NewIdentifierCS("t1"), + Columns: []vindexes.Column{{ + Name: sqlparser.NewIdentifierCI("id"), + Type: sqltypes.Int32, + }, { + Name: sqlparser.NewIdentifierCI("col1"), + Type: sqltypes.Int32, + }}, + ColumnListAuthoritative: true, + }, + "t2": { + Name: sqlparser.NewIdentifierCS("t2"), + Columns: []vindexes.Column{{ + Name: sqlparser.NewIdentifierCI("id"), + Type: sqltypes.Int32, + }, { + Name: sqlparser.NewIdentifierCI("col2"), + Type: sqltypes.Int32, + }}, + ColumnListAuthoritative: true, + }, + }, } cDB := "db" tcases := []struct { - sql string - expSQL string - expErr string + sql string + expSQL string + expDeps TableSet + expErr string + warning string }{{ - sql: "select 1 as id from t1 order by 1", - expSQL: "select 1 as id from t1 order by id asc", + sql: "select t3.col from t3 group by kj", + expSQL: "select t3.col from t3 group by kj", + expDeps: TS0, + }, { + sql: "select t2.col2 as xyz from t2 group by xyz", + expSQL: "select t2.col2 as xyz from t2 group by t2.col2", + expDeps: TS0, }, { - sql: "select t1.col from t1 order by 1", - expSQL: "select t1.col from t1 order by t1.col asc", + sql: "select id from t1 group by unknown", + expErr: 
"Unknown column 'unknown' in 'group statement'", }, { - sql: "select t1.col from t1 group by 1", - expSQL: "select t1.col from t1 group by t1.col", + sql: "select t1.c as x, sum(t2.id) as x from t1 join t2 group by x", + expErr: "VT03005: cannot group on 'x'", }, { - sql: "select t1.col as xyz from t1 group by 1", - expSQL: "select t1.col as xyz from t1 group by xyz", + sql: "select t1.col1, sum(t2.id) as col1 from t1 join t2 group by col1", + expSQL: "select t1.col1, sum(t2.id) as col1 from t1 join t2 group by col1", + expDeps: TS0, + warning: "Column 'col1' in group statement is ambiguous", + }, { + sql: "select t2.col2 as id, sum(t2.id) as x from t1 join t2 group by id", + expSQL: "select t2.col2 as id, sum(t2.id) as x from t1 join t2 group by t2.col2", + expDeps: TS1, + }, { + sql: "select sum(t2.col2) as id, sum(t2.id) as x from t1 join t2 group by id", + expErr: "VT03005: cannot group on 'id'", + }, { + sql: "select count(*) as x from t1 group by x", + expErr: "VT03005: cannot group on 'x'", + }} + for _, tcase := range tcases { + t.Run(tcase.sql, func(t *testing.T) { + ast, err := sqlparser.Parse(tcase.sql) + require.NoError(t, err) + selectStatement := ast.(*sqlparser.Select) + st, err := AnalyzeStrict(selectStatement, cDB, schemaInfo) + if tcase.expErr == "" { + require.NoError(t, err) + assert.Equal(t, tcase.expSQL, sqlparser.String(selectStatement)) + gb := selectStatement.GroupBy + deps := st.RecursiveDeps(gb[0]) + assert.Equal(t, tcase.expDeps, deps) + assert.Equal(t, tcase.warning, st.Warning) + } else { + require.EqualError(t, err, tcase.expErr) + } + }) + } +} + +func TestGroupByLiteral(t *testing.T) { + schemaInfo := &FakeSI{ + Tables: map[string]*vindexes.Table{}, + } + cDB := "db" + tcases := []struct { + sql string + expSQL string + expDeps TableSet + expErr string + }{{ + sql: "select t1.col from t1 group by 1", + expSQL: "select t1.col from t1 group by t1.col", + expDeps: TS0, }, { - sql: "select t1.col as xyz, count(*) from t1 group by 1 
order by 2", - expSQL: "select t1.col as xyz, count(*) from t1 group by xyz order by count(*) asc", + sql: "select t1.col as xyz from t1 group by 1", + expSQL: "select t1.col as xyz from t1 group by t1.col", + expDeps: TS0, }, { sql: "select id from t1 group by 2", - expErr: "Unknown column '2' in 'group statement'", + expErr: "Unknown column '2' in 'group clause'", + }, { + sql: "select *, id from t1 group by 2", + expErr: "cannot use column offsets in group clause when using `*`", + }} + for _, tcase := range tcases { + t.Run(tcase.sql, func(t *testing.T) { + ast, err := sqlparser.Parse(tcase.sql) + require.NoError(t, err) + selectStatement := ast.(*sqlparser.Select) + st, err := Analyze(selectStatement, cDB, schemaInfo) + if tcase.expErr == "" { + require.NoError(t, err) + assert.Equal(t, tcase.expSQL, sqlparser.String(selectStatement)) + gb := selectStatement.GroupBy + deps := st.RecursiveDeps(gb[0]) + assert.Equal(t, tcase.expDeps, deps) + } else { + require.EqualError(t, err, tcase.expErr) + } + }) + } +} + +func TestOrderByLiteral(t *testing.T) { + schemaInfo := &FakeSI{ + Tables: map[string]*vindexes.Table{}, + } + cDB := "db" + tcases := []struct { + sql string + expSQL string + expDeps TableSet + expErr string + }{{ + sql: "select 1 as id from t1 order by 1", + expSQL: "select 1 as id from t1 order by '' asc", + expDeps: NoTables, + }, { + sql: "select t1.col from t1 order by 1", + expSQL: "select t1.col from t1 order by t1.col asc", + expDeps: TS0, + }, { + sql: "select t1.col from t1 order by 1.0", + expSQL: "select t1.col from t1 order by 1.0 asc", + expDeps: NoTables, + }, { + sql: "select t1.col from t1 order by 'fubick'", + expSQL: "select t1.col from t1 order by 'fubick' asc", + expDeps: NoTables, + }, { + sql: "select t1.col as foo from t1 order by 1", + expSQL: "select t1.col as foo from t1 order by t1.col asc", + expDeps: TS0, + }, { + sql: "select t1.col as xyz, count(*) from t1 group by 1 order by 2", + expSQL: "select t1.col as xyz, count(*) 
from t1 group by t1.col order by count(*) asc", + expDeps: TS0, }, { sql: "select id from t1 order by 2", expErr: "Unknown column '2' in 'order clause'", @@ -334,21 +469,132 @@ func TestOrderByGroupByLiteral(t *testing.T) { sql: "select *, id from t1 order by 2", expErr: "cannot use column offsets in order clause when using `*`", }, { - sql: "select *, id from t1 group by 2", - expErr: "cannot use column offsets in group statement when using `*`", + sql: "select id from t1 order by 1 collate utf8_general_ci", + expSQL: "select id from t1 order by id collate utf8_general_ci asc", + expDeps: TS0, + }, { + sql: "select id from `user` union select 1 from dual order by 1", + expSQL: "select id from `user` union select 1 from dual order by id asc", + expDeps: TS0, + }, { + sql: "select id from t1 order by 2", + expErr: "Unknown column '2' in 'order clause'", + }, { + sql: "select a.id, b.id from user as a, user_extra as b union select 1, 2 order by 1", + expSQL: "select a.id, b.id from `user` as a, user_extra as b union select 1, 2 from dual order by id asc", + expDeps: TS0, }, { - sql: "select id from t1 order by 1 collate utf8_general_ci", - expSQL: "select id from t1 order by id collate utf8_general_ci asc", + sql: "select a.id, b.id from user as a, user_extra as b union select 1, 2 order by 2", + expSQL: "select a.id, b.id from `user` as a, user_extra as b union select 1, 2 from dual order by id asc", + expDeps: TS1, + }, { + sql: "select user.id as foo from user union select col from user_extra order by 1", + expSQL: "select `user`.id as foo from `user` union select col from user_extra order by foo asc", + expDeps: MergeTableSets(TS0, TS1), }} + for _, tcase := range tcases { + t.Run(tcase.sql, func(t *testing.T) { + ast, err := sqlparser.Parse(tcase.sql) + require.NoError(t, err) + selectStatement := ast.(sqlparser.SelectStatement) + st, err := Analyze(selectStatement, cDB, schemaInfo) + if tcase.expErr == "" { + require.NoError(t, err) + assert.Equal(t, 
tcase.expSQL, sqlparser.String(selectStatement)) + ordering := selectStatement.GetOrderBy() + deps := st.RecursiveDeps(ordering[0].Expr) + assert.Equal(t, tcase.expDeps, deps) + } else { + require.EqualError(t, err, tcase.expErr) + } + }) + } +} + +func TestHavingColumnName(t *testing.T) { + schemaInfo := getSchemaWithKnownColumns() + cDB := "db" + tcases := []struct { + sql string + expSQL string + expDeps TableSet + expErr string + warning string + }{{ + sql: "select id, sum(foo) as sumOfFoo from t1 having sumOfFoo > 1", + expSQL: "select id, sum(foo) as sumOfFoo from t1 having sum(t1.foo) > 1", + expDeps: TS0, + }, { + sql: "select id as X, sum(foo) as X from t1 having X > 1", + expErr: "Column 'X' in field list is ambiguous", + }, { + sql: "select id, sum(t1.foo) as foo from t1 having sum(foo) > 1", + expSQL: "select id, sum(t1.foo) as foo from t1 having sum(foo) > 1", + expDeps: TS0, + warning: "Column 'foo' in having clause is ambiguous", + }, { + sql: "select id, sum(t1.foo) as XYZ from t1 having sum(XYZ) > 1", + expErr: "Invalid use of group function", + }, { + sql: "select foo + 2 as foo from t1 having foo = 42", + expSQL: "select foo + 2 as foo from t1 having t1.foo + 2 = 42", + expDeps: TS0, + }, { + sql: "select count(*), ename from emp group by ename having comm > 1000", + expErr: "Unknown column 'comm' in 'having clause'", + }, { + sql: "select sal, ename from emp having empno > 1000", + expSQL: "select sal, ename from emp having empno > 1000", + expDeps: TS0, + }, { + sql: "select foo, count(*) foo from t1 group by foo having foo > 1000", + expErr: "Column 'foo' in field list is ambiguous", + }, { + sql: "select foo, count(*) foo from t1, emp group by foo having sum(sal) > 1000", + expSQL: "select foo, count(*) as foo from t1, emp group by foo having sum(sal) > 1000", + expDeps: TS1, + warning: "Column 'foo' in group statement is ambiguous", + }, { + sql: "select foo as X, sal as foo from t1, emp having sum(X) > 1000", + expSQL: "select foo as X, sal 
as foo from t1, emp having sum(t1.foo) > 1000", + expDeps: TS0, + }, { + sql: "select count(*) a from someTable having a = 10", + expSQL: "select count(*) as a from someTable having count(*) = 10", + expDeps: TS0, + }, { + sql: "select count(*) from emp having ename = 10", + expSQL: "select count(*) from emp having ename = 10", + expDeps: TS0, + }, { + sql: "select sum(sal) empno from emp where ename > 0 having empno = 2", + expSQL: "select sum(sal) as empno from emp where ename > 0 having sum(emp.sal) = 2", + expDeps: TS0, + }, { + // test with missing schema info + sql: "select foo, count(bar) as x from someTable group by foo having id > avg(baz)", + expErr: "Unknown column 'id' in 'having clause'", + }, { + sql: "select t1.foo as alias, count(bar) as x from t1 group by foo having foo+54 = 56", + expSQL: "select t1.foo as alias, count(bar) as x from t1 group by foo having foo + 54 = 56", + expDeps: TS0, + }, { + sql: "select 1 from t1 group by foo having foo = 1 and count(*) > 1", + expSQL: "select 1 from t1 group by foo having foo = 1 and count(*) > 1", + expDeps: TS0, + }} + for _, tcase := range tcases { t.Run(tcase.sql, func(t *testing.T) { ast, err := sqlparser.Parse(tcase.sql) require.NoError(t, err) selectStatement := ast.(*sqlparser.Select) - _, err = Analyze(selectStatement, cDB, schemaInfo) + semTbl, err := AnalyzeStrict(selectStatement, cDB, schemaInfo) if tcase.expErr == "" { require.NoError(t, err) assert.Equal(t, tcase.expSQL, sqlparser.String(selectStatement)) + assert.Equal(t, tcase.expDeps, semTbl.RecursiveDeps(selectStatement.Having.Expr)) + assert.Equal(t, tcase.warning, semTbl.Warning, "warning") } else { require.EqualError(t, err, tcase.expErr) } @@ -356,34 +602,132 @@ func TestOrderByGroupByLiteral(t *testing.T) { } } -func TestHavingAndOrderByColumnName(t *testing.T) { +func getSchemaWithKnownColumns() *FakeSI { schemaInfo := &FakeSI{ - Tables: map[string]*vindexes.Table{}, + Tables: map[string]*vindexes.Table{ + "t1": { + Keyspace: 
&vindexes.Keyspace{Name: "ks", Sharded: true}, + Name: sqlparser.NewIdentifierCS("t1"), + Columns: []vindexes.Column{{ + Name: sqlparser.NewIdentifierCI("id"), + Type: sqltypes.VarChar, + }, { + Name: sqlparser.NewIdentifierCI("foo"), + Type: sqltypes.VarChar, + }, { + Name: sqlparser.NewIdentifierCI("bar"), + Type: sqltypes.VarChar, + }}, + ColumnListAuthoritative: true, + }, + "emp": { + Keyspace: &vindexes.Keyspace{Name: "ks", Sharded: true}, + Name: sqlparser.NewIdentifierCS("emp"), + Columns: []vindexes.Column{{ + Name: sqlparser.NewIdentifierCI("empno"), + Type: sqltypes.Int64, + }, { + Name: sqlparser.NewIdentifierCI("ename"), + Type: sqltypes.VarChar, + }, { + Name: sqlparser.NewIdentifierCI("sal"), + Type: sqltypes.Int64, + }}, + ColumnListAuthoritative: true, + }, + }, } + return schemaInfo +} + +func TestOrderByColumnName(t *testing.T) { + schemaInfo := getSchemaWithKnownColumns() cDB := "db" tcases := []struct { - sql string - expSQL string - expErr string + sql string + expSQL string + expErr string + warning string + deps TableSet }{{ - sql: "select id, sum(foo) as sumOfFoo from t1 having sumOfFoo > 1", - expSQL: "select id, sum(foo) as sumOfFoo from t1 having sum(foo) > 1", - }, { sql: "select id, sum(foo) as sumOfFoo from t1 order by sumOfFoo", - expSQL: "select id, sum(foo) as sumOfFoo from t1 order by sum(foo) asc", - }, { - sql: "select id, sum(foo) as foo from t1 having sum(foo) > 1", - expSQL: "select id, sum(foo) as foo from t1 having sum(foo) > 1", + expSQL: "select id, sum(foo) as sumOfFoo from t1 order by sum(t1.foo) asc", + deps: TS0, + }, { + sql: "select id, sum(foo) as sumOfFoo from t1 order by sumOfFoo + 1", + expSQL: "select id, sum(foo) as sumOfFoo from t1 order by sum(t1.foo) + 1 asc", + deps: TS0, + }, { + sql: "select id, sum(foo) as sumOfFoo from t1 order by abs(sumOfFoo)", + expSQL: "select id, sum(foo) as sumOfFoo from t1 order by abs(sum(t1.foo)) asc", + deps: TS0, + }, { + sql: "select id, sum(foo) as sumOfFoo from t1 order 
by max(sumOfFoo)", + expErr: "Invalid use of group function", + }, { + sql: "select id, sum(foo) as foo from t1 order by foo + 1", + expSQL: "select id, sum(foo) as foo from t1 order by foo + 1 asc", + deps: TS0, + warning: "Column 'foo' in order by statement is ambiguous", + }, { + sql: "select id, sum(foo) as foo from t1 order by foo", + expSQL: "select id, sum(foo) as foo from t1 order by sum(t1.foo) asc", + deps: TS0, + warning: "Column 'foo' in order by statement is ambiguous", + }, { + sql: "select id, lower(min(foo)) as foo from t1 order by min(foo)", + expSQL: "select id, lower(min(foo)) as foo from t1 order by min(foo) asc", + deps: TS0, + warning: "Column 'foo' in order by statement is ambiguous", + }, { + sql: "select id, lower(min(foo)) as foo from t1 order by foo", + expSQL: "select id, lower(min(foo)) as foo from t1 order by lower(min(t1.foo)) asc", + deps: TS0, + warning: "Column 'foo' in order by statement is ambiguous", + }, { + sql: "select id, lower(min(foo)) as foo from t1 order by abs(foo)", + expSQL: "select id, lower(min(foo)) as foo from t1 order by abs(foo) asc", + deps: TS0, + warning: "Column 'foo' in order by statement is ambiguous", + }, { + sql: "select id, t1.bar as foo from t1 group by id order by min(foo)", + expSQL: "select id, t1.bar as foo from t1 group by id order by min(foo) asc", + deps: TS0, + warning: "Column 'foo' in order by statement is ambiguous", + }, { + sql: "select id, bar as id, count(*) from t1 order by id", + expErr: "Column 'id' in field list is ambiguous", + }, { + sql: "select id, id, count(*) from t1 order by id", + expSQL: "select id, id, count(*) from t1 order by t1.id asc", + deps: TS0, + warning: "Column 'id' in order by statement is ambiguous", + }, { + sql: "select id, count(distinct foo) k from t1 group by id order by k", + expSQL: "select id, count(distinct foo) as k from t1 group by id order by count(distinct t1.foo) asc", + deps: TS0, + warning: "Column 'id' in group statement is ambiguous", + }, { + 
sql: "select user.id as foo from user union select col from user_extra order by foo", + expSQL: "select `user`.id as foo from `user` union select col from user_extra order by foo asc", + deps: MergeTableSets(TS0, TS1), + }, { + sql: "select foo as X, sal as foo from t1, emp order by sum(X)", + expSQL: "select foo as X, sal as foo from t1, emp order by sum(t1.foo) asc", + deps: TS0, }} for _, tcase := range tcases { t.Run(tcase.sql, func(t *testing.T) { ast, err := sqlparser.Parse(tcase.sql) require.NoError(t, err) - selectStatement := ast.(*sqlparser.Select) - _, err = Analyze(selectStatement, cDB, schemaInfo) + selectStatement := ast.(sqlparser.SelectStatement) + semTable, err := AnalyzeStrict(selectStatement, cDB, schemaInfo) if tcase.expErr == "" { require.NoError(t, err) assert.Equal(t, tcase.expSQL, sqlparser.String(selectStatement)) + orderByExpr := selectStatement.GetOrderBy()[0].Expr + assert.Equal(t, tcase.deps, semTable.RecursiveDeps(orderByExpr)) + assert.Equal(t, tcase.warning, semTable.Warning) } else { require.EqualError(t, err, tcase.expErr) } @@ -448,7 +792,7 @@ func TestSemTableDependenciesAfterExpandStar(t *testing.T) { func TestRewriteNot(t *testing.T) { ks := &vindexes.Keyspace{ Name: "main", - Sharded: false, + Sharded: true, } schemaInfo := &FakeSI{ Tables: map[string]*vindexes.Table{ @@ -500,7 +844,7 @@ func TestRewriteNot(t *testing.T) { func TestConstantFolding(t *testing.T) { ks := &vindexes.Keyspace{ Name: "main", - Sharded: false, + Sharded: true, } schemaInfo := &FakeSI{ Tables: map[string]*vindexes.Table{ @@ -542,3 +886,27 @@ func TestConstantFolding(t *testing.T) { }) } } + +// TestDeleteTargetTableRewrite checks that delete target rewrite is done correctly. 
+func TestDeleteTargetTableRewrite(t *testing.T) { + cDB := "db" + tcases := []struct { + sql string + target string + }{{ + sql: "delete t2 from t t1, t t2", + target: "t2", + }, { + sql: "delete t2,t1 from t t1, t t2", + target: "t2, t1", + }} + for _, tcase := range tcases { + t.Run(tcase.sql, func(t *testing.T) { + ast, err := sqlparser.Parse(tcase.sql) + require.NoError(t, err) + _, err = Analyze(ast, cDB, fakeSchemaInfo()) + require.NoError(t, err) + require.Equal(t, tcase.target, sqlparser.String(ast.(*sqlparser.Delete).Targets)) + }) + } +} diff --git a/go/vt/vtgate/semantics/errors.go b/go/vt/vtgate/semantics/errors.go index 8d0b23d7f82..6e66a806543 100644 --- a/go/vt/vtgate/semantics/errors.go +++ b/go/vt/vtgate/semantics/errors.go @@ -51,6 +51,8 @@ type ( AmbiguousColumnError struct{ Column string } SubqueryColumnCountError struct{ Expected int } ColumnsMissingInSchemaError struct{} + InvalidUseOfGroupFunction struct{} + CantGroupOn struct{ Column string } UnsupportedMultiTablesInUpdateError struct { ExprCount int @@ -64,6 +66,10 @@ type ( Column *sqlparser.ColName Table *sqlparser.TableName } + ColumnNotFoundClauseError struct { + Column string + Clause string + } ) func eprintf(e error, format string, args ...any) string { @@ -261,3 +267,42 @@ func (e *ColumnsMissingInSchemaError) Error() string { func (e *ColumnsMissingInSchemaError) ErrorCode() vtrpcpb.Code { return vtrpcpb.Code_INVALID_ARGUMENT } + +// InvalidUserOfGroupFunction +func (*InvalidUseOfGroupFunction) Error() string { + return "Invalid use of group function" +} + +func (*InvalidUseOfGroupFunction) ErrorCode() vtrpcpb.Code { + return vtrpcpb.Code_INVALID_ARGUMENT +} + +func (*InvalidUseOfGroupFunction) ErrorState() vterrors.State { + return vterrors.InvalidGroupFuncUse +} + +// CantGroupOn +func (e *CantGroupOn) Error() string { + return vterrors.VT03005(e.Column).Error() +} + +func (*CantGroupOn) ErrorCode() vtrpcpb.Code { + return vtrpcpb.Code_INVALID_ARGUMENT +} + +func (e 
*CantGroupOn) ErrorState() vterrors.State { + return vterrors.VT03005(e.Column).State +} + +// ColumnNotFoundInGroupByError +func (e *ColumnNotFoundClauseError) Error() string { + return fmt.Sprintf("Unknown column '%s' in '%s'", e.Column, e.Clause) +} + +func (*ColumnNotFoundClauseError) ErrorCode() vtrpcpb.Code { + return vtrpcpb.Code_INVALID_ARGUMENT +} + +func (e *ColumnNotFoundClauseError) ErrorState() vterrors.State { + return vterrors.BadFieldError +} diff --git a/go/vt/vtgate/semantics/real_table.go b/go/vt/vtgate/semantics/real_table.go index bd57ab81474..2d41653bdf3 100644 --- a/go/vt/vtgate/semantics/real_table.go +++ b/go/vt/vtgate/semantics/real_table.go @@ -105,11 +105,8 @@ func vindexTableToColumnInfo(tbl *vindexes.Table) []ColumnInfo { cols := make([]ColumnInfo, 0, len(tbl.Columns)) for _, col := range tbl.Columns { collation := collations.DefaultCollationForType(col.Type) - if sqltypes.IsText(col.Type) { - coll, found := collations.Local().LookupID(col.CollationName) - if found { - collation = coll - } + if sqltypes.IsText(col.Type) && col.CollationName != "" { + collation, _ = collations.Local().LookupID(col.CollationName) } cols = append(cols, ColumnInfo{ diff --git a/go/vt/vtgate/semantics/scoper.go b/go/vt/vtgate/semantics/scoper.go index 5d27b31b84e..107966006f4 100644 --- a/go/vt/vtgate/semantics/scoper.go +++ b/go/vt/vtgate/semantics/scoper.go @@ -40,12 +40,15 @@ type ( } scope struct { - parent *scope - stmt sqlparser.Statement - tables []TableInfo - isUnion bool - joinUsing map[string]TableSet - stmtScope bool + parent *scope + stmt sqlparser.Statement + tables []TableInfo + isUnion bool + joinUsing map[string]TableSet + stmtScope bool + inGroupBy bool + inHaving bool + inHavingAggr bool } ) @@ -73,11 +76,20 @@ func (s *scoper) down(cursor *sqlparser.Cursor) error { return s.addColumnInfoForOrderBy(cursor, node) case sqlparser.GroupBy: return s.addColumnInfoForGroupBy(cursor, node) - case *sqlparser.Where: - if node.Type != 
sqlparser.HavingClause { + case sqlparser.AggrFunc: + if !s.currentScope().inHaving { break } - return s.createSpecialScopePostProjection(cursor.Parent()) + s.currentScope().inHavingAggr = true + case *sqlparser.Where: + if node.Type == sqlparser.HavingClause { + err := s.createSpecialScopePostProjection(cursor.Parent()) + if err != nil { + return err + } + s.currentScope().inHaving = true + return nil + } } return nil } @@ -87,10 +99,12 @@ func (s *scoper) addColumnInfoForGroupBy(cursor *sqlparser.Cursor, node sqlparse if err != nil { return err } + currentScope := s.currentScope() + currentScope.inGroupBy = true for _, expr := range node { lit := keepIntLiteral(expr) if lit != nil { - s.specialExprScopes[lit] = s.currentScope() + s.specialExprScopes[lit] = currentScope } } return nil @@ -194,6 +208,8 @@ func (s *scoper) up(cursor *sqlparser.Cursor) error { break } s.popScope() + case sqlparser.AggrFunc: + s.currentScope().inHavingAggr = false case sqlparser.TableExpr: if isParentSelect(cursor) { curScope := s.currentScope() diff --git a/go/vt/vtgate/semantics/semantic_state.go b/go/vt/vtgate/semantics/semantic_state.go index 0af935918f9..1ede4731edd 100644 --- a/go/vt/vtgate/semantics/semantic_state.go +++ b/go/vt/vtgate/semantics/semantic_state.go @@ -351,7 +351,12 @@ func (st *SemTable) NeedsWeightString(e sqlparser.Expr) bool { if !found { return true } - return typ.Collation == collations.Unknown && !sqltypes.IsNumber(typ.Type) + + if sqltypes.IsNumber(typ.Type) { + return false + } + + return !collations.Local().IsSupported(typ.Collation) } } @@ -434,11 +439,8 @@ func (st *SemTable) ColumnLookup(col *sqlparser.ColName) (int, error) { return 0, columnNotSupportedErr } -// SingleUnshardedKeyspace returns the single keyspace if all tables in the query are in the same, unsharded keyspace -func (st *SemTable) SingleUnshardedKeyspace() (*vindexes.Keyspace, []*vindexes.Table) { - var ks *vindexes.Keyspace - var tables []*vindexes.Table - for _, table := range 
st.Tables { +func singleUnshardedKeyspace(in []TableInfo) (ks *vindexes.Keyspace, tables []*vindexes.Table) { + for _, table := range in { vindexTable := table.GetVindexTable() if vindexTable == nil { @@ -479,7 +481,12 @@ func (st *SemTable) SingleUnshardedKeyspace() (*vindexes.Keyspace, []*vindexes.T } tables = append(tables, vindexTable) } - return ks, tables + return +} + +// SingleUnshardedKeyspace returns the single keyspace if all tables in the query are in the same, unsharded keyspace +func (st *SemTable) SingleUnshardedKeyspace() (*vindexes.Keyspace, []*vindexes.Table) { + return singleUnshardedKeyspace(st.Tables) } // EqualsExpr compares two expressions using the semantic analysis information. diff --git a/go/vt/vtgate/semantics/table_analyzer.go b/go/vt/vtgate/semantics/table_analyzer.go index d7377fa414a..8e2f5f6d2b9 100644 --- a/go/vt/vtgate/semantics/table_analyzer.go +++ b/go/vt/vtgate/semantics/table_analyzer.go @@ -20,7 +20,7 @@ import "vitess.io/vitess/go/vt/sqlparser" // TableAnalyze analyzes the parsed query. 
func TableAnalyze(statement sqlparser.Statement, currentDb string, si SchemaInformation) (*SemTable, error) { - analyzer := newAnalyzer(currentDb, newSchemaInfo(si)) + analyzer := newAnalyzer(currentDb, newSchemaInfo(si), false) // Analysis for initial scope err := analyzer.tableAnalyze(statement) @@ -29,12 +29,21 @@ func TableAnalyze(statement sqlparser.Statement, currentDb string, si SchemaInfo } // Creation of the semantic table - semTable := analyzer.newSemTable(statement, si.ConnCollation()) - - return semTable, nil + return analyzer.newSemTable(statement, si.ConnCollation()) } func (a *analyzer) tableAnalyze(statement sqlparser.Statement) error { + _ = sqlparser.Rewrite(statement, nil, a.earlyUp) + if a.err != nil { + return a.err + } + + if a.canShortCut(statement) { + return nil + } + + a.lateInit() + _ = sqlparser.Rewrite(statement, a.tableAnalyzeDown, a.tableAnalyzeUp) return a.err } @@ -54,8 +63,6 @@ func (a *analyzer) tableAnalyzeDown(cursor *sqlparser.Cursor) bool { a.setError(err) return true } - // log any warn in rewriting. 
- a.warning = a.rewriter.warning a.noteQuerySignature(cursor.Node()) diff --git a/go/vt/vtgate/semantics/table_collector.go b/go/vt/vtgate/semantics/table_collector.go index d6fd4c6efd6..c6af502cb22 100644 --- a/go/vt/vtgate/semantics/table_collector.go +++ b/go/vt/vtgate/semantics/table_collector.go @@ -33,17 +33,71 @@ type tableCollector struct { currentDb string org originable unionInfo map[*sqlparser.Union]unionInfo + done map[*sqlparser.AliasedTableExpr]TableInfo } -func newTableCollector(scoper *scoper, si SchemaInformation, currentDb string) *tableCollector { +type earlyTableCollector struct { + si SchemaInformation + currentDb string + Tables []TableInfo + done map[*sqlparser.AliasedTableExpr]TableInfo + withTables map[sqlparser.IdentifierCS]any +} + +func newEarlyTableCollector(si SchemaInformation, currentDb string) *earlyTableCollector { + return &earlyTableCollector{ + si: si, + currentDb: currentDb, + done: map[*sqlparser.AliasedTableExpr]TableInfo{}, + withTables: map[sqlparser.IdentifierCS]any{}, + } +} + +func (etc *earlyTableCollector) up(cursor *sqlparser.Cursor) { + switch node := cursor.Node().(type) { + case *sqlparser.AliasedTableExpr: + etc.visitAliasedTableExpr(node) + } +} + +func (etc *earlyTableCollector) visitAliasedTableExpr(aet *sqlparser.AliasedTableExpr) { + tbl, ok := aet.Expr.(sqlparser.TableName) + if !ok { + return + } + etc.handleTableName(tbl, aet) +} + +func (etc *earlyTableCollector) newTableCollector(scoper *scoper, org originable) *tableCollector { return &tableCollector{ + Tables: etc.Tables, scoper: scoper, - si: si, - currentDb: currentDb, + si: etc.si, + currentDb: etc.currentDb, unionInfo: map[*sqlparser.Union]unionInfo{}, + done: etc.done, + org: org, } } +func (etc *earlyTableCollector) handleTableName(tbl sqlparser.TableName, aet *sqlparser.AliasedTableExpr) { + if tbl.Qualifier.IsEmpty() { + _, isCTE := etc.withTables[tbl.Name] + if isCTE { + // no need to handle these tables here, we wait for the late phase 
instead + return + } + } + tableInfo, err := getTableInfo(aet, tbl, etc.si, etc.currentDb) + if err != nil { + // this could just be a CTE that we haven't processed, so we'll give it the benefit of the doubt for now + return + } + + etc.done[aet] = tableInfo + etc.Tables = append(etc.Tables, tableInfo) +} + func (tc *tableCollector) up(cursor *sqlparser.Cursor) error { switch node := cursor.Node().(type) { case *sqlparser.AliasedTableExpr: @@ -100,26 +154,43 @@ func (tc *tableCollector) visitAliasedTableExpr(node *sqlparser.AliasedTableExpr } case sqlparser.TableName: - var tbl *vindexes.Table - var vindex vindexes.Vindex - isInfSchema := sqlparser.SystemSchema(t.Qualifier.String()) - var err error - tbl, vindex, _, _, _, err = tc.si.FindTableOrVindex(t) - if err != nil && !isInfSchema { - // if we are dealing with a system table, it might not be available in the vschema, but that is OK + return tc.handleTableName(node, t) + } + return nil +} + +func (tc *tableCollector) handleTableName(node *sqlparser.AliasedTableExpr, t sqlparser.TableName) (err error) { + var tableInfo TableInfo + var found bool + + tableInfo, found = tc.done[node] + if !found { + tableInfo, err = getTableInfo(node, t, tc.si, tc.currentDb) + if err != nil { return err } - if tbl == nil && vindex != nil { - tbl = newVindexTable(t.Name) - } + tc.Tables = append(tc.Tables, tableInfo) + } - scope := tc.scoper.currentScope() - tableInfo := tc.createTable(t, node, tbl, isInfSchema, vindex) + scope := tc.scoper.currentScope() + return scope.addTable(tableInfo) +} - tc.Tables = append(tc.Tables, tableInfo) - return scope.addTable(tableInfo) +func getTableInfo(node *sqlparser.AliasedTableExpr, t sqlparser.TableName, si SchemaInformation, currentDb string) (TableInfo, error) { + var tbl *vindexes.Table + var vindex vindexes.Vindex + isInfSchema := sqlparser.SystemSchema(t.Qualifier.String()) + var err error + tbl, vindex, _, _, _, err = si.FindTableOrVindex(t) + if err != nil && !isInfSchema { + // if we 
are dealing with a system table, it might not be available in the vschema, but that is OK + return nil, err } - return nil + if tbl == nil && vindex != nil { + tbl = newVindexTable(t.Name) + } + + return createTable(t, node, tbl, isInfSchema, vindex, currentDb), nil } func (tc *tableCollector) addSelectDerivedTable(sel *sqlparser.Select, node *sqlparser.AliasedTableExpr) error { @@ -207,12 +278,13 @@ func (tc *tableCollector) tableInfoFor(id TableSet) (TableInfo, error) { return tc.Tables[offset], nil } -func (tc *tableCollector) createTable( +func createTable( t sqlparser.TableName, alias *sqlparser.AliasedTableExpr, tbl *vindexes.Table, isInfSchema bool, vindex vindexes.Vindex, + currentDb string, ) TableInfo { table := &RealTable{ tableName: alias.As.String(), @@ -224,7 +296,7 @@ func (tc *tableCollector) createTable( if alias.As.IsEmpty() { dbName := t.Qualifier.String() if dbName == "" { - dbName = tc.currentDb + dbName = currentDb } table.dbName = dbName diff --git a/go/vt/vtgate/semantics/typer_test.go b/go/vt/vtgate/semantics/typer_test.go index 4c77e6f5657..8933b483a2e 100644 --- a/go/vt/vtgate/semantics/typer_test.go +++ b/go/vt/vtgate/semantics/typer_test.go @@ -21,6 +21,7 @@ import ( "github.com/stretchr/testify/require" + "vitess.io/vitess/go/mysql/collations/colldata" querypb "vitess.io/vitess/go/vt/proto/query" "vitess.io/vitess/go/vt/sqlparser" ) @@ -54,5 +55,40 @@ func TestNormalizerAndSemanticAnalysisIntegration(t *testing.T) { require.Equal(t, test.typ, typ.Type.String()) }) } +} + +// Tests that the types correctly picks up and sets the collation on columns +func TestColumnCollations(t *testing.T) { + tests := []struct { + query, collation string + }{ + {query: "select textcol from t2"}, + {query: "select name from t2", collation: "utf8mb3_bin"}, + } + + for _, test := range tests { + t.Run(test.query, func(t *testing.T) { + parse, err := sqlparser.Parse(test.query) + require.NoError(t, err) + err = sqlparser.Normalize(parse, 
sqlparser.NewReservedVars("bv", sqlparser.BindVars{}), map[string]*querypb.BindVariable{}) + require.NoError(t, err) + + st, err := Analyze(parse, "d", fakeSchemaInfo()) + require.NoError(t, err) + col := extract(parse.(*sqlparser.Select), 0) + typ, coll, found := st.TypeForExpr(col) + require.True(t, found, "column was not typed") + + require.Equal(t, "VARCHAR", typ.String()) + collation := colldata.Lookup(coll) + if test.collation != "" { + collation := colldata.Lookup(coll) + require.NotNil(t, collation) + require.Equal(t, test.collation, collation.Name()) + } else { + require.Nil(t, collation) + } + }) + } } diff --git a/go/vt/vtgate/semantics/vtable.go b/go/vt/vtgate/semantics/vtable.go index ce7efe22371..ebb72e87b2a 100644 --- a/go/vt/vtgate/semantics/vtable.go +++ b/go/vt/vtgate/semantics/vtable.go @@ -42,10 +42,25 @@ func (v *vTableInfo) dependencies(colName string, org originable) (dependencies, if name != colName { continue } - directDeps, recursiveDeps, qt := org.depsForExpr(v.cols[i]) + deps = deps.merge(v.createCertainForCol(org, i), false) + } + if deps.empty() && v.hasStar() { + return createUncertain(v.tables, v.tables), nil + } + return deps, nil +} - newDeps := createCertain(directDeps, recursiveDeps, qt) - deps = deps.merge(newDeps, false) +func (v *vTableInfo) dependenciesInGroupBy(colName string, org originable) (dependencies, error) { + // this method is consciously very similar to vTableInfo.dependencies and should remain so + var deps dependencies = ¬hing{} + for i, name := range v.columnNames { + if name != colName { + continue + } + if sqlparser.ContainsAggregation(v.cols[i]) { + return nil, &CantGroupOn{name} + } + deps = deps.merge(v.createCertainForCol(org, i), false) } if deps.empty() && v.hasStar() { return createUncertain(v.tables, v.tables), nil @@ -53,6 +68,12 @@ func (v *vTableInfo) dependencies(colName string, org originable) (dependencies, return deps, nil } +func (v *vTableInfo) createCertainForCol(org originable, i int) 
*certain { + directDeps, recursiveDeps, qt := org.depsForExpr(v.cols[i]) + newDeps := createCertain(directDeps, recursiveDeps, qt) + return newDeps +} + // IsInfSchema implements the TableInfo interface func (v *vTableInfo) IsInfSchema() bool { return false diff --git a/go/vt/vtgate/vindexes/vschema.go b/go/vt/vtgate/vindexes/vschema.go index 04f17550da5..9a652ceb696 100644 --- a/go/vt/vtgate/vindexes/vschema.go +++ b/go/vt/vtgate/vindexes/vschema.go @@ -25,6 +25,9 @@ import ( "strings" "time" + "vitess.io/vitess/go/vt/sqlparser" + "vitess.io/vitess/go/vt/vterrors" + querypb "vitess.io/vitess/go/vt/proto/query" topodatapb "vitess.io/vitess/go/vt/proto/topodata" vschemapb "vitess.io/vitess/go/vt/proto/vschema" @@ -33,8 +36,6 @@ import ( "vitess.io/vitess/go/json2" "vitess.io/vitess/go/sqlescape" "vitess.io/vitess/go/sqltypes" - "vitess.io/vitess/go/vt/sqlparser" - "vitess.io/vitess/go/vt/vterrors" ) // TabletTypeSuffix maps the tablet type to its suffix string. diff --git a/go/vt/vtgate/vschema_manager.go b/go/vt/vtgate/vschema_manager.go index 11830cfc7d4..748b1e4a512 100644 --- a/go/vt/vtgate/vschema_manager.go +++ b/go/vt/vtgate/vschema_manager.go @@ -22,6 +22,7 @@ import ( "vitess.io/vitess/go/vt/log" topodatapb "vitess.io/vitess/go/vt/proto/topodata" + "vitess.io/vitess/go/vt/schema" "vitess.io/vitess/go/vt/sqlparser" "vitess.io/vitess/go/vt/srvtopo" "vitess.io/vitess/go/vt/topo" @@ -213,6 +214,10 @@ func (vm *VSchemaManager) updateFromSchema(vschema *vindexes.VSchema) { // in the tables. for tblName, tblInfo := range m { for _, fkDef := range tblInfo.ForeignKeys { + // Ignore internal tables as part of foreign key references. 
+ if schema.IsInternalOperationTableName(fkDef.ReferenceDefinition.ReferencedTable.Name.String()) { + continue + } parentTbl, err := vschema.FindRoutedTable(ksName, fkDef.ReferenceDefinition.ReferencedTable.Name.String(), topodatapb.TabletType_PRIMARY) if err != nil { log.Errorf("error finding parent table %s: %v", fkDef.ReferenceDefinition.ReferencedTable.Name.String(), err) diff --git a/go/vt/vtgate/vschema_manager_test.go b/go/vt/vtgate/vschema_manager_test.go index 6e7a9a9a2d1..8491b19f9a2 100644 --- a/go/vt/vtgate/vschema_manager_test.go +++ b/go/vt/vtgate/vschema_manager_test.go @@ -372,6 +372,100 @@ func TestRebuildVSchema(t *testing.T) { } } +// TestVSchemaUpdateWithFKReferenceToInternalTables tests that any internal table as part of fk reference is ignored. +func TestVSchemaUpdateWithFKReferenceToInternalTables(t *testing.T) { + ks := &vindexes.Keyspace{Name: "ks"} + cols1 := []vindexes.Column{{ + Name: sqlparser.NewIdentifierCI("id"), + Type: querypb.Type_INT64, + }} + sqlparserCols1 := sqlparser.MakeColumns("id") + + vindexTable_t1 := &vindexes.Table{ + Name: sqlparser.NewIdentifierCS("t1"), + Keyspace: ks, + Columns: cols1, + ColumnListAuthoritative: true, + } + vindexTable_t2 := &vindexes.Table{ + Name: sqlparser.NewIdentifierCS("t2"), + Keyspace: ks, + Columns: cols1, + ColumnListAuthoritative: true, + } + + vindexTable_t1.ChildForeignKeys = append(vindexTable_t1.ChildForeignKeys, vindexes.ChildFKInfo{ + Table: vindexTable_t2, + ChildColumns: sqlparserCols1, + ParentColumns: sqlparserCols1, + OnDelete: sqlparser.SetNull, + OnUpdate: sqlparser.Cascade, + }) + vindexTable_t2.ParentForeignKeys = append(vindexTable_t2.ParentForeignKeys, vindexes.ParentFKInfo{ + Table: vindexTable_t1, + ChildColumns: sqlparserCols1, + ParentColumns: sqlparserCols1, + }) + + vm := &VSchemaManager{} + var vs *vindexes.VSchema + vm.subscriber = func(vschema *vindexes.VSchema, _ *VSchemaStats) { + vs = vschema + vs.ResetCreated() + } + vm.schema = &fakeSchema{t: 
map[string]*vindexes.TableInfo{ + "t1": {Columns: cols1}, + "t2": { + Columns: cols1, + ForeignKeys: []*sqlparser.ForeignKeyDefinition{ + createFkDefinition([]string{"id"}, "t1", []string{"id"}, sqlparser.Cascade, sqlparser.SetNull), + createFkDefinition([]string{"id"}, "_vt_HOLD_6ace8bcef73211ea87e9f875a4d24e90_20200915120410", []string{"id"}, sqlparser.Cascade, sqlparser.SetNull), + }, + }, + }} + vm.VSchemaUpdate(&vschemapb.SrvVSchema{ + Keyspaces: map[string]*vschemapb.Keyspace{ + "ks": { + ForeignKeyMode: vschemapb.Keyspace_managed, + Tables: map[string]*vschemapb.Table{ + "t1": {Columns: []*vschemapb.Column{{Name: "id", Type: querypb.Type_INT64}}}, + "t2": {Columns: []*vschemapb.Column{{Name: "id", Type: querypb.Type_INT64}}}, + }, + }, + }, + }, nil) + + utils.MustMatchFn(".globalTables", ".uniqueVindexes")(t, &vindexes.VSchema{ + RoutingRules: map[string]*vindexes.RoutingRule{}, + Keyspaces: map[string]*vindexes.KeyspaceSchema{ + "ks": { + Keyspace: ks, + ForeignKeyMode: vschemapb.Keyspace_managed, + Vindexes: map[string]vindexes.Vindex{}, + Tables: map[string]*vindexes.Table{ + "t1": vindexTable_t1, + "t2": vindexTable_t2, + }, + }, + }, + }, vs) + utils.MustMatch(t, vs, vm.currentVschema, "currentVschema should have same reference as Vschema") +} + +// createFkDefinition is a helper function to create a Foreign key definition struct from the columns used in it provided as list of strings. 
+func createFkDefinition(childCols []string, parentTableName string, parentCols []string, onUpdate, onDelete sqlparser.ReferenceAction) *sqlparser.ForeignKeyDefinition { + pKs, pTbl, _ := sqlparser.ParseTable(parentTableName) + return &sqlparser.ForeignKeyDefinition{ + Source: sqlparser.MakeColumns(childCols...), + ReferenceDefinition: &sqlparser.ReferenceDefinition{ + ReferencedTable: sqlparser.NewTableNameWithQualifier(pTbl, pKs), + ReferencedColumns: sqlparser.MakeColumns(parentCols...), + OnUpdate: onUpdate, + OnDelete: onDelete, + }, + } +} + func makeTestVSchema(ks string, sharded bool, tbls map[string]*vindexes.Table) *vindexes.VSchema { keyspaceSchema := &vindexes.KeyspaceSchema{ Keyspace: &vindexes.Keyspace{ diff --git a/go/vt/vttablet/endtoend/rpc_test.go b/go/vt/vttablet/endtoend/rpc_test.go index a186d444f8d..e24137e1340 100644 --- a/go/vt/vttablet/endtoend/rpc_test.go +++ b/go/vt/vttablet/endtoend/rpc_test.go @@ -169,6 +169,25 @@ func TestGetSchemaRPC(t *testing.T) { }, getSchemaQueryType: querypb.SchemaTableType_ALL, getSchemaTables: []string{"vitess_temp1", "vitess_temp3", "unknown_table", "vitess_view3", "vitess_view1", "unknown_view"}, + }, { + name: "Create some internal tables", + queries: []string{ + "create table if not exists _vt_HOLD_6ace8bcef73211ea87e9f875a4d24e90_20200915120410(id bigint primary key);", + "create table vitess_temp1 (eid int);", + "create view vitess_view1 as select eid from vitess_a", + }, + deferQueries: []string{ + "drop table _vt_HOLD_6ace8bcef73211ea87e9f875a4d24e90_20200915120410", + "drop table vitess_temp1", + "drop view vitess_view1", + }, + mapToExpect: map[string]string{ + "vitess_view1": "CREATE ALGORITHM=UNDEFINED DEFINER=`vt_dba`@`localhost` SQL SECURITY DEFINER VIEW `vitess_view1` AS select `vitess_a`.`eid` AS `eid` from `vitess_a`", + "vitess_temp1": "CREATE TABLE `vitess_temp1` (\n `eid` int DEFAULT NULL\n) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci", + // These shouldn't be part of 
the result, so we verify it is empty. + "_vt_HOLD_6ace8bcef73211ea87e9f875a4d24e90_20200915120410": "", + }, + getSchemaQueryType: querypb.SchemaTableType_ALL, }, } diff --git a/go/vt/vttablet/grpctabletconn/conn.go b/go/vt/vttablet/grpctabletconn/conn.go index f73e379fc63..969f8eb1b33 100644 --- a/go/vt/vttablet/grpctabletconn/conn.go +++ b/go/vt/vttablet/grpctabletconn/conn.go @@ -473,6 +473,10 @@ func (conn *gRPCQueryClient) BeginExecute(ctx context.Context, target *querypb.T // BeginStreamExecute starts a transaction and runs an Execute. func (conn *gRPCQueryClient) BeginStreamExecute(ctx context.Context, target *querypb.Target, preQueries []string, query string, bindVars map[string]*querypb.BindVariable, reservedID int64, options *querypb.ExecuteOptions, callback func(*sqltypes.Result) error) (state queryservice.TransactionState, err error) { + // Please see comments in StreamExecute to see how this works. + ctx, cancel := context.WithCancel(ctx) + defer cancel() + conn.mu.RLock() defer conn.mu.RUnlock() if conn.cc == nil { @@ -650,6 +654,9 @@ func (conn *gRPCQueryClient) StreamHealth(ctx context.Context, callback func(*qu // VStream starts a VReplication stream. func (conn *gRPCQueryClient) VStream(ctx context.Context, request *binlogdatapb.VStreamRequest, send func([]*binlogdatapb.VEvent) error) error { + // Please see comments in StreamExecute to see how this works. + ctx, cancel := context.WithCancel(ctx) + defer cancel() stream, err := func() (queryservicepb.Query_VStreamClient, error) { conn.mu.RLock() defer conn.mu.RUnlock() @@ -695,6 +702,9 @@ func (conn *gRPCQueryClient) VStream(ctx context.Context, request *binlogdatapb. // VStreamRows streams rows of a query from the specified starting point. func (conn *gRPCQueryClient) VStreamRows(ctx context.Context, request *binlogdatapb.VStreamRowsRequest, send func(*binlogdatapb.VStreamRowsResponse) error) error { + // Please see comments in StreamExecute to see how this works. 
+ ctx, cancel := context.WithCancel(ctx) + defer cancel() stream, err := func() (queryservicepb.Query_VStreamRowsClient, error) { conn.mu.RLock() defer conn.mu.RUnlock() @@ -737,6 +747,9 @@ func (conn *gRPCQueryClient) VStreamRows(ctx context.Context, request *binlogdat // VStreamTables streams rows of a query from the specified starting point. func (conn *gRPCQueryClient) VStreamTables(ctx context.Context, request *binlogdatapb.VStreamTablesRequest, send func(*binlogdatapb.VStreamTablesResponse) error) error { + // Please see comments in StreamExecute to see how this works. + ctx, cancel := context.WithCancel(ctx) + defer cancel() stream, err := func() (queryservicepb.Query_VStreamTablesClient, error) { conn.mu.RLock() defer conn.mu.RUnlock() @@ -777,6 +790,9 @@ func (conn *gRPCQueryClient) VStreamTables(ctx context.Context, request *binlogd // VStreamResults streams rows of a query from the specified starting point. func (conn *gRPCQueryClient) VStreamResults(ctx context.Context, target *querypb.Target, query string, send func(*binlogdatapb.VStreamResultsResponse) error) error { + // Please see comments in StreamExecute to see how this works. + ctx, cancel := context.WithCancel(ctx) + defer cancel() stream, err := func() (queryservicepb.Query_VStreamResultsClient, error) { conn.mu.RLock() defer conn.mu.RUnlock() @@ -856,6 +872,9 @@ func (conn *gRPCQueryClient) ReserveBeginExecute(ctx context.Context, target *qu // ReserveBeginStreamExecute implements the queryservice interface func (conn *gRPCQueryClient) ReserveBeginStreamExecute(ctx context.Context, target *querypb.Target, preQueries []string, postBeginQueries []string, sql string, bindVariables map[string]*querypb.BindVariable, options *querypb.ExecuteOptions, callback func(*sqltypes.Result) error) (state queryservice.ReservedTransactionState, err error) { + // Please see comments in StreamExecute to see how this works. 
+ ctx, cancel := context.WithCancel(ctx) + defer cancel() conn.mu.RLock() defer conn.mu.RUnlock() if conn.cc == nil { @@ -967,6 +986,9 @@ func (conn *gRPCQueryClient) ReserveExecute(ctx context.Context, target *querypb // ReserveStreamExecute implements the queryservice interface func (conn *gRPCQueryClient) ReserveStreamExecute(ctx context.Context, target *querypb.Target, preQueries []string, sql string, bindVariables map[string]*querypb.BindVariable, transactionID int64, options *querypb.ExecuteOptions, callback func(*sqltypes.Result) error) (state queryservice.ReservedState, err error) { + // Please see comments in StreamExecute to see how this works. + ctx, cancel := context.WithCancel(ctx) + defer cancel() conn.mu.RLock() defer conn.mu.RUnlock() if conn.cc == nil { @@ -1060,6 +1082,9 @@ func (conn *gRPCQueryClient) Release(ctx context.Context, target *querypb.Target // GetSchema implements the queryservice interface func (conn *gRPCQueryClient) GetSchema(ctx context.Context, target *querypb.Target, tableType querypb.SchemaTableType, tableNames []string, callback func(schemaRes *querypb.GetSchemaResponse) error) error { + // Please see comments in StreamExecute to see how this works. + ctx, cancel := context.WithCancel(ctx) + defer cancel() conn.mu.RLock() defer conn.mu.RUnlock() if conn.cc == nil { diff --git a/go/vt/vttablet/grpctabletconn/conn_test.go b/go/vt/vttablet/grpctabletconn/conn_test.go index fb182bfe2e4..70e30e337bc 100644 --- a/go/vt/vttablet/grpctabletconn/conn_test.go +++ b/go/vt/vttablet/grpctabletconn/conn_test.go @@ -17,13 +17,21 @@ limitations under the License. 
package grpctabletconn import ( + "context" + "fmt" "io" "net" "os" + "sync" "testing" + "github.com/stretchr/testify/require" "google.golang.org/grpc" + "vitess.io/vitess/go/sqltypes" + binlogdatapb "vitess.io/vitess/go/vt/proto/binlogdata" + querypb "vitess.io/vitess/go/vt/proto/query" + queryservicepb "vitess.io/vitess/go/vt/proto/queryservice" "vitess.io/vitess/go/vt/servenv" "vitess.io/vitess/go/vt/vttablet/grpcqueryservice" "vitess.io/vitess/go/vt/vttablet/tabletconntest" @@ -113,3 +121,111 @@ func TestGRPCTabletAuthConn(t *testing.T) { }, }, service, f) } + +// mockQueryClient is a mock query client that returns an error from Streaming calls, +// but only after storing the context that was passed to the RPC. +type mockQueryClient struct { + lastCallCtx context.Context + queryservicepb.QueryClient +} + +func (m *mockQueryClient) StreamExecute(ctx context.Context, in *querypb.StreamExecuteRequest, opts ...grpc.CallOption) (queryservicepb.Query_StreamExecuteClient, error) { + m.lastCallCtx = ctx + return nil, fmt.Errorf("A general error") +} + +func (m *mockQueryClient) BeginStreamExecute(ctx context.Context, in *querypb.BeginStreamExecuteRequest, opts ...grpc.CallOption) (queryservicepb.Query_BeginStreamExecuteClient, error) { + m.lastCallCtx = ctx + return nil, fmt.Errorf("A general error") +} + +func (m *mockQueryClient) ReserveStreamExecute(ctx context.Context, in *querypb.ReserveStreamExecuteRequest, opts ...grpc.CallOption) (queryservicepb.Query_ReserveStreamExecuteClient, error) { + m.lastCallCtx = ctx + return nil, fmt.Errorf("A general error") +} + +func (m *mockQueryClient) ReserveBeginStreamExecute(ctx context.Context, in *querypb.ReserveBeginStreamExecuteRequest, opts ...grpc.CallOption) (queryservicepb.Query_ReserveBeginStreamExecuteClient, error) { + m.lastCallCtx = ctx + return nil, fmt.Errorf("A general error") +} + +func (m *mockQueryClient) VStream(ctx context.Context, in *binlogdatapb.VStreamRequest, opts ...grpc.CallOption) 
(queryservicepb.Query_VStreamClient, error) { + m.lastCallCtx = ctx + return nil, fmt.Errorf("A general error") +} + +func (m *mockQueryClient) VStreamRows(ctx context.Context, in *binlogdatapb.VStreamRowsRequest, opts ...grpc.CallOption) (queryservicepb.Query_VStreamRowsClient, error) { + m.lastCallCtx = ctx + return nil, fmt.Errorf("A general error") +} + +func (m *mockQueryClient) VStreamTables(ctx context.Context, in *binlogdatapb.VStreamTablesRequest, opts ...grpc.CallOption) (queryservicepb.Query_VStreamTablesClient, error) { + m.lastCallCtx = ctx + return nil, fmt.Errorf("A general error") +} + +func (m *mockQueryClient) VStreamResults(ctx context.Context, in *binlogdatapb.VStreamResultsRequest, opts ...grpc.CallOption) (queryservicepb.Query_VStreamResultsClient, error) { + m.lastCallCtx = ctx + return nil, fmt.Errorf("A general error") +} + +func (m *mockQueryClient) GetSchema(ctx context.Context, in *querypb.GetSchemaRequest, opts ...grpc.CallOption) (queryservicepb.Query_GetSchemaClient, error) { + m.lastCallCtx = ctx + return nil, fmt.Errorf("A general error") +} + +var _ queryservicepb.QueryClient = (*mockQueryClient)(nil) + +// TestGoRoutineLeakPrevention tests that after all the RPCs that stream queries, we end up closing the context that was passed to it, to prevent go routines from being leaked. 
+func TestGoRoutineLeakPrevention(t *testing.T) { + mqc := &mockQueryClient{} + qc := &gRPCQueryClient{ + mu: sync.RWMutex{}, + cc: &grpc.ClientConn{}, + c: mqc, + } + _ = qc.StreamExecute(context.Background(), nil, "", nil, 0, 0, nil, func(result *sqltypes.Result) error { + return nil + }) + require.Error(t, mqc.lastCallCtx.Err()) + + _, _ = qc.BeginStreamExecute(context.Background(), nil, nil, "", nil, 0, nil, func(result *sqltypes.Result) error { + return nil + }) + require.Error(t, mqc.lastCallCtx.Err()) + + _, _ = qc.ReserveBeginStreamExecute(context.Background(), nil, nil, nil, "", nil, nil, func(result *sqltypes.Result) error { + return nil + }) + require.Error(t, mqc.lastCallCtx.Err()) + + _, _ = qc.ReserveStreamExecute(context.Background(), nil, nil, "", nil, 0, nil, func(result *sqltypes.Result) error { + return nil + }) + require.Error(t, mqc.lastCallCtx.Err()) + + _ = qc.VStream(context.Background(), &binlogdatapb.VStreamRequest{}, func(events []*binlogdatapb.VEvent) error { + return nil + }) + require.Error(t, mqc.lastCallCtx.Err()) + + _ = qc.VStreamRows(context.Background(), &binlogdatapb.VStreamRowsRequest{}, func(response *binlogdatapb.VStreamRowsResponse) error { + return nil + }) + require.Error(t, mqc.lastCallCtx.Err()) + + _ = qc.VStreamResults(context.Background(), nil, "", func(response *binlogdatapb.VStreamResultsResponse) error { + return nil + }) + require.Error(t, mqc.lastCallCtx.Err()) + + _ = qc.VStreamTables(context.Background(), &binlogdatapb.VStreamTablesRequest{}, func(response *binlogdatapb.VStreamTablesResponse) error { + return nil + }) + require.Error(t, mqc.lastCallCtx.Err()) + + _ = qc.GetSchema(context.Background(), nil, querypb.SchemaTableType_TABLES, nil, func(schemaRes *querypb.GetSchemaResponse) error { + return nil + }) + require.Error(t, mqc.lastCallCtx.Err()) +} diff --git a/go/vt/vttablet/grpctmclient/client.go b/go/vt/vttablet/grpctmclient/client.go index 0068ed74706..8759835c7d6 100644 --- 
a/go/vt/vttablet/grpctmclient/client.go +++ b/go/vt/vttablet/grpctmclient/client.go @@ -55,7 +55,7 @@ var ( ) func registerFlags(fs *pflag.FlagSet) { - fs.IntVar(&concurrency, "tablet_manager_grpc_concurrency", concurrency, "concurrency to use to talk to a vttablet server for performance-sensitive RPCs (like ExecuteFetchAs{Dba,AllPrivs,App})") + fs.IntVar(&concurrency, "tablet_manager_grpc_concurrency", concurrency, "concurrency to use to talk to a vttablet server for performance-sensitive RPCs (like ExecuteFetchAs{Dba,App} and CheckThrottler)") fs.StringVar(&cert, "tablet_manager_grpc_cert", cert, "the cert to use to connect") fs.StringVar(&key, "tablet_manager_grpc_key", key, "the key to use to connect") fs.StringVar(&ca, "tablet_manager_grpc_ca", ca, "the server ca to use to validate servers when connecting") @@ -94,10 +94,9 @@ type tmc struct { // grpcClient implements both dialer and poolDialer. type grpcClient struct { - // This cache of connections is to maximize QPS for ExecuteFetch. - // Note we'll keep the clients open and close them upon Close() only. - // But that's OK because usually the tasks that use them are - // one-purpose only. + // This cache of connections is to maximize QPS for ExecuteFetchAs{Dba,App} and + // CheckThrottler. Note we'll keep the clients open and close them upon Close() only. + // But that's OK because usually the tasks that use them are one-purpose only. // The map is protected by the mutex. mu sync.Mutex rpcClientMap map[string]chan *tmc @@ -115,16 +114,17 @@ type poolDialer interface { // Client implements tmclient.TabletManagerClient. // // Connections are produced by the dialer implementation, which is either the -// grpcClient implementation, which reuses connections only for ExecuteFetch and -// otherwise makes single-purpose connections that are closed after use. 
+// grpcClient implementation, which reuses connections only for ExecuteFetchAs{Dba,App} +// and CheckThrottler, otherwise making single-purpose connections that are closed +// after use. // // In order to more efficiently use the underlying tcp connections, you can // instead use the cachedConnDialer implementation by specifying // -// -tablet_manager_protocol "grpc-cached" +// --tablet_manager_protocol "grpc-cached" // -// The cachedConnDialer keeps connections to up to -tablet_manager_grpc_connpool_size distinct -// tablets open at any given time, for faster per-RPC call time, and less +// The cachedConnDialer keeps connections to up to --tablet_manager_grpc_connpool_size +// distinct tablets open at any given time, for faster per-RPC call time, and less // connection churn. type Client struct { dialer dialer @@ -1002,12 +1002,29 @@ func (client *Client) Backup(ctx context.Context, tablet *topodatapb.Tablet, req } // CheckThrottler is part of the tmclient.TabletManagerClient interface. +// It always tries to use a cached client via the dialer pool as this is +// called very frequently between tablets when the throttler is enabled in +// a keyspace and the overhead of creating a new gRPC connection/channel +// and dialing the other tablet every time is not practical. 
func (client *Client) CheckThrottler(ctx context.Context, tablet *topodatapb.Tablet, req *tabletmanagerdatapb.CheckThrottlerRequest) (*tabletmanagerdatapb.CheckThrottlerResponse, error) { - c, closer, err := client.dialer.dial(ctx, tablet) - if err != nil { - return nil, err + var c tabletmanagerservicepb.TabletManagerClient + var err error + if poolDialer, ok := client.dialer.(poolDialer); ok { + c, err = poolDialer.dialPool(ctx, tablet) + if err != nil { + return nil, err + } } - defer closer.Close() + + if c == nil { + var closer io.Closer + c, closer, err = client.dialer.dial(ctx, tablet) + if err != nil { + return nil, err + } + defer closer.Close() + } + response, err := c.CheckThrottler(ctx, req) if err != nil { return nil, err diff --git a/go/vt/vttablet/onlineddl/vrepl.go b/go/vt/vttablet/onlineddl/vrepl.go index cc669e11c11..50f9eb838d0 100644 --- a/go/vt/vttablet/onlineddl/vrepl.go +++ b/go/vt/vttablet/onlineddl/vrepl.go @@ -434,11 +434,26 @@ func (v *VRepl) analyzeTables(ctx context.Context, conn *dbconnpool.DBConnection for i := range v.sourceSharedColumns.Columns() { sourceColumn := v.sourceSharedColumns.Columns()[i] mappedColumn := v.targetSharedColumns.Columns()[i] - if sourceColumn.Type == vrepl.EnumColumnType && mappedColumn.Type != vrepl.EnumColumnType && mappedColumn.Charset != "" { - // A column is converted from ENUM type to textual type - v.targetSharedColumns.SetEnumToTextConversion(mappedColumn.Name, sourceColumn.EnumValues) - v.enumToTextMap[sourceColumn.Name] = sourceColumn.EnumValues + if sourceColumn.Type == vrepl.EnumColumnType { + switch { + // Either this is an ENUM column that stays an ENUM, or it is converted to a textual type. + // We take note of the enum values, and make it available in vreplication's Filter.Rule.ConvertEnumToText. + // This, in turn, will be used by vplayer (in TablePlan) like so: + // - In the binary log, enum values are integers. 
+ // - Upon seeing this map, PlanBuilder will convert said int to the enum's logical string value. + // - And will apply the value as a string (`StringBindVariable`) in the query. + // What this allows is for enum values to have different ordering in the before/after table schema, + // so that for example you could modify an enum column: + // - from `('red', 'green', 'blue')` to `('red', 'blue')` + // - from `('red', 'green', 'blue')` to `('blue', 'red', 'green')` + case mappedColumn.Type == vrepl.EnumColumnType: + v.enumToTextMap[sourceColumn.Name] = sourceColumn.EnumValues + case mappedColumn.Charset != "": + v.enumToTextMap[sourceColumn.Name] = sourceColumn.EnumValues + v.targetSharedColumns.SetEnumToTextConversion(mappedColumn.Name, sourceColumn.EnumValues) + } } + if sourceColumn.IsIntegralType() && mappedColumn.Type == vrepl.EnumColumnType { v.intToEnumMap[sourceColumn.Name] = true } diff --git a/go/vt/vttablet/onlineddl/vrepl/columns.go b/go/vt/vttablet/onlineddl/vrepl/columns.go index 2937b1b2b2c..f2bb8f6d3f2 100644 --- a/go/vt/vttablet/onlineddl/vrepl/columns.go +++ b/go/vt/vttablet/onlineddl/vrepl/columns.go @@ -129,7 +129,7 @@ func isExpandedColumn(sourceColumn *Column, targetColumn *Column) (bool, string) return true, "source is unsigned, target is signed" } if sourceColumn.NumericPrecision <= targetColumn.NumericPrecision && !sourceColumn.IsUnsigned && targetColumn.IsUnsigned { - // e.g. INT SIGNED => INT UNSIGNED, INT SIGNED = BIGINT UNSIGNED + // e.g. 
INT SIGNED => INT UNSIGNED, INT SIGNED => BIGINT UNSIGNED return true, "target unsigned value exceeds source unsigned value" } if targetColumn.IsFloatingPoint() && !sourceColumn.IsFloatingPoint() { diff --git a/go/vt/vttablet/onlineddl/vrepl/columns_test.go b/go/vt/vttablet/onlineddl/vrepl/columns_test.go index b4d3ac9af58..201ffe55201 100644 --- a/go/vt/vttablet/onlineddl/vrepl/columns_test.go +++ b/go/vt/vttablet/onlineddl/vrepl/columns_test.go @@ -133,3 +133,248 @@ func TestGetSharedColumns(t *testing.T) { }) } } + +func TestGetExpandedColumnNames(t *testing.T) { + var ( + columnsA = &ColumnList{ + columns: []Column{ + { + Name: "c1", + IsNullable: true, + }, + { + Name: "c2", + IsNullable: true, + }, + { + Name: "c3", + IsNullable: false, + }, + }, + Ordinals: ColumnsMap{}, + } + columnsB = &ColumnList{ + columns: []Column{ + { + Name: "c1", + IsNullable: true, + }, + { + Name: "c2", + IsNullable: false, + }, + { + Name: "c3", + IsNullable: true, + }, + }, + Ordinals: ColumnsMap{}, + } + ) + tcases := []struct { + name string + sourceCol Column + targetCol Column + expanded bool + }{ + { + "both nullable", + Column{ + IsNullable: true, + }, + Column{ + IsNullable: true, + }, + false, + }, + { + "nullable to non nullable", + Column{ + IsNullable: true, + }, + Column{ + IsNullable: false, + }, + false, + }, + { + "non nullable to nullable", + Column{ + IsNullable: false, + }, + Column{ + IsNullable: true, + }, + true, + }, + { + "signed to unsigned", + Column{ + Type: IntegerColumnType, + NumericPrecision: 4, + IsUnsigned: false, + }, + Column{ + Type: IntegerColumnType, + NumericPrecision: 4, + IsUnsigned: true, + }, + true, + }, + { + "unsigned to signed", + Column{ + Type: IntegerColumnType, + NumericPrecision: 4, + IsUnsigned: true, + }, + Column{ + Type: IntegerColumnType, + NumericPrecision: 4, + IsUnsigned: false, + }, + true, + }, + { + "signed to smaller unsigned", + Column{ + Type: IntegerColumnType, + NumericPrecision: 8, + IsUnsigned: false, + }, + 
Column{ + Type: IntegerColumnType, + NumericPrecision: 4, + IsUnsigned: true, + }, + false, + }, + { + "same char length", + Column{ + CharacterMaximumLength: 20, + }, + Column{ + CharacterMaximumLength: 20, + }, + false, + }, + { + "reduced char length", + Column{ + CharacterMaximumLength: 20, + }, + Column{ + CharacterMaximumLength: 19, + }, + false, + }, + { + "increased char length", + Column{ + CharacterMaximumLength: 20, + }, + Column{ + CharacterMaximumLength: 21, + }, + true, + }, + { + "expand temporal", + Column{ + DataType: "time", + }, + Column{ + DataType: "timestamp", + }, + true, + }, + { + "expand temporal", + Column{ + DataType: "date", + }, + Column{ + DataType: "timestamp", + }, + true, + }, + { + "expand temporal", + Column{ + DataType: "date", + }, + Column{ + DataType: "datetime", + }, + true, + }, + { + "non expand temporal", + Column{ + DataType: "datetime", + }, + Column{ + DataType: "timestamp", + }, + false, + }, + { + "expand temporal", + Column{ + DataType: "timestamp", + }, + Column{ + DataType: "datetime", + }, + true, + }, + { + "expand enum", + Column{ + Type: EnumColumnType, + EnumValues: "'a', 'b'", + }, + Column{ + Type: EnumColumnType, + EnumValues: "'a', 'x'", + }, + true, + }, + { + "expand enum", + Column{ + Type: EnumColumnType, + EnumValues: "'a', 'b'", + }, + Column{ + Type: EnumColumnType, + EnumValues: "'a', 'b', 'c'", + }, + true, + }, + { + "reduce enum", + Column{ + Type: EnumColumnType, + EnumValues: "'a', 'b', 'c'", + }, + Column{ + Type: EnumColumnType, + EnumValues: "'a', 'b'", + }, + false, + }, + } + + expectedExpandedColumnNames := []string{"c3"} + expandedColumnNames, _ := GetExpandedColumnNames(columnsA, columnsB) + assert.Equal(t, expectedExpandedColumnNames, expandedColumnNames) + + for _, tcase := range tcases { + t.Run(tcase.name, func(t *testing.T) { + expanded, _ := isExpandedColumn(&tcase.sourceCol, &tcase.targetCol) + assert.Equal(t, tcase.expanded, expanded) + }) + } +} diff --git 
a/go/vt/vttablet/tabletmanager/rpc_query.go b/go/vt/vttablet/tabletmanager/rpc_query.go index 0d21cee7677..84efe7657b8 100644 --- a/go/vt/vttablet/tabletmanager/rpc_query.go +++ b/go/vt/vttablet/tabletmanager/rpc_query.go @@ -19,6 +19,9 @@ package tabletmanager import ( "context" + "vitess.io/vitess/go/vt/proto/vtrpc" + "vitess.io/vitess/go/vt/vterrors" + "vitess.io/vitess/go/constants/sidecar" "vitess.io/vitess/go/sqlescape" "vitess.io/vitess/go/sqltypes" @@ -29,6 +32,43 @@ import ( tabletmanagerdatapb "vitess.io/vitess/go/vt/proto/tabletmanagerdata" ) +// analyzeExecuteFetchAsDbaMultiQuery returns 'true' when at least one of the queries +// in the given SQL has a `/*vt+ allowZeroInDate=true */` directive. +func analyzeExecuteFetchAsDbaMultiQuery(sql string) (queries []string, parseable bool, countCreate int, allowZeroInDate bool, err error) { + queries, err = sqlparser.SplitStatementToPieces(sql) + if err != nil { + return nil, false, 0, false, err + } + if len(queries) == 0 { + return nil, false, 0, false, vterrors.Errorf(vtrpc.Code_INVALID_ARGUMENT, "no statements found in query: %s", sql) + } + parseable = true + for _, query := range queries { + // Some of the queries we receive here are legitimately non-parseable by our + // current parser, such as `CHANGE REPLICATION SOURCE TO...`. We must allow + // them and so we skip parsing errors. + stmt, err := sqlparser.Parse(query) + if err != nil { + parseable = false + continue + } + switch stmt.(type) { + case *sqlparser.CreateTable, *sqlparser.CreateView: + countCreate++ + default: + } + + if cmnt, ok := stmt.(sqlparser.Commented); ok { + directives := cmnt.GetParsedComments().Directives() + if directives.IsSet("allowZeroInDate") { + allowZeroInDate = true + } + } + + } + return queries, parseable, countCreate, allowZeroInDate, nil +} + // ExecuteFetchAsDba will execute the given query, possibly disabling binlogs and reload schema.
func (tm *TabletManager) ExecuteFetchAsDba(ctx context.Context, req *tabletmanagerdatapb.ExecuteFetchAsDbaRequest) (*querypb.QueryResult, error) { // get a connection @@ -52,25 +92,41 @@ func (tm *TabletManager) ExecuteFetchAsDba(ctx context.Context, req *tabletmanag _, _ = conn.ExecuteFetch("USE "+sqlescape.EscapeID(req.DbName), 1, false) } - // Handle special possible directives - var directives *sqlparser.CommentDirectives - if stmt, err := sqlparser.Parse(string(req.Query)); err == nil { - if cmnt, ok := stmt.(sqlparser.Commented); ok { - directives = cmnt.GetParsedComments().Directives() + statements, _, countCreate, allowZeroInDate, err := analyzeExecuteFetchAsDbaMultiQuery(string(req.Query)) + if err != nil { + return nil, err + } + if len(statements) > 1 { + // Up to v19, we allow multi-statement SQL in ExecuteFetchAsDba, but only for the specific case + // where all statements are CREATE TABLE or CREATE VIEW. This is to support `ApplySchema --batch-size`. + // In v20, we will not support multi statements whatsoever. + // v20 will throw an error by virtue of using ExecuteFetch instead of ExecuteFetchMulti. + if countCreate != len(statements) { + return nil, vterrors.Errorf(vtrpc.Code_INVALID_ARGUMENT, "multi statement queries are not supported in ExecuteFetchAsDba unless all are CREATE TABLE or CREATE VIEW") } } - if directives.IsSet("allowZeroInDate") { + if allowZeroInDate { if _, err := conn.ExecuteFetch("set @@session.sql_mode=REPLACE(REPLACE(@@session.sql_mode, 'NO_ZERO_DATE', ''), 'NO_ZERO_IN_DATE', '')", 1, false); err != nil { return nil, err } } - // Replace any provided sidecar database qualifiers with the correct one. - uq, err := sqlparser.ReplaceTableQualifiers(string(req.Query), sidecar.DefaultName, sidecar.GetName()) + // TODO(shlomi): we use ReplaceTableQualifiersMultiQuery for backwards compatibility. In v20 we will not accept + // multi statement queries in ExecuteFetchAsDBA.
This will be rewritten as ReplaceTableQualifiers() + uq, err := sqlparser.ReplaceTableQualifiersMultiQuery(string(req.Query), sidecar.DefaultName, sidecar.GetName()) if err != nil { return nil, err } - result, err := conn.ExecuteFetch(uq, int(req.MaxRows), true /*wantFields*/) + // TODO(shlomi): we use ExecuteFetchMulti for backwards compatibility. In v20 we will not accept + // multi statement queries in ExecuteFetchAsDBA. This will be rewritten as: + // (in v20): result, err := ExecuteFetch(uq, int(req.MaxRows), true /*wantFields*/) + result, more, err := conn.ExecuteFetchMulti(uq, int(req.MaxRows), true /*wantFields*/) + for more { + _, more, _, err = conn.ReadQueryResult(0, false) + if err != nil { + return nil, err + } + } // re-enable binlogs if necessary if req.DisableBinlogs && !conn.IsClosed() { diff --git a/go/vt/vttablet/tabletmanager/rpc_query_test.go b/go/vt/vttablet/tabletmanager/rpc_query_test.go index 87a64b2d8b7..79a6a9d1c75 100644 --- a/go/vt/vttablet/tabletmanager/rpc_query_test.go +++ b/go/vt/vttablet/tabletmanager/rpc_query_test.go @@ -21,6 +21,7 @@ import ( "strings" "testing" + "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "vitess.io/vitess/go/mysql" @@ -33,6 +34,83 @@ import ( tabletmanagerdatapb "vitess.io/vitess/go/vt/proto/tabletmanagerdata" ) +func TestAnalyzeExecuteFetchAsDbaMultiQuery(t *testing.T) { + tcases := []struct { + query string + count int + parseable bool + allowZeroInDate bool + allCreate bool + expectErr bool + }{ + { + query: "", + expectErr: true, + }, + { + query: "select * from t1 ; select * from t2", + count: 2, + parseable: true, + }, + { + query: "create table t(id int)", + count: 1, + allCreate: true, + parseable: true, + }, + { + query: "create table t(id int); create view v as select 1 from dual", + count: 2, + allCreate: true, + parseable: true, + }, + { + query: "create table t(id int); create view v as select 1 from dual; drop table t3", + count: 3, + allCreate: false, + parseable: 
true, + }, + { + query: "create /*vt+ allowZeroInDate=true */ table t (id int)", + count: 1, + allCreate: true, + allowZeroInDate: true, + parseable: true, + }, + { + query: "create table a (id int) ; create /*vt+ allowZeroInDate=true */ table b (id int)", + count: 2, + allCreate: true, + allowZeroInDate: true, + parseable: true, + }, + { + query: "stop replica; start replica", + count: 2, + parseable: false, + }, + { + query: "create table a (id int) ; --comment ; what", + count: 3, + parseable: false, + }, + } + for _, tcase := range tcases { + t.Run(tcase.query, func(t *testing.T) { + queries, parseable, countCreate, allowZeroInDate, err := analyzeExecuteFetchAsDbaMultiQuery(tcase.query) + if tcase.expectErr { + assert.Error(t, err) + } else { + assert.NoError(t, err) + assert.Equal(t, tcase.count, len(queries)) + assert.Equal(t, tcase.parseable, parseable) + assert.Equal(t, tcase.allCreate, (countCreate == len(queries))) + assert.Equal(t, tcase.allowZeroInDate, allowZeroInDate) + } + }) + } +} + func TestTabletManager_ExecuteFetchAsDba(t *testing.T) { ctx := context.Background() cp := mysql.ConnParams{} diff --git a/go/vt/vttablet/tabletmanager/vreplication/replicator_plan.go b/go/vt/vttablet/tabletmanager/vreplication/replicator_plan.go index 39ffdef04ae..b5deb309d45 100644 --- a/go/vt/vttablet/tabletmanager/vreplication/replicator_plan.go +++ b/go/vt/vttablet/tabletmanager/vreplication/replicator_plan.go @@ -332,8 +332,13 @@ func (tp *TablePlan) bindFieldVal(field *querypb.Field, val *sqltypes.Value) (*q if enumValues, ok := tp.EnumValuesMap[field.Name]; ok && !val.IsNull() { // The fact that this field has a EnumValuesMap entry, means we must // use the enum's text value as opposed to the enum's numerical value. - // Once known use case is with Online DDL, when a column is converted from - // ENUM to a VARCHAR/TEXT. 
+ // This may be needed in Online DDL, when the enum column could be modified: + // - Either from ENUM to a text type (VARCHAR/TEXT) + // - Or from ENUM to another ENUM with different value ordering, + // e.g. from `('red', 'green', 'blue')` to `('red', 'blue')`. + // By applying the textual value of an enum we eliminate the ordering concern. + // In non-Online DDL this shouldn't be a concern because the schema is static, + // and so passing the enum's numerical value is sufficient. enumValue, enumValueOK := enumValues[val.ToString()] if !enumValueOK { return nil, vterrors.Errorf(vtrpcpb.Code_INTERNAL, "Invalid enum value: %v for field %s", val, field.Name) diff --git a/go/vt/vttablet/tabletserver/query_executor.go b/go/vt/vttablet/tabletserver/query_executor.go index 458c644a726..46a527e9bfa 100644 --- a/go/vt/vttablet/tabletserver/query_executor.go +++ b/go/vt/vttablet/tabletserver/query_executor.go @@ -1181,7 +1181,12 @@ func (qre *QueryExecutor) executeGetSchemaQuery(query string, callback func(sche return qre.execStreamSQL(conn, false /* isTransaction */, query, func(result *sqltypes.Result) error { schemaDef := make(map[string]string) for _, row := range result.Rows { - schemaDef[row[0].ToString()] = row[1].ToString() + tableName := row[0].ToString() + // Schema RPC should ignore the internal table in the response. 
+ if schema.IsInternalOperationTableName(tableName) { + continue + } + schemaDef[tableName] = row[1].ToString() } return callback(&querypb.GetSchemaResponse{TableDefinition: schemaDef}) }) diff --git a/go/vt/vttablet/tabletserver/repltracker/poller.go b/go/vt/vttablet/tabletserver/repltracker/poller.go index ace01dffb2d..6fc964bef57 100644 --- a/go/vt/vttablet/tabletserver/repltracker/poller.go +++ b/go/vt/vttablet/tabletserver/repltracker/poller.go @@ -21,10 +21,10 @@ import ( "time" "vitess.io/vitess/go/stats" - "vitess.io/vitess/go/vt/mysqlctl" - vtrpcpb "vitess.io/vitess/go/vt/proto/vtrpc" "vitess.io/vitess/go/vt/vterrors" + + vtrpcpb "vitess.io/vitess/go/vt/proto/vtrpc" ) var replicationLagSeconds = stats.NewGauge("replicationLagSec", "replication lag in seconds") diff --git a/go/vt/vttablet/tabletserver/repltracker/repltracker.go b/go/vt/vttablet/tabletserver/repltracker/repltracker.go index 5ab44eb774e..d14bec8b708 100644 --- a/go/vt/vttablet/tabletserver/repltracker/repltracker.go +++ b/go/vt/vttablet/tabletserver/repltracker/repltracker.go @@ -23,10 +23,11 @@ import ( "vitess.io/vitess/go/stats" "vitess.io/vitess/go/vt/log" "vitess.io/vitess/go/vt/mysqlctl" - querypb "vitess.io/vitess/go/vt/proto/query" - topodatapb "vitess.io/vitess/go/vt/proto/topodata" "vitess.io/vitess/go/vt/vttablet/tabletserver/heartbeat" "vitess.io/vitess/go/vt/vttablet/tabletserver/tabletenv" + + querypb "vitess.io/vitess/go/vt/proto/query" + topodatapb "vitess.io/vitess/go/vt/proto/topodata" ) var ( diff --git a/go/vt/vttablet/tabletserver/rules/cached_size.go b/go/vt/vttablet/tabletserver/rules/cached_size.go index acfd199f1f2..1375ef2cb7b 100644 --- a/go/vt/vttablet/tabletserver/rules/cached_size.go +++ b/go/vt/vttablet/tabletserver/rules/cached_size.go @@ -108,7 +108,7 @@ func (cached *bvcre) CachedSize(alloc bool) int64 { } // field re *regexp.Regexp if cached.re != nil { - size += hack.RuntimeAllocSize(int64(153)) + size += hack.RuntimeAllocSize(int64(160)) } return size } @@ 
-124,7 +124,7 @@ func (cached *namedRegexp) CachedSize(alloc bool) int64 { size += hack.RuntimeAllocSize(int64(len(cached.name))) // field Regexp *regexp.Regexp if cached.Regexp != nil { - size += hack.RuntimeAllocSize(int64(153)) + size += hack.RuntimeAllocSize(int64(160)) } return size } diff --git a/go/vt/vttablet/tabletserver/throttle/throttler.go b/go/vt/vttablet/tabletserver/throttle/throttler.go index 6558b052c9a..3572a9e182d 100644 --- a/go/vt/vttablet/tabletserver/throttle/throttler.go +++ b/go/vt/vttablet/tabletserver/throttle/throttler.go @@ -316,7 +316,9 @@ func (throttler *Throttler) normalizeThrottlerConfig(throttlerConfig *topodatapb func (throttler *Throttler) WatchSrvKeyspaceCallback(srvks *topodatapb.SrvKeyspace, err error) bool { log.Infof("Throttler: WatchSrvKeyspaceCallback called with: %+v", srvks) if err != nil { - log.Errorf("WatchSrvKeyspaceCallback error: %v", err) + if !topo.IsErrType(err, topo.Interrupted) && !errors.Is(err, context.Canceled) { + log.Errorf("WatchSrvKeyspaceCallback error: %v", err) + } return false } throttlerConfig := throttler.normalizeThrottlerConfig(srvks.ThrottlerConfig) @@ -472,7 +474,7 @@ func (throttler *Throttler) Open() error { defer requestCancel() throttlerConfig, err := throttler.readThrottlerConfig(requestCtx) if err == nil { - log.Errorf("Throttler.retryReadAndApplyThrottlerConfig(): success reading throttler config: %+v", throttlerConfig) + log.Infof("Throttler.retryReadAndApplyThrottlerConfig(): success reading throttler config: %+v", throttlerConfig) // It's possible that during a retry-sleep, the throttler is closed and opened again, leading // to two (or more) instances of this goroutine. 
That's not a big problem; it's fine if all attempt to read the throttler config; but we just want to ensure they don't step on each other diff --git a/go/vt/vttablet/tabletserver/txthrottler/mock_throttler_test.go b/go/vt/vttablet/tabletserver/txthrottler/mock_throttler_test.go index 3ffb3a78a1a..885e02dfe78 100644 --- a/go/vt/vttablet/tabletserver/txthrottler/mock_throttler_test.go +++ b/go/vt/vttablet/tabletserver/txthrottler/mock_throttler_test.go @@ -8,6 +8,8 @@ import ( reflect "reflect" time "time" + "vitess.io/vitess/go/vt/proto/topodata" + gomock "go.uber.org/mock/gomock" discovery "vitess.io/vitess/go/vt/discovery" @@ -63,6 +65,20 @@ func (mr *MockThrottlerInterfaceMockRecorder) GetConfiguration() *gomock.Call { return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetConfiguration", reflect.TypeOf((*MockThrottlerInterface)(nil).GetConfiguration)) } +// MaxLag mocks base method. +func (m *MockThrottlerInterface) MaxLag(tabletType topodata.TabletType) uint32 { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "MaxLag", tabletType) + ret0, _ := ret[0].(uint32) + return ret0 +} + +// MaxLag indicates an expected call of MaxLag. +func (mr *MockThrottlerInterfaceMockRecorder) MaxLag(tabletType interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "MaxLag", reflect.TypeOf((*MockThrottlerInterface)(nil).MaxLag), tabletType) +} + // MaxRate mocks base method.
func (m *MockThrottlerInterface) MaxRate() int64 { m.ctrl.T.Helper() diff --git a/go/vt/vttablet/tabletserver/txthrottler/tx_throttler.go b/go/vt/vttablet/tabletserver/txthrottler/tx_throttler.go index 0b1c1c1fda3..e20c2db1796 100644 --- a/go/vt/vttablet/tabletserver/txthrottler/tx_throttler.go +++ b/go/vt/vttablet/tabletserver/txthrottler/tx_throttler.go @@ -22,6 +22,7 @@ import ( "reflect" "strings" "sync" + "sync/atomic" "time" "vitess.io/vitess/go/stats" @@ -81,6 +82,7 @@ type ThrottlerInterface interface { GetConfiguration() *throttlerdatapb.Configuration UpdateConfiguration(configuration *throttlerdatapb.Configuration, copyZeroValues bool) error ResetConfiguration() + MaxLag(tabletType topodatapb.TabletType) uint32 } // TxThrottlerName is the name the wrapped go/vt/throttler object will be registered with @@ -167,6 +169,10 @@ type txThrottlerStateImpl struct { // tabletTypes stores the tablet types for throttling tabletTypes map[topodatapb.TabletType]bool + + maxLag int64 + done chan bool + waitForTermination sync.WaitGroup } // NewTxThrottler tries to construct a txThrottler from the relevant @@ -245,7 +251,7 @@ func (t *txThrottler) Throttle(priority int, workload string) (result bool) { // Throttle according to both what the throttler state says and the priority. Workloads with lower priority value // are less likely to be throttled. 
- result = t.state.throttle() && rand.Intn(sqlparser.MaxPriorityValue) < priority + result = rand.Intn(sqlparser.MaxPriorityValue) < priority && t.state.throttle() t.requestsTotal.Add(workload, 1) if result { @@ -284,6 +290,7 @@ func newTxThrottlerState(txThrottler *txThrottler, config *tabletenv.TabletConfi tabletTypes: tabletTypes, throttler: t, txThrottler: txThrottler, + done: make(chan bool, 1), } // get cells from topo if none defined in tabletenv config @@ -298,6 +305,8 @@ func newTxThrottlerState(txThrottler *txThrottler, config *tabletenv.TabletConfi state.stopHealthCheck = cancel state.initHealthCheckStream(txThrottler.topoServer, target) go state.healthChecksProcessor(ctx, txThrottler.topoServer, target) + state.waitForTermination.Add(1) + go state.updateMaxLag() return state, nil } @@ -355,7 +364,35 @@ func (ts *txThrottlerStateImpl) throttle() bool { // Serialize calls to ts.throttle.Throttle() ts.throttleMu.Lock() defer ts.throttleMu.Unlock() - return ts.throttler.Throttle(0 /* threadId */) > 0 + + maxLag := atomic.LoadInt64(&ts.maxLag) + + return maxLag > ts.config.TxThrottlerConfig.TargetReplicationLagSec && + ts.throttler.Throttle(0 /* threadId */) > 0 +} + +func (ts *txThrottlerStateImpl) updateMaxLag() { + defer ts.waitForTermination.Done() + // We use half of the target lag to ensure we have enough resolution to see changes in lag below that value + ticker := time.NewTicker(time.Duration(ts.config.TxThrottlerConfig.TargetReplicationLagSec/2) * time.Second) + defer ticker.Stop() +outerloop: + for { + select { + case <-ticker.C: + var maxLag uint32 + + for tabletType := range ts.tabletTypes { + maxLagPerTabletType := ts.throttler.MaxLag(tabletType) + if maxLagPerTabletType > maxLag { + maxLag = maxLagPerTabletType + } + } + atomic.StoreInt64(&ts.maxLag, int64(maxLag)) + case <-ts.done: + break outerloop + } + } } func (ts *txThrottlerStateImpl) deallocateResources() { @@ -363,6 +400,8 @@ func (ts *txThrottlerStateImpl) deallocateResources() { 
ts.closeHealthCheckStream() ts.healthCheck = nil + ts.done <- true + ts.waitForTermination.Wait() // After ts.healthCheck is closed txThrottlerStateImpl.StatsUpdate() is guaranteed not // to be executing, so we can safely close the throttler. ts.throttler.Close() diff --git a/go/vt/vttablet/tabletserver/txthrottler/tx_throttler_test.go b/go/vt/vttablet/tabletserver/txthrottler/tx_throttler_test.go index 843aa0610a0..ed98986fb52 100644 --- a/go/vt/vttablet/tabletserver/txthrottler/tx_throttler_test.go +++ b/go/vt/vttablet/tabletserver/txthrottler/tx_throttler_test.go @@ -23,6 +23,7 @@ package txthrottler import ( "context" + "sync/atomic" "testing" "time" @@ -50,7 +51,7 @@ func TestDisabledThrottler(t *testing.T) { Shard: "shard", }) assert.Nil(t, throttler.Open()) - assert.False(t, throttler.Throttle(0, "some_workload")) + assert.False(t, throttler.Throttle(0, "some-workload")) throttlerImpl, _ := throttler.(*txThrottler) assert.Zero(t, throttlerImpl.throttlerRunning.Get()) throttler.Close() @@ -80,28 +81,44 @@ func TestEnabledThrottler(t *testing.T) { return mockThrottler, nil } - call0 := mockThrottler.EXPECT().UpdateConfiguration(gomock.Any(), true /* copyZeroValues */) - call1 := mockThrottler.EXPECT().Throttle(0) - call1.Return(0 * time.Second) + var calls []*gomock.Call + + call := mockThrottler.EXPECT().UpdateConfiguration(gomock.Any(), true /* copyZeroValues */) + calls = append(calls, call) + + // 1 + call = mockThrottler.EXPECT().Throttle(0) + call.Return(0 * time.Second) + calls = append(calls, call) + tabletStats := &discovery.TabletHealth{ Target: &querypb.Target{ Cell: "cell1", TabletType: topodatapb.TabletType_REPLICA, }, } - call2 := mockThrottler.EXPECT().RecordReplicationLag(gomock.Any(), tabletStats) - call3 := mockThrottler.EXPECT().Throttle(0) - call3.Return(1 * time.Second) + call = mockThrottler.EXPECT().RecordReplicationLag(gomock.Any(), tabletStats) + calls = append(calls, call) - call4 := mockThrottler.EXPECT().Throttle(0) - call4.Return(1 
* time.Second) - calllast := mockThrottler.EXPECT().Close() + // 2 + call = mockThrottler.EXPECT().Throttle(0) + call.Return(1 * time.Second) + calls = append(calls, call) - call1.After(call0) - call2.After(call1) - call3.After(call2) - call4.After(call3) - calllast.After(call4) + // 3 + // Nothing gets mocked here because the order of evaluation in txThrottler.Throttle() evaluates first + // whether the priority allows for throttling or not, so no need to mock calls in mockThrottler.Throttle() + + // 4 + // Nothing gets mocked here because the order of evaluation in txThrottlerStateImpl.Throttle() evaluates first + // whether there is lag or not, so no call to the underlying mockThrottler is issued. + + call = mockThrottler.EXPECT().Close() + calls = append(calls, call) + + for i := 1; i < len(calls); i++ { + calls[i].After(calls[i-1]) + } config := tabletenv.NewDefaultConfig() config.EnableTxThrottler = true @@ -118,13 +135,20 @@ func TestEnabledThrottler(t *testing.T) { }) assert.Nil(t, throttlerImpl.Open()) - throttlerStateImpl := throttlerImpl.state.(*txThrottlerStateImpl) + throttlerStateImpl, ok := throttlerImpl.state.(*txThrottlerStateImpl) + assert.True(t, ok) assert.Equal(t, map[topodatapb.TabletType]bool{topodatapb.TabletType_REPLICA: true}, throttlerStateImpl.tabletTypes) assert.Equal(t, int64(1), throttlerImpl.throttlerRunning.Get()) - assert.False(t, throttlerImpl.Throttle(100, "some_workload")) - assert.Equal(t, int64(1), throttlerImpl.requestsTotal.Counts()["some_workload"]) - assert.Zero(t, throttlerImpl.requestsThrottled.Counts()["some_workload"]) + // Stop the go routine that keeps updating the cached shard's max lag to prevent it from changing the value in a + // way that will interfere with how we manipulate that value in our tests to evaluate different cases: + throttlerStateImpl.done <- true + + // 1 should not throttle due to return value of underlying Throttle(), despite high lag + atomic.StoreInt64(&throttlerStateImpl.maxLag, 20) + 
assert.False(t, throttlerImpl.Throttle(100, "some-workload")) + assert.Equal(t, int64(1), throttlerImpl.requestsTotal.Counts()["some-workload"]) + assert.Zero(t, throttlerImpl.requestsThrottled.Counts()["some-workload"]) throttlerImpl.state.StatsUpdate(tabletStats) // This calls replication lag thing assert.Equal(t, map[string]int64{"cell1.REPLICA": 1}, throttlerImpl.healthChecksReadTotal.Counts()) @@ -140,16 +164,23 @@ func TestEnabledThrottler(t *testing.T) { assert.Equal(t, map[string]int64{"cell1.REPLICA": 1, "cell2.RDONLY": 1}, throttlerImpl.healthChecksReadTotal.Counts()) assert.Equal(t, map[string]int64{"cell1.REPLICA": 1}, throttlerImpl.healthChecksRecordedTotal.Counts()) - // The second throttle call should reject. - assert.True(t, throttlerImpl.Throttle(100, "some_workload")) - assert.Equal(t, int64(2), throttlerImpl.requestsTotal.Counts()["some_workload"]) - assert.Equal(t, int64(1), throttlerImpl.requestsThrottled.Counts()["some_workload"]) + // 2 should throttle due to return value of underlying Throttle(), high lag & priority = 100 + assert.True(t, throttlerImpl.Throttle(100, "some-workload")) + assert.Equal(t, int64(2), throttlerImpl.requestsTotal.Counts()["some-workload"]) + assert.Equal(t, int64(1), throttlerImpl.requestsThrottled.Counts()["some-workload"]) + + // 3 should not throttle despite return value of underlying Throttle() and high lag, due to priority = 0 + assert.False(t, throttlerImpl.Throttle(0, "some-workload")) + assert.Equal(t, int64(3), throttlerImpl.requestsTotal.Counts()["some-workload"]) + assert.Equal(t, int64(1), throttlerImpl.requestsThrottled.Counts()["some-workload"]) - // This call should not throttle due to priority. Check that's the case and counters agree. 
- assert.False(t, throttlerImpl.Throttle(0, "some_workload")) - assert.Equal(t, int64(3), throttlerImpl.requestsTotal.Counts()["some_workload"]) - assert.Equal(t, int64(1), throttlerImpl.requestsThrottled.Counts()["some_workload"]) - throttlerImpl.Close() + // 4 should not throttle despite return value of underlying Throttle() and priority = 100, due to low lag + atomic.StoreInt64(&throttlerStateImpl.maxLag, 1) + assert.False(t, throttler.Throttle(100, "some-workload")) + assert.Equal(t, int64(4), throttlerImpl.requestsTotal.Counts()["some-workload"]) + assert.Equal(t, int64(1), throttlerImpl.requestsThrottled.Counts()["some-workload"]) + + throttler.Close() assert.Zero(t, throttlerImpl.throttlerRunning.Get()) } diff --git a/go/vt/vttest/vtprocess.go b/go/vt/vttest/vtprocess.go index 2053973b766..efebde98fb7 100644 --- a/go/vt/vttest/vtprocess.go +++ b/go/vt/vttest/vtprocess.go @@ -141,8 +141,7 @@ func (vtp *VtProcess) WaitStart() (err error) { vtp.proc.Args = append(vtp.proc.Args, vtp.ExtraArgs...) vtp.proc.Env = append(vtp.proc.Env, os.Environ()...) vtp.proc.Env = append(vtp.proc.Env, vtp.Env...) 
- - if testing.Verbose() { + if !testing.Testing() || testing.Verbose() { vtp.proc.Stderr = os.Stderr vtp.proc.Stdout = os.Stdout } diff --git a/go/vt/wrangler/traffic_switcher.go b/go/vt/wrangler/traffic_switcher.go index af20b95138c..c73ce1d03ee 100644 --- a/go/vt/wrangler/traffic_switcher.go +++ b/go/vt/wrangler/traffic_switcher.go @@ -40,6 +40,7 @@ import ( "vitess.io/vitess/go/vt/logutil" "vitess.io/vitess/go/vt/sqlparser" "vitess.io/vitess/go/vt/topo" + "vitess.io/vitess/go/vt/topo/topoproto" "vitess.io/vitess/go/vt/topotools" "vitess.io/vitess/go/vt/vtctl/workflow" "vitess.io/vitess/go/vt/vterrors" @@ -621,6 +622,20 @@ func (wr *Wrangler) SwitchWrites(ctx context.Context, targetKeyspace, workflowNa sw.cancelMigration(ctx, sm) return handleError("failed to create the reverse vreplication streams", err) } + + // Initialize any target sequences, if there are any, before allowing new writes. + if initializeTargetSequences && len(sequenceMetadata) > 0 { + ts.Logger().Infof("Initializing target sequences") + // Writes are blocked so we can safely initialize the sequence tables but + // we also want to use a shorter timeout than the parent context. + // We use at most half of the overall timeout. + initSeqCtx, cancel := context.WithTimeout(ctx, timeout/2) + defer cancel() + if err := sw.initializeTargetSequences(initSeqCtx, sequenceMetadata); err != nil { + sw.cancelMigration(ctx, sm) + return handleError(fmt.Sprintf("failed to initialize the sequences used in the %s keyspace", ts.TargetKeyspaceName()), err) + } + } } else { if cancel { return handleError("invalid cancel", fmt.Errorf("traffic switching has reached the point of no return, cannot cancel")) @@ -637,17 +652,6 @@ func (wr *Wrangler) SwitchWrites(ctx context.Context, targetKeyspace, workflowNa if err := sw.createJournals(ctx, sourceWorkflows); err != nil { return handleError("failed to create the journal", err) } - // Initialize any target sequences, if there are any, before allowing new writes. 
- if initializeTargetSequences && len(sequenceMetadata) > 0 { - // Writes are blocked so we can safely initialize the sequence tables but - // we also want to use a shorter timeout than the parent context. - // We use up at most half of the overall timeout. - initSeqCtx, cancel := context.WithTimeout(ctx, timeout/2) - defer cancel() - if err := sw.initializeTargetSequences(initSeqCtx, sequenceMetadata); err != nil { - return handleError(fmt.Sprintf("failed to initialize the sequences used in the %s keyspace", ts.TargetKeyspaceName()), err) - } - } if err := sw.allowTargetWrites(ctx); err != nil { return handleError(fmt.Sprintf("failed to allow writes in the %s keyspace", ts.TargetKeyspaceName()), err) } @@ -2144,13 +2148,17 @@ func (ts *trafficSwitcher) initializeTargetSequences(ctx context.Context, sequen ) qr, terr := ts.wr.ExecuteFetchAsApp(ictx, primary.GetAlias(), true, query.Query, 1) if terr != nil || len(qr.Rows) != 1 { - return vterrors.Errorf(vtrpcpb.Code_INTERNAL, "failed to get the max used sequence value for target table %s.%s in order to initialize the backing sequence table: %v", - ts.targetKeyspace, sequenceMetadata.usingTableName, terr) + return vterrors.Errorf(vtrpcpb.Code_INTERNAL, "failed to get the max used sequence value for target table %s.%s on tablet %s in order to initialize the backing sequence table: %v", + ts.targetKeyspace, sequenceMetadata.usingTableName, topoproto.TabletAliasString(primary.Alias), terr) } - maxID, terr := sqltypes.Proto3ToResult(qr).Rows[0][0].ToInt64() - if terr != nil { - return vterrors.Errorf(vtrpcpb.Code_INTERNAL, "failed to get the max used sequence value for target table %s.%s in order to initialize the backing sequence table: %v", - ts.targetKeyspace, sequenceMetadata.usingTableName, terr) + rawVal := sqltypes.Proto3ToResult(qr).Rows[0][0] + maxID := int64(0) + if !rawVal.IsNull() { // If it's NULL then there are no rows and 0 remains the max + maxID, terr = rawVal.ToInt64() + if terr != nil { + return 
vterrors.Errorf(vtrpcpb.Code_INTERNAL, "failed to get the max used sequence value for target table %s.%s on tablet %s in order to initialize the backing sequence table: %v", + ts.targetKeyspace, sequenceMetadata.usingTableName, topoproto.TabletAliasString(primary.Alias), terr) + } } srMu.Lock() defer srMu.Unlock() diff --git a/go/vt/wrangler/traffic_switcher_env_test.go b/go/vt/wrangler/traffic_switcher_env_test.go index 085e4d7be52..4a5477212e9 100644 --- a/go/vt/wrangler/traffic_switcher_env_test.go +++ b/go/vt/wrangler/traffic_switcher_env_test.go @@ -255,7 +255,7 @@ func newTestTableMigraterCustom(ctx context.Context, t *testing.T, sourceShards, "maxval", "int64", ), - "5", + "NULL", ), ) tme.tmeDB.AddQuery(fmt.Sprintf(maxValForSequence, "ks2", "t2"), @@ -271,7 +271,7 @@ func newTestTableMigraterCustom(ctx context.Context, t *testing.T, sourceShards, // Now tell the fakesqldb used by the global keyspace tablets to expect // the sequence management related queries against the target keyspace. 
gfdb.AddQuery( - sqlparser.BuildParsedQuery(sqlInitSequenceTable, sqlescape.EscapeID("vt_global"), sqlescape.EscapeID("t1_seq"), 6, 6, 6).Query, + sqlparser.BuildParsedQuery(sqlInitSequenceTable, sqlescape.EscapeID("vt_global"), sqlescape.EscapeID("t1_seq"), 1, 1, 1).Query, &sqltypes.Result{RowsAffected: 0}, ) gfdb.AddQuery( diff --git a/go/vt/wrangler/traffic_switcher_test.go b/go/vt/wrangler/traffic_switcher_test.go index 7bd52cb961d..340d8f413f8 100644 --- a/go/vt/wrangler/traffic_switcher_test.go +++ b/go/vt/wrangler/traffic_switcher_test.go @@ -1009,8 +1009,8 @@ func TestTableMigrateOneToManyDryRun(t *testing.T) { "\tKeyspace ks1, Shard 0 at Position MariaDB/5-456-892", "Wait for VReplication on stopped streams to catchup for up to 1s", "Create reverse replication workflow test_reverse", - "Create journal entries on source databases", "The following sequence backing tables used by tables being moved will be initialized: t1_seq,t2_seq", + "Create journal entries on source databases", "Enable writes on keyspace ks2 tables [t1,t2]", "Switch routing from keyspace ks1 to keyspace ks2", "Routing rules for tables [t1,t2] will be updated", diff --git a/test/templates/cluster_endtoend_test.tpl b/test/templates/cluster_endtoend_test.tpl index f1c825a0c12..3345f7d9c7a 100644 --- a/test/templates/cluster_endtoend_test.tpl +++ b/test/templates/cluster_endtoend_test.tpl @@ -47,13 +47,15 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' diff --git a/test/templates/cluster_endtoend_test_docker.tpl b/test/templates/cluster_endtoend_test_docker.tpl index e42116fed69..de14606a3ea 100644 --- a/test/templates/cluster_endtoend_test_docker.tpl +++ 
b/test/templates/cluster_endtoend_test_docker.tpl @@ -32,13 +32,15 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' diff --git a/test/templates/cluster_endtoend_test_mysql57.tpl b/test/templates/cluster_endtoend_test_mysql57.tpl index 69a6028b316..8a3a5e6ac67 100644 --- a/test/templates/cluster_endtoend_test_mysql57.tpl +++ b/test/templates/cluster_endtoend_test_mysql57.tpl @@ -52,13 +52,15 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' diff --git a/test/templates/cluster_endtoend_test_self_hosted.tpl b/test/templates/cluster_endtoend_test_self_hosted.tpl index d9b48f6aecf..55c891ab95c 100644 --- a/test/templates/cluster_endtoend_test_self_hosted.tpl +++ b/test/templates/cluster_endtoend_test_self_hosted.tpl @@ -35,13 +35,15 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' filters: | end_to_end: - 'go/**/*.go' + - 'go/vt/sidecardb/**/*.sql' + - 'go/test/endtoend/onlineddl/vrepl_suite/**' - 'test.go' - 'Makefile' - 'build.env' diff --git a/test/templates/unit_test.tpl b/test/templates/unit_test.tpl index 73ce4737fcd..e9793e6db14 100644 --- a/test/templates/unit_test.tpl +++ b/test/templates/unit_test.tpl @@ -47,7 +47,7 @@ jobs: - name: Check for changes in relevant files if: 
steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: '' diff --git a/test/templates/unit_test_self_hosted.tpl b/test/templates/unit_test_self_hosted.tpl index 45d88392b9b..0e8c97d4907 100644 --- a/test/templates/unit_test_self_hosted.tpl +++ b/test/templates/unit_test_self_hosted.tpl @@ -34,7 +34,7 @@ jobs: - name: Check for changes in relevant files if: steps.skip-workflow.outputs.skip-workflow == 'false' - uses: frouioui/paths-filter@main + uses: dorny/paths-filter@v3.0.1 id: changes with: token: ''