From 048d4d24400fb75fec54a84d7c5cdb3cd58d96e5 Mon Sep 17 00:00:00 2001 From: Dirkjan Bussink Date: Mon, 24 Jun 2024 18:15:12 +0200 Subject: [PATCH] sqlparser: Remove unneeded escaping (#16255) Signed-off-by: Dirkjan Bussink --- go/sqltypes/value.go | 23 +++ go/sqltypes/value_test.go | 8 +- .../binlog/binlogplayer/binlog_player_test.go | 4 +- go/vt/sqlparser/ast_format.go | 4 +- go/vt/sqlparser/parse_test.go | 170 +++++++++--------- go/vt/sqlparser/testdata/select_cases.txt | 20 +-- .../planbuilder/testdata/select_cases.json | 28 +-- .../tabletmanager/rpc_vreplication_test.go | 54 +++--- .../tabletmanager/vdiff/action_test.go | 4 +- .../tabletmanager/vdiff/engine_test.go | 4 +- .../vreplication/insert_generator_test.go | 4 +- .../vreplication/journal_test.go | 8 +- .../vreplication/vcopier_test.go | 54 +++--- go/vt/wrangler/materializer_test.go | 48 +++-- go/vt/wrangler/resharder_test.go | 36 ++-- go/vt/wrangler/traffic_switcher_test.go | 8 +- 16 files changed, 253 insertions(+), 224 deletions(-) diff --git a/go/sqltypes/value.go b/go/sqltypes/value.go index bb4e26d15e3..4dde979066b 100644 --- a/go/sqltypes/value.go +++ b/go/sqltypes/value.go @@ -861,7 +861,25 @@ var SQLEncodeMap [256]byte // SQLDecodeMap is the reverse of SQLEncodeMap var SQLDecodeMap [256]byte +// encodeRef is a map of characters we use for escaping. +// This doesn't include double quotes since we don't need +// to escape that, as we always generate single quoted strings. var encodeRef = map[byte]byte{ + '\x00': '0', + '\'': '\'', + '\b': 'b', + '\n': 'n', + '\r': 'r', + '\t': 't', + 26: 'Z', // ctl-Z + '\\': '\\', +} + +// decodeRef is a map of characters we use for unescaping. +// We do need all characters here, since we do accept +// escaped double quotes in single quote strings and +// double quoted strings. 
+var decodeRef = map[byte]byte{ '\x00': '0', '\'': '\'', '"': '"', @@ -931,6 +949,11 @@ func init() { for i := range SQLEncodeMap { if to, ok := encodeRef[byte(i)]; ok { SQLEncodeMap[byte(i)] = to + } + } + + for i := range SQLDecodeMap { + if to, ok := decodeRef[byte(i)]; ok { SQLDecodeMap[to] = byte(i) } } diff --git a/go/sqltypes/value_test.go b/go/sqltypes/value_test.go index 36a0f5a5090..c7bdf1234dd 100644 --- a/go/sqltypes/value_test.go +++ b/go/sqltypes/value_test.go @@ -380,7 +380,7 @@ func TestEncode(t *testing.T) { outASCII: "'Zm9v'", }, { in: TestValue(VarChar, "\x00'\"\b\n\r\t\x1A\\"), - outSQL: "'\\0\\'\\\"\\b\\n\\r\\t\\Z\\\\'", + outSQL: "'\\0\\'\"\\b\\n\\r\\t\\Z\\\\'", outASCII: "'ACciCAoNCRpc'", }, { in: TestValue(Bit, "a"), @@ -442,7 +442,7 @@ func TestEncodeStringSQL(t *testing.T) { }, { in: "\x00'\"\b\n\r\t\x1A\\", - out: "'\\0\\'\\\"\\b\\n\\r\\t\\Z\\\\'", + out: "'\\0\\'\"\\b\\n\\r\\t\\Z\\\\'", }, } for _, tcase := range testcases { @@ -632,7 +632,7 @@ func TestEncodeSQLStringBuilder(t *testing.T) { outSQL: "'foo'", }, { in: TestValue(VarChar, "\x00'\"\b\n\r\t\x1A\\"), - outSQL: "'\\0\\'\\\"\\b\\n\\r\\t\\Z\\\\'", + outSQL: "'\\0\\'\"\\b\\n\\r\\t\\Z\\\\'", }, { in: TestValue(Bit, "a"), outSQL: "b'01100001'", @@ -663,7 +663,7 @@ func TestEncodeSQLBytes2(t *testing.T) { outSQL: "'foo'", }, { in: TestValue(VarChar, "\x00'\"\b\n\r\t\x1A\\"), - outSQL: "'\\0\\'\\\"\\b\\n\\r\\t\\Z\\\\'", + outSQL: "'\\0\\'\"\\b\\n\\r\\t\\Z\\\\'", }, { in: TestValue(Bit, "a"), outSQL: "b'01100001'", diff --git a/go/vt/binlog/binlogplayer/binlog_player_test.go b/go/vt/binlog/binlogplayer/binlog_player_test.go index 5c6e28df704..99b0ef496b3 100644 --- a/go/vt/binlog/binlogplayer/binlog_player_test.go +++ b/go/vt/binlog/binlogplayer/binlog_player_test.go @@ -382,7 +382,7 @@ func applyEvents(blp *BinlogPlayer) func() error { func TestCreateVReplicationKeyRange(t *testing.T) { want := "insert into _vt.vreplication " + "(workflow, source, pos, max_tps, max_replication_lag, time_updated, transaction_timestamp, state, db_name, workflow_type, workflow_sub_type, defer_secondary_keys, options) " + - `values ('Resharding', 'keyspace:\"ks\" shard:\"0\" key_range:{end:\"\\x80\"}', 'MariaDB/0-1-1083', 9223372036854775807, 9223372036854775807, 481823, 0, 'Running', 'db', 0, 0, false, '{}')` + `values ('Resharding', 'keyspace:"ks" shard:"0" key_range:{end:"\\x80"}', 'MariaDB/0-1-1083', 9223372036854775807, 9223372036854775807, 481823, 0, 'Running', 'db', 0, 0, false, '{}')` bls := binlogdatapb.BinlogSource{ Keyspace: "ks", @@ -401,7 +401,7 @@ func TestCreateVReplicationKeyRange(t *testing.T) { func TestCreateVReplicationTables(t *testing.T) { want := "insert into _vt.vreplication " + "(workflow, source, pos, max_tps, max_replication_lag, time_updated, transaction_timestamp, state, db_name, workflow_type, workflow_sub_type, defer_secondary_keys, options) " + - `values ('Resharding', 'keyspace:\"ks\" shard:\"0\" tables:\"a\" tables:\"b\"', 'MariaDB/0-1-1083', 9223372036854775807, 9223372036854775807, 481823, 0, 'Running', 'db', 0, 0, false, '{}')` + `values ('Resharding', 'keyspace:"ks" shard:"0" tables:"a" tables:"b"', 'MariaDB/0-1-1083', 9223372036854775807, 9223372036854775807, 481823, 0, 'Running', 'db', 0, 0, false, '{}')` bls := binlogdatapb.BinlogSource{ Keyspace: "ks", diff --git a/go/vt/sqlparser/ast_format.go b/go/vt/sqlparser/ast_format.go index 8d8a01a6eb2..f01bc9d117e 100644 --- a/go/vt/sqlparser/ast_format.go +++ b/go/vt/sqlparser/ast_format.go @@ -839,7 +839,9 @@ func (idx *IndexDefinition) 
Format(buf *TrackedBuffer) { buf.astPrintf(idx, ")") for _, opt := range idx.Options { - buf.astPrintf(idx, " %s", opt.Name) + if opt.Name != "" { + buf.astPrintf(idx, " %s", opt.Name) + } if opt.String != "" { buf.astPrintf(idx, " %#s", opt.String) } else if opt.Value != nil { diff --git a/go/vt/sqlparser/parse_test.go b/go/vt/sqlparser/parse_test.go index 0fef81f4514..4dddb9a12af 100644 --- a/go/vt/sqlparser/parse_test.go +++ b/go/vt/sqlparser/parse_test.go @@ -1089,7 +1089,7 @@ var ( output: "select /* quote quote in string */ 'a\\'a' from t", }, { input: "select /* double quote quote in string */ \"a\"\"a\" from t", - output: "select /* double quote quote in string */ 'a\\\"a' from t", + output: "select /* double quote quote in string */ 'a\"a' from t", }, { input: "select /* quote in double quoted string */ \"a'a\" from t", output: "select /* quote in double quoted string */ 'a\\'a' from t", @@ -1098,7 +1098,8 @@ var ( }, { input: "select /* literal backslash in string */ 'a\\\\na' from t", }, { - input: "select /* all escapes */ '\\0\\'\\\"\\b\\n\\r\\t\\Z\\\\' from t", + input: "select /* all escapes */ '\\0\\'\\\"\\b\\n\\r\\t\\Z\\\\' from t", + output: "select /* all escapes */ '\\0\\'\"\\b\\n\\r\\t\\Z\\\\' from t", }, { input: "select /* non-escape */ '\\x' from t", output: "select /* non-escape */ 'x' from t", @@ -2928,10 +2929,10 @@ var ( output: "deallocate /* comment */ prepare stmt1", }, { input: `SELECT JSON_PRETTY('{"a":"10","b":"15","x":"25"}')`, - output: `select json_pretty('{\"a\":\"10\",\"b\":\"15\",\"x\":\"25\"}') from dual`, + output: `select json_pretty('{"a":"10","b":"15","x":"25"}') from dual`, }, { input: `SELECT JSON_PRETTY(N'{"a":"10","b":"15","x":"25"}')`, - output: `select json_pretty(N'{\"a\":\"10\",\"b\":\"15\",\"x\":\"25\"}') from dual`, + output: `select json_pretty(N'{"a":"10","b":"15","x":"25"}') from dual`, /*We need to ignore this test because, after the normalizer, we change the produced NChar string into an introducer expression, so the vttablet will never see a NChar string */ ignoreNormalizerTest: true, @@ -2946,13 +2947,13 @@ var ( output: "select jcol, json_storage_size(jcol) as Size from jtable", }, { input: `SELECT jcol, JSON_STORAGE_SIZE(N'{"a":"10","b":"15","x":"25"}') AS Size FROM jtable`, - output: `select jcol, json_storage_size(N'{\"a\":\"10\",\"b\":\"15\",\"x\":\"25\"}') as Size from jtable`, + output: `select jcol, json_storage_size(N'{"a":"10","b":"15","x":"25"}') as Size from jtable`, /*We need to ignore this test because, after the normalizer, we change the produced NChar string into an introducer expression, so the vttablet will never see a NChar string */ ignoreNormalizerTest: true, }, { input: `SELECT JSON_STORAGE_SIZE('[100, "sakila", [1, 3, 5], 425.05]') AS A, JSON_STORAGE_SIZE('{"a": 1000, "b": "a", "c": "[1, 3, 5, 7]"}') AS B, JSON_STORAGE_SIZE('{"a": 1000, "b": "wxyz", "c": "[1, 3, 5, 7]"}') AS C,JSON_STORAGE_SIZE('[100, "json", [[10, 20, 30], 3, 5], 425.05]') AS D`, - output: `select json_storage_size('[100, \"sakila\", [1, 3, 5], 425.05]') as A, json_storage_size('{\"a\": 1000, \"b\": \"a\", \"c\": \"[1, 3, 5, 7]\"}') as B, json_storage_size('{\"a\": 1000, \"b\": \"wxyz\", \"c\": \"[1, 3, 5, 7]\"}') as C, json_storage_size('[100, \"json\", [[10, 20, 30], 3, 5], 425.05]') as D from dual`, + output: `select json_storage_size('[100, "sakila", [1, 3, 5], 425.05]') as A, json_storage_size('{"a": 1000, "b": "a", "c": "[1, 3, 5, 7]"}') as B, json_storage_size('{"a": 1000, "b": "wxyz", "c": "[1, 3, 5, 7]"}') as C, 
json_storage_size('[100, "json", [[10, 20, 30], 3, 5], 425.05]') as D from dual`, }, { input: "SELECT JSON_STORAGE_SIZE(@j)", output: "select json_storage_size(@j) from dual", @@ -2961,10 +2962,10 @@ var ( output: "select json_storage_free(jcol) from jtable", }, { input: `SELECT JSON_STORAGE_FREE('{"a":"10","b":"15","x":"25"}')`, - output: `select json_storage_free('{\"a\":\"10\",\"b\":\"15\",\"x\":\"25\"}') from dual`, + output: `select json_storage_free('{"a":"10","b":"15","x":"25"}') from dual`, }, { input: `SELECT JSON_STORAGE_FREE(N'{"a":"10","b":"15","x":"25"}')`, - output: `select json_storage_free(N'{\"a\":\"10\",\"b\":\"15\",\"x\":\"25\"}') from dual`, + output: `select json_storage_free(N'{"a":"10","b":"15","x":"25"}') from dual`, /*We need to ignore this test because, after the normalizer, we change the produced NChar string into an introducer expression, so the vttablet will never see a NChar string */ ignoreNormalizerTest: true, @@ -3003,13 +3004,13 @@ var ( output: "select trim(both 'a' from 'abc') from dual", }, { input: `SELECT * FROM JSON_TABLE('[ {"c1": null} ]','$[*]' COLUMNS( c1 INT PATH '$.c1' ERROR ON ERROR )) as jt`, - output: `select * from json_table('[ {\"c1\": null} ]', '$[*]' columns( + output: `select * from json_table('[ {"c1": null} ]', '$[*]' columns( c1 INT path '$.c1' error on error ) ) as jt`, }, { input: `SELECT * FROM JSON_TABLE( '[{"a": 1, "b": [11,111]}, {"a": 2, "b": [22,222]}]', '$[*]' COLUMNS(a INT PATH '$.a', NESTED PATH '$.b[*]' COLUMNS (b1 INT PATH '$'), NESTED PATH '$.b[*]' COLUMNS (b2 INT PATH '$'))) AS jt`, - output: `select * from json_table('[{\"a\": 1, \"b\": [11,111]}, {\"a\": 2, \"b\": [22,222]}]', '$[*]' columns( + output: `select * from json_table('[{"a": 1, "b": [11,111]}, {"a": 2, "b": [22,222]}]', '$[*]' columns( a INT path '$.a' , nested path '$.b[*]' columns( b1 INT path '$' @@ -3021,22 +3022,22 @@ var ( ) as jt`, }, { input: `SELECT * FROM JSON_TABLE('[ {"c1": null} ]','$[*]' COLUMNS( c1 INT PATH '$.c1' ERROR ON ERROR )) as jt`, - output: `select * from json_table('[ {\"c1\": null} ]', '$[*]' columns( + output: `select * from json_table('[ {"c1": null} ]', '$[*]' columns( c1 INT path '$.c1' error on error ) ) as jt`, }, { input: `SELECT * FROM JSON_TABLE('[{"a":"3"},{"a":2},{"b":1},{"a":0},{"a":[1,2]}]', "$[*]" COLUMNS(rowid FOR ORDINALITY, ac VARCHAR(100) PATH "$.a" DEFAULT '111' ON EMPTY DEFAULT '999' ON ERROR, aj JSON PATH "$.a" DEFAULT '{"x": 333}' ON EMPTY, bx INT EXISTS PATH "$.b" ) ) AS tt`, - output: `select * from json_table('[{\"a\":\"3\"},{\"a\":2},{\"b\":1},{\"a\":0},{\"a\":[1,2]}]', '$[*]' columns( + output: `select * from json_table('[{"a":"3"},{"a":2},{"b":1},{"a":0},{"a":[1,2]}]', '$[*]' columns( rowid for ordinality, ac VARCHAR(100) path '$.a' default '111' on empty default '999' on error , - aj JSON path '$.a' default '{\"x\": 333}' on empty , + aj JSON path '$.a' default '{"x": 333}' on empty , bx INT exists path '$.b' ) ) as tt`, }, { input: `SELECT * FROM JSON_TABLE( '[ {"a": 1, "b": [11,111]}, {"a": 2, "b": [22,222]}, {"a":3}]', '$[*]' COLUMNS( a INT PATH '$.a', NESTED PATH '$.b[*]' COLUMNS (b INT PATH '$') ) ) AS jt WHERE b IS NOT NULL`, - output: `select * from json_table('[ {\"a\": 1, \"b\": [11,111]}, {\"a\": 2, \"b\": [22,222]}, {\"a\":3}]', '$[*]' columns( + output: `select * from json_table('[ {"a": 1, "b": [11,111]}, {"a": 2, "b": [22,222]}, {"a":3}]', '$[*]' columns( a INT path '$.a' , nested path '$.b[*]' columns( b INT path '$' @@ -3045,14 +3046,14 @@ var ( ) as jt where b is not null`, }, { 
input: `SELECT * FROM JSON_TABLE( '[{"x":2,"y":"8"},{"x":"3","y":"7"},{"x":"4","y":6}]', "$[1]" COLUMNS( xval VARCHAR(100) PATH "$.x", yval VARCHAR(100) PATH "$.y" ) ) AS jt1`, - output: `select * from json_table('[{\"x\":2,\"y\":\"8\"},{\"x\":\"3\",\"y\":\"7\"},{\"x\":\"4\",\"y\":6}]', '$[1]' columns( + output: `select * from json_table('[{"x":2,"y":"8"},{"x":"3","y":"7"},{"x":"4","y":6}]', '$[1]' columns( xval VARCHAR(100) path '$.x' , yval VARCHAR(100) path '$.y' ) ) as jt1`, }, { input: `SELECT * FROM JSON_TABLE( '[{"a": "a_val","b": [{"c": "c_val", "l": [1,2]}]},{"a": "a_val", "b": [{"c": "c_val","l": [11]}, {"c": "c_val", "l": [22]}]}]', '$[*]' COLUMNS( top_ord FOR ORDINALITY, apath VARCHAR(10) PATH '$.a', NESTED PATH '$.b[*]' COLUMNS ( bpath VARCHAR(10) PATH '$.c', ord FOR ORDINALITY, NESTED PATH '$.l[*]' COLUMNS (lpath varchar(10) PATH '$') ) )) as jt`, - output: `select * from json_table('[{\"a\": \"a_val\",\"b\": [{\"c\": \"c_val\", \"l\": [1,2]}]},{\"a\": \"a_val\", \"b\": [{\"c\": \"c_val\",\"l\": [11]}, {\"c\": \"c_val\", \"l\": [22]}]}]', '$[*]' columns( + output: `select * from json_table('[{"a": "a_val","b": [{"c": "c_val", "l": [1,2]}]},{"a": "a_val", "b": [{"c": "c_val","l": [11]}, {"c": "c_val", "l": [22]}]}]', '$[*]' columns( top_ord for ordinality, apath VARCHAR(10) path '$.a' , nested path '$.b[*]' columns( @@ -3066,7 +3067,7 @@ var ( ) as jt`, }, { input: `SELECT * FROM JSON_TABLE('[{"x":2,"y":"8"},{"x":"3","y":"7"},{"x":"4","y":6}]', "$[1]" COLUMNS( xval VARCHAR(100) PATH "$.x", yval VARCHAR(100) PATH "$.y")) AS jt1;`, - output: `select * from json_table('[{\"x\":2,\"y\":\"8\"},{\"x\":\"3\",\"y\":\"7\"},{\"x\":\"4\",\"y\":6}]', '$[1]' columns( + output: `select * from json_table('[{"x":2,"y":"8"},{"x":"3","y":"7"},{"x":"4","y":6}]', '$[1]' columns( xval VARCHAR(100) path '$.x' , yval VARCHAR(100) path '$.y' ) @@ -3106,7 +3107,7 @@ var ( output: "select json_quote(BIN(11)) from dual", }, { input: `SELECT JSON_QUOTE('null'), JSON_QUOTE('"null"')`, - output: `select json_quote('null'), json_quote('\"null\"') from dual`, + output: `select json_quote('null'), json_quote('"null"') from dual`, }, { input: "select t1.a, dt.a from t1, lateral (select t1.a+t2.a as a from t2) dt", output: "select t1.a, dt.a from t1, lateral (select t1.a + t2.a as a from t2) as dt", @@ -3115,37 +3116,37 @@ var ( output: "select b from v1 as vq1, lateral (select count(*) from v1 as vq2 having vq1.b = 3) as dt", }, { input: `SELECT JSON_SCHEMA_VALID('{"type":"string","pattern":"("}', '"abc"')`, - output: `select json_schema_valid('{\"type\":\"string\",\"pattern\":\"(\"}', '\"abc\"') from dual`, + output: `select json_schema_valid('{"type":"string","pattern":"("}', '"abc"') from dual`, }, { input: `SELECT JSON_SCHEMA_VALID('{"type":"string","pattern":"("}', @a)`, - output: `select json_schema_valid('{\"type\":\"string\",\"pattern\":\"(\"}', @a) from dual`, + output: `select json_schema_valid('{"type":"string","pattern":"("}', @a) from dual`, }, { input: `SELECT JSON_SCHEMA_VALID(@b, BIN(1))`, output: `select json_schema_valid(@b, BIN(1)) from dual`, }, { input: `SELECT JSON_SCHEMA_VALID(N'{"type":"string","pattern":"("}', '"abc"')`, - output: `select json_schema_valid(N'{\"type\":\"string\",\"pattern\":\"(\"}', '\"abc\"') from dual`, + output: `select json_schema_valid(N'{"type":"string","pattern":"("}', '"abc"') from dual`, /*We need to ignore this test because, after the normalizer, we change the produced NChar string into an introducer expression, so the vttablet will never see a NChar string 
*/ ignoreNormalizerTest: true, }, { input: `SELECT JSON_SCHEMA_VALIDATION_REPORT('{"type":"string","pattern":"("}', '"abc"')`, - output: `select json_schema_validation_report('{\"type\":\"string\",\"pattern\":\"(\"}', '\"abc\"') from dual`, + output: `select json_schema_validation_report('{"type":"string","pattern":"("}', '"abc"') from dual`, }, { input: `SELECT JSON_SCHEMA_VALIDATION_REPORT('{"type":"string","pattern":"("}', @a)`, - output: `select json_schema_validation_report('{\"type\":\"string\",\"pattern\":\"(\"}', @a) from dual`, + output: `select json_schema_validation_report('{"type":"string","pattern":"("}', @a) from dual`, }, { input: `SELECT JSON_SCHEMA_VALIDATION_REPORT(@b, BIN(1))`, output: `select json_schema_validation_report(@b, BIN(1)) from dual`, }, { input: `SELECT JSON_SCHEMA_VALIDATION_REPORT(N'{"type":"string","pattern":"("}', '"abc"')`, - output: `select json_schema_validation_report(N'{\"type\":\"string\",\"pattern\":\"(\"}', '\"abc\"') from dual`, + output: `select json_schema_validation_report(N'{"type":"string","pattern":"("}', '"abc"') from dual`, /*We need to ignore this test because, after the normalizer, we change the produced NChar string into an introducer expression, so the vttablet will never see a NChar string */ ignoreNormalizerTest: true, }, { input: `SELECT JSON_CONTAINS('{"a": 1, "b": 2, "c": {"d": 4}}', '1')`, - output: `select json_contains('{\"a\": 1, \"b\": 2, \"c\": {\"d\": 4}}', '1') from dual`, + output: `select json_contains('{"a": 1, "b": 2, "c": {"d": 4}}', '1') from dual`, }, { input: "SELECT JSON_CONTAINS(@j, @j2)", output: "select json_contains(@j, @j2) from dual", @@ -3157,7 +3158,7 @@ var ( output: "select json_contains_path(@j, 'one', '$.a', '$.e') from dual", }, { input: `SELECT JSON_CONTAINS_PATH('{"a": 1, "b": 2, "c": {"d": 4}}', 'one', '$.a', '$.e')`, - output: `select json_contains_path('{\"a\": 1, \"b\": 2, \"c\": {\"d\": 4}}', 'one', '$.a', '$.e') from dual`, + output: `select json_contains_path('{"a": 1, "b": 2, "c": {"d": 4}}', 'one', '$.a', '$.e') from dual`, }, { input: "SELECT JSON_CONTAINS_PATH(@j, TRIM('one'), '$.a', '$.e')", output: "select json_contains_path(@j, trim('one'), '$.a', '$.e') from dual", @@ -3172,19 +3173,19 @@ var ( output: "select c, json_extract(c, '$.id'), g from jemp where json_extract(c, '$.id') > 1 order by json_extract(c, '$.name') asc", }, { input: `SELECT JSON_EXTRACT('{"a": 1, "b": 2, "c": {"d": 4}}', '$.a', @j)`, - output: `select json_extract('{\"a\": 1, \"b\": 2, \"c\": {\"d\": 4}}', '$.a', @j) from dual`, + output: `select json_extract('{"a": 1, "b": 2, "c": {"d": 4}}', '$.a', @j) from dual`, }, { input: "SELECT JSON_EXTRACT(@k, TRIM('abc'))", output: `select json_extract(@k, trim('abc')) from dual`, }, { input: `SELECT JSON_KEYS('{\"a\": 1, \"b\": 2, \"c\": {\"d\": 4}}', '$.a')`, - output: `select json_keys('{\"a\": 1, \"b\": 2, \"c\": {\"d\": 4}}', '$.a') from dual`, + output: `select json_keys('{"a": 1, "b": 2, "c": {"d": 4}}', '$.a') from dual`, }, { input: `SELECT JSON_KEYS('{\"a\": 1, \"b\": 2, \"c\": {\"d\": 4}}')`, - output: `select json_keys('{\"a\": 1, \"b\": 2, \"c\": {\"d\": 4}}') from dual`, + output: `select json_keys('{"a": 1, "b": 2, "c": {"d": 4}}') from dual`, }, { input: `SELECT JSON_OVERLAPS('{\"a\": 1, \"b\": 2, \"c\": {\"d\": 4}}', '$.a')`, - output: `select json_overlaps('{\"a\": 1, \"b\": 2, \"c\": {\"d\": 4}}', '$.a') from dual`, + output: `select json_overlaps('{"a": 1, "b": 2, "c": {"d": 4}}', '$.a') from dual`, }, { input: "SELECT JSON_OVERLAPS(@j, @k)", output: 
"select json_overlaps(@j, @k) from dual", @@ -3196,10 +3197,10 @@ var ( output: "select json_search(@j, 'one', 'abc') from dual", }, { input: `SELECT JSON_SEARCH('{\"a\": 1, \"b\": 2, \"c\": {\"d\": 4}}', @j, BIN(2))`, - output: `select json_search('{\"a\": 1, \"b\": 2, \"c\": {\"d\": 4}}', @j, BIN(2)) from dual`, + output: `select json_search('{"a": 1, "b": 2, "c": {"d": 4}}', @j, BIN(2)) from dual`, }, { input: `SELECT JSON_SEARCH('{\"a\": 1, \"b\": 2, \"c\": {\"d\": 4}}', 'all', '10', NULL)`, - output: `select json_search('{\"a\": 1, \"b\": 2, \"c\": {\"d\": 4}}', 'all', '10', null) from dual`, + output: `select json_search('{"a": 1, "b": 2, "c": {"d": 4}}', 'all', '10', null) from dual`, }, { input: "SELECT JSON_SEARCH(@j, 'all', '%b%', '', '$[3]')", output: "select json_search(@j, 'all', '%b%', '', '$[3]') from dual", @@ -3208,7 +3209,7 @@ var ( output: "select json_search(@j, 'all', '%b%', 'a', '$[3]') from dual", }, { input: `SELECT JSON_VALUE('{\"a\": 1, \"b\": 2, \"c\": {\"d\": 4}}', '$.a')`, - output: `select json_value('{\"a\": 1, \"b\": 2, \"c\": {\"d\": 4}}', '$.a') from dual`, + output: `select json_value('{"a": 1, "b": 2, "c": {"d": 4}}', '$.a') from dual`, }, { input: `SELECT JSON_VALUE(@j, @k)`, output: `select json_value(@j, @k) from dual`, @@ -3220,40 +3221,40 @@ var ( output: `select json_value(@j, @k returning DECIMAL(4, 2)) from dual`, }, { input: `SELECT JSON_VALUE('{"fname": "Joe", "lname": "Palmer"}', '$.fname' returning char(49) Charset utf8mb4 error on error)`, - output: `select json_value('{\"fname\": \"Joe\", \"lname\": \"Palmer\"}', '$.fname' returning char(49) character set utf8mb4 error on error) from dual`, + output: `select json_value('{"fname": "Joe", "lname": "Palmer"}', '$.fname' returning char(49) character set utf8mb4 error on error) from dual`, }, { input: `SELECT JSON_VALUE('{"item": "shoes", "price": "49.95"}', '$.price' NULL ON EMPTY) `, - output: `select json_value('{\"item\": \"shoes\", \"price\": \"49.95\"}', '$.price' null on empty) from dual`, + output: `select json_value('{"item": "shoes", "price": "49.95"}', '$.price' null on empty) from dual`, }, { input: `SELECT JSON_VALUE('{"item": "shoes", "price": "49.95"}', '$.price' NULL ON ERROR) `, - output: `select json_value('{\"item\": \"shoes\", \"price\": \"49.95\"}', '$.price' null on error) from dual`, + output: `select json_value('{"item": "shoes", "price": "49.95"}', '$.price' null on error) from dual`, }, { input: `SELECT JSON_VALUE('{"item": "shoes", "price": "49.95"}', '$.price' NULL ON EMPTY ERROR ON ERROR) `, - output: `select json_value('{\"item\": \"shoes\", \"price\": \"49.95\"}', '$.price' null on empty error on error) from dual`, + output: `select json_value('{"item": "shoes", "price": "49.95"}', '$.price' null on empty error on error) from dual`, }, { input: `select json_value(@j, @k RETURNING FLOAT NULL ON EMPTY ERROR ON ERROR) from dual`, output: `select json_value(@j, @k returning FLOAT null on empty error on error) from dual`, }, { input: `SELECT 17 MEMBER OF ('[23, "abc", 17, "ab", 10]')`, - output: `select 17 member of ('[23, \"abc\", 17, \"ab\", 10]') from dual`, + output: `select 17 member of ('[23, "abc", 17, "ab", 10]') from dual`, }, { input: "SELECT @j MEMBER OF (@k)", output: "select @j member of (@k) from dual", }, { input: `SELECT 17 MEMBER OF('[23, "abc", "17", "ab", 10]'), "17" MEMBER OF('[23, "abc", 17, "ab", 10]')`, - output: `select 17 member of ('[23, \"abc\", \"17\", \"ab\", 10]'), '17' member of ('[23, \"abc\", 17, \"ab\", 10]') from dual`, + output: 
`select 17 member of ('[23, "abc", "17", "ab", 10]'), '17' member of ('[23, "abc", 17, "ab", 10]') from dual`, }, { input: `SELECT JSON_DEPTH('{}'), JSON_DEPTH('[]'), JSON_DEPTH('true')`, output: `select json_depth('{}'), json_depth('[]'), json_depth('true') from dual`, }, { input: `SELECT JSON_LENGTH('{"a": 1, "b": {"c": 30}}')`, - output: `select json_length('{\"a\": 1, \"b\": {\"c\": 30}}') from dual`, + output: `select json_length('{"a": 1, "b": {"c": 30}}') from dual`, }, { input: `SELECT JSON_LENGTH('{"a": 1, "b": {"c": 30}}', '$.b');`, - output: `select json_length('{\"a\": 1, \"b\": {\"c\": 30}}', '$.b') from dual`, + output: `select json_length('{"a": 1, "b": {"c": 30}}', '$.b') from dual`, }, { input: `SELECT JSON_LENGTH('{\"a\": 1, \"b\": {\"c\": 30}}', @j);`, - output: `select json_length('{\"a\": 1, \"b\": {\"c\": 30}}', @j) from dual`, + output: `select json_length('{"a": 1, "b": {"c": 30}}', @j) from dual`, }, { input: `SELECT jcol, JSON_LENGTH(jcol)`, output: `select jcol, json_length(jcol) from dual`, @@ -3265,49 +3266,49 @@ var ( output: `select json_type(json_extract(@j, '$.a[0]')) from dual`, }, { input: `SELECT JSON_VALID('{\"a\": 1}')`, - output: `select json_valid('{\"a\": 1}') from dual`, + output: `select json_valid('{"a": 1}') from dual`, }, { input: "SELECT JSON_VALID(@j)", output: "select json_valid(@j) from dual", }, { input: `SELECT JSON_ARRAY_APPEND('{ "a": 1, "b": [2, 3]}','$[1]', 'x')`, - output: `select json_array_append('{ \"a\": 1, \"b\": [2, 3]}', '$[1]', 'x') from dual`, + output: `select json_array_append('{ "a": 1, "b": [2, 3]}', '$[1]', 'x') from dual`, }, { input: `SELECT JSON_ARRAY_APPEND('{ "a": 1, "b": [2, 3]}','$[1]', 'x', '$[2]', 1)`, - output: `select json_array_append('{ \"a\": 1, \"b\": [2, 3]}', '$[1]', 'x', '$[2]', 1) from dual`, + output: `select json_array_append('{ "a": 1, "b": [2, 3]}', '$[1]', 'x', '$[2]', 1) from dual`, }, { input: `SELECT JSON_ARRAY_APPEND('{ "a": 1, "b": [2, 3]}','$[1]', 'x', @i, @j)`, - output: `select json_array_append('{ \"a\": 1, \"b\": [2, 3]}', '$[1]', 'x', @i, @j) from dual`, + output: `select json_array_append('{ "a": 1, "b": [2, 3]}', '$[1]', 'x', @i, @j) from dual`, }, { input: `SELECT JSON_ARRAY_APPEND('{ "a": 1, "b": [2, 3]}', @j, 1)`, - output: `select json_array_append('{ \"a\": 1, \"b\": [2, 3]}', @j, 1) from dual`, + output: `select json_array_append('{ "a": 1, "b": [2, 3]}', @j, 1) from dual`, }, { input: `SELECT JSON_ARRAY_APPEND('{ "a": 1, "b": [2, 3]}', '$[1]', @j)`, - output: `select json_array_append('{ \"a\": 1, \"b\": [2, 3]}', '$[1]', @j) from dual`, + output: `select json_array_append('{ "a": 1, "b": [2, 3]}', '$[1]', @j) from dual`, }, { input: `SELECT JSON_ARRAY_APPEND('{ "a": 1, "b": [2, 3]}', @j, @k)`, - output: `select json_array_append('{ \"a\": 1, \"b\": [2, 3]}', @j, @k) from dual`, + output: `select json_array_append('{ "a": 1, "b": [2, 3]}', @j, @k) from dual`, }, { input: "SELECT JSON_ARRAY_APPEND(@i,@j,@k)", output: `select json_array_append(@i, @j, @k) from dual`, }, { input: `SELECT JSON_ARRAY_INSERT('{ "a": 1, "b": [2, 3]}','$[1]', 'x')`, - output: `select json_array_insert('{ \"a\": 1, \"b\": [2, 3]}', '$[1]', 'x') from dual`, + output: `select json_array_insert('{ "a": 1, "b": [2, 3]}', '$[1]', 'x') from dual`, }, { input: `SELECT JSON_ARRAY_INSERT('{ "a": 1, "b": [2, 3]}','$[1]', 'x', '$[2]', 1)`, - output: `select json_array_insert('{ \"a\": 1, \"b\": [2, 3]}', '$[1]', 'x', '$[2]', 1) from dual`, + output: `select json_array_insert('{ "a": 1, "b": [2, 3]}', '$[1]', 'x', 
'$[2]', 1) from dual`, }, { input: `SELECT JSON_ARRAY_INSERT('{ "a": 1, "b": [2, 3]}','$[1]', 'x', @i, @j)`, - output: `select json_array_insert('{ \"a\": 1, \"b\": [2, 3]}', '$[1]', 'x', @i, @j) from dual`, + output: `select json_array_insert('{ "a": 1, "b": [2, 3]}', '$[1]', 'x', @i, @j) from dual`, }, { input: `SELECT JSON_ARRAY_INSERT('{ "a": 1, "b": [2, 3]}', @j, 1)`, - output: `select json_array_insert('{ \"a\": 1, \"b\": [2, 3]}', @j, 1) from dual`, + output: `select json_array_insert('{ "a": 1, "b": [2, 3]}', @j, 1) from dual`, }, { input: `SELECT JSON_ARRAY_INSERT('{ "a": 1, "b": [2, 3]}', '$[1]', @j)`, - output: `select json_array_insert('{ \"a\": 1, \"b\": [2, 3]}', '$[1]', @j) from dual`, + output: `select json_array_insert('{ "a": 1, "b": [2, 3]}', '$[1]', @j) from dual`, }, { input: `SELECT JSON_ARRAY_INSERT('{ "a": 1, "b": [2, 3]}', @j, @k)`, - output: `select json_array_insert('{ \"a\": 1, \"b\": [2, 3]}', @j, @k) from dual`, + output: `select json_array_insert('{ "a": 1, "b": [2, 3]}', @j, @k) from dual`, }, { input: "SELECT JSON_ARRAY_INSERT(@i,@j,@k)", output: "select json_array_insert(@i, @j, @k) from dual", @@ -3316,22 +3317,22 @@ var ( output: "select json_array_insert(@j, '$[0]', 'x', '$[2][1]', 'y') from dual", }, { input: `SELECT JSON_INSERT('{ "a": 1, "b": [2, 3]}','$[1]', 'x')`, - output: `select json_insert('{ \"a\": 1, \"b\": [2, 3]}', '$[1]', 'x') from dual`, + output: `select json_insert('{ "a": 1, "b": [2, 3]}', '$[1]', 'x') from dual`, }, { input: `SELECT JSON_INSERT('{ "a": 1, "b": [2, 3]}','$[1]', 'x', '$[2]', 1)`, - output: `select json_insert('{ \"a\": 1, \"b\": [2, 3]}', '$[1]', 'x', '$[2]', 1) from dual`, + output: `select json_insert('{ "a": 1, "b": [2, 3]}', '$[1]', 'x', '$[2]', 1) from dual`, }, { input: `SELECT JSON_INSERT('{ "a": 1, "b": [2, 3]}','$[1]', 'x', @i, @j)`, - output: `select json_insert('{ \"a\": 1, \"b\": [2, 3]}', '$[1]', 'x', @i, @j) from dual`, + output: `select json_insert('{ "a": 1, "b": [2, 3]}', '$[1]', 'x', @i, @j) from dual`, }, { input: `SELECT JSON_INSERT('{ "a": 1, "b": [2, 3]}', @j, 1)`, - output: `select json_insert('{ \"a\": 1, \"b\": [2, 3]}', @j, 1) from dual`, + output: `select json_insert('{ "a": 1, "b": [2, 3]}', @j, 1) from dual`, }, { input: `SELECT JSON_INSERT('{ "a": 1, "b": [2, 3]}', '$[1]', @j)`, - output: `select json_insert('{ \"a\": 1, \"b\": [2, 3]}', '$[1]', @j) from dual`, + output: `select json_insert('{ "a": 1, "b": [2, 3]}', '$[1]', @j) from dual`, }, { input: `SELECT JSON_INSERT('{ "a": 1, "b": [2, 3]}', @j, @k)`, - output: `select json_insert('{ \"a\": 1, \"b\": [2, 3]}', @j, @k) from dual`, + output: `select json_insert('{ "a": 1, "b": [2, 3]}', @j, @k) from dual`, }, { input: "SELECT JSON_INSERT(@i,@j,@k)", output: "select json_insert(@i, @j, @k) from dual", @@ -3340,22 +3341,22 @@ var ( output: "select json_insert(@j, '$.a', 10, '$.c', '[true, false]') from dual", }, { input: `SELECT JSON_REPLACE('{ "a": 1, "b": [2, 3]}','$[1]', 'x')`, - output: `select json_replace('{ \"a\": 1, \"b\": [2, 3]}', '$[1]', 'x') from dual`, + output: `select json_replace('{ "a": 1, "b": [2, 3]}', '$[1]', 'x') from dual`, }, { input: `SELECT JSON_REPLACE('{ "a": 1, "b": [2, 3]}','$[1]', 'x', '$[2]', 1)`, - output: `select json_replace('{ \"a\": 1, \"b\": [2, 3]}', '$[1]', 'x', '$[2]', 1) from dual`, + output: `select json_replace('{ "a": 1, "b": [2, 3]}', '$[1]', 'x', '$[2]', 1) from dual`, }, { input: `SELECT JSON_REPLACE('{ "a": 1, "b": [2, 3]}','$[1]', 'x', @i, @j)`, - output: `select json_replace('{ \"a\": 1, 
\"b\": [2, 3]}', '$[1]', 'x', @i, @j) from dual`, + output: `select json_replace('{ "a": 1, "b": [2, 3]}', '$[1]', 'x', @i, @j) from dual`, }, { input: `SELECT JSON_REPLACE('{ "a": 1, "b": [2, 3]}', @j, 1)`, - output: `select json_replace('{ \"a\": 1, \"b\": [2, 3]}', @j, 1) from dual`, + output: `select json_replace('{ "a": 1, "b": [2, 3]}', @j, 1) from dual`, }, { input: `SELECT JSON_REPLACE('{ "a": 1, "b": [2, 3]}', '$[1]', @j)`, - output: `select json_replace('{ \"a\": 1, \"b\": [2, 3]}', '$[1]', @j) from dual`, + output: `select json_replace('{ "a": 1, "b": [2, 3]}', '$[1]', @j) from dual`, }, { input: `SELECT JSON_REPLACE('{ "a": 1, "b": [2, 3]}', @j, @k)`, - output: `select json_replace('{ \"a\": 1, \"b\": [2, 3]}', @j, @k) from dual`, + output: `select json_replace('{ "a": 1, "b": [2, 3]}', @j, @k) from dual`, }, { input: "SELECT JSON_REPLACE(@i,@j,@k)", output: "select json_replace(@i, @j, @k) from dual", @@ -3364,22 +3365,22 @@ var ( output: "select json_replace(@j, '$.a', 10, '$.c', '[true, false]') from dual", }, { input: `SELECT JSON_SET('{ "a": 1, "b": [2, 3]}','$[1]', 'x')`, - output: `select json_set('{ \"a\": 1, \"b\": [2, 3]}', '$[1]', 'x') from dual`, + output: `select json_set('{ "a": 1, "b": [2, 3]}', '$[1]', 'x') from dual`, }, { input: `SELECT JSON_SET('{ "a": 1, "b": [2, 3]}','$[1]', 'x', '$[2]', 1)`, - output: `select json_set('{ \"a\": 1, \"b\": [2, 3]}', '$[1]', 'x', '$[2]', 1) from dual`, + output: `select json_set('{ "a": 1, "b": [2, 3]}', '$[1]', 'x', '$[2]', 1) from dual`, }, { input: `SELECT JSON_SET('{ "a": 1, "b": [2, 3]}','$[1]', 'x', @i, @j)`, - output: `select json_set('{ \"a\": 1, \"b\": [2, 3]}', '$[1]', 'x', @i, @j) from dual`, + output: `select json_set('{ "a": 1, "b": [2, 3]}', '$[1]', 'x', @i, @j) from dual`, }, { input: `SELECT JSON_SET('{ "a": 1, "b": [2, 3]}', @j, 1)`, - output: `select json_set('{ \"a\": 1, \"b\": [2, 3]}', @j, 1) from dual`, + output: `select json_set('{ "a": 1, "b": [2, 3]}', @j, 1) from dual`, }, { input: `SELECT JSON_SET('{ "a": 1, "b": [2, 3]}', '$[1]', @j)`, - output: `select json_set('{ \"a\": 1, \"b\": [2, 3]}', '$[1]', @j) from dual`, + output: `select json_set('{ "a": 1, "b": [2, 3]}', '$[1]', @j) from dual`, }, { input: `SELECT JSON_SET('{ "a": 1, "b": [2, 3]}', @j, @k)`, - output: `select json_set('{ \"a\": 1, \"b\": [2, 3]}', @j, @k) from dual`, + output: `select json_set('{ "a": 1, "b": [2, 3]}', @j, @k) from dual`, }, { input: "SELECT JSON_SET(@i,@j,@k)", output: "select json_set(@i, @j, @k) from dual", @@ -3406,10 +3407,10 @@ var ( output: "select json_merge(@i, '[true, false]') from dual", }, { input: `SELECT JSON_MERGE_PATCH('{"name": "x"}', '{"id": 47}')`, - output: `select json_merge_patch('{\"name\": \"x\"}', '{\"id\": 47}') from dual`, + output: `select json_merge_patch('{"name": "x"}', '{"id": 47}') from dual`, }, { input: `SELECT JSON_MERGE_PATCH('{ "a": 1, "b":2 }','{ "a": 3, "c":4 }','{ "a": 5, "d":6 }');`, - output: `select json_merge_patch('{ \"a\": 1, \"b\":2 }', '{ \"a\": 3, \"c\":4 }', '{ \"a\": 5, \"d\":6 }') from dual`, + output: `select json_merge_patch('{ "a": 1, "b":2 }', '{ "a": 3, "c":4 }', '{ "a": 5, "d":6 }') from dual`, }, { input: "SELECT JSON_MERGE_PATCH('[1, 2]', '[true, false]', 'hello')", output: "select json_merge_patch('[1, 2]', '[true, false]', 'hello') from dual", @@ -3424,10 +3425,10 @@ var ( output: "select json_merge_patch(@i, '[true, false]') from dual", }, { input: `SELECT JSON_MERGE_PRESERVE('{"name": "x"}', '{"id": 47}')`, - output: `select 
json_merge_preserve('{\"name\": \"x\"}', '{\"id\": 47}') from dual`, + output: `select json_merge_preserve('{"name": "x"}', '{"id": 47}') from dual`, }, { input: `SELECT JSON_MERGE_PRESERVE('{ "a": 1, "b":2 }','{ "a": 3, "c":4 }','{ "a": 5, "d":6 }');`, - output: `select json_merge_preserve('{ \"a\": 1, \"b\":2 }', '{ \"a\": 3, \"c\":4 }', '{ \"a\": 5, \"d\":6 }') from dual`, + output: `select json_merge_preserve('{ "a": 1, "b":2 }', '{ "a": 3, "c":4 }', '{ "a": 5, "d":6 }') from dual`, }, { input: "SELECT JSON_MERGE_PRESERVE('[1, 2]', '[true, false]', 'hello')", output: "select json_merge_preserve('[1, 2]', '[true, false]', 'hello') from dual", @@ -3445,19 +3446,19 @@ var ( output: "select json_remove(@i, '$[1]') from dual", }, { input: `SELECT JSON_REMOVE('["a", ["b", "c"], "d"]', '$[1]')`, - output: `select json_remove('[\"a\", [\"b\", \"c\"], \"d\"]', '$[1]') from dual`, + output: `select json_remove('["a", ["b", "c"], "d"]', '$[1]') from dual`, }, { input: `SELECT JSON_REMOVE('["a", ["b", "c"], "d"]', @i)`, - output: `select json_remove('[\"a\", [\"b\", \"c\"], \"d\"]', @i) from dual`, + output: `select json_remove('["a", ["b", "c"], "d"]', @i) from dual`, }, { input: `SELECT JSON_REMOVE('["a", ["b", "c"], "d"]', @i, @j, '$[0]', '$[1]','$[2]')`, - output: `select json_remove('[\"a\", [\"b\", \"c\"], \"d\"]', @i, @j, '$[0]', '$[1]', '$[2]') from dual`, + output: `select json_remove('["a", ["b", "c"], "d"]', @i, @j, '$[0]', '$[1]', '$[2]') from dual`, }, { input: "SELECT JSON_UNQUOTE('abc')", output: "select json_unquote('abc') from dual", }, { input: `SELECT JSON_UNQUOTE('\"\\\\t\\\\u0032\"')`, - output: `select json_unquote('\"\\\\t\\\\u0032\"') from dual`, + output: `select json_unquote('"\\\\t\\\\u0032"') from dual`, }, { input: "SELECT JSON_UNQUOTE(@j)", output: "select json_unquote(@j) from dual", @@ -4537,7 +4538,8 @@ func TestSelectInto(t *testing.T) { input: "select * from t order by name limit 100 into outfile s3 'out_file_name'", output: "select * from t order by `name` asc limit 100 into outfile s3 'out_file_name'", }, { - input: `select * from TestPerson into outfile s3 's3://test-bucket/export_import/export/users.csv' fields terminated by ',' enclosed by '\"' escaped by '\\' overwrite on`, + input: `select * from TestPerson into outfile s3 's3://test-bucket/export_import/export/users.csv' fields terminated by ',' enclosed by '\"' escaped by '\\' overwrite on`, + output: `select * from TestPerson into outfile s3 's3://test-bucket/export_import/export/users.csv' fields terminated by ',' enclosed by '"' escaped by '\\' overwrite on`, }, { input: "select * from t into dumpfile 'out_file_name'", }, { @@ -5880,6 +5882,10 @@ partition by range (YEAR(purchased)) subpartition by hash (TO_DAYS(purchased)) input: "create table t (id int, info JSON, INDEX zips((CAST(info->'$.field' AS unsigned ARRAY))))", output: "create table t (\n\tid int,\n\tinfo JSON,\n\tkey zips ((cast(info -> '$.field' as unsigned array)))\n)", }, + { + input: "create table t (id int, s varchar(255) default 'foo\"bar')", + output: "create table t (\n\tid int,\n\ts varchar(255) default 'foo\"bar'\n)", + }, } parser := NewTestParser() for _, test := range createTableQueries { diff --git a/go/vt/sqlparser/testdata/select_cases.txt b/go/vt/sqlparser/testdata/select_cases.txt index 3edec6dae7e..6fedca5ddfd 100644 --- a/go/vt/sqlparser/testdata/select_cases.txt +++ b/go/vt/sqlparser/testdata/select_cases.txt @@ -4286,7 +4286,7 @@ INPUT select * from t1 where MATCH a,b AGAINST ('"xt indexes"' IN BOOLEAN MODE); END 
OUTPUT -select * from t1 where match(a, b) against ('\"xt indexes\"' in boolean mode) +select * from t1 where match(a, b) against ('"xt indexes"' in boolean mode) END INPUT select max(value) from t1 AS m LEFT JOIN t2 AS c1 ON m.c1id = c1.id AND c1.active = 'Yes' LEFT JOIN t3 AS c2 ON m.c2id = c2.id AND c2.active = 'Yes' WHERE m.pid=1 AND (c1.id IS NOT NULL OR c2.id IS NOT NULL); @@ -5660,7 +5660,7 @@ INPUT select * from t1 where MATCH a,b AGAINST ('"text search" "now support"' IN BOOLEAN MODE); END OUTPUT -select * from t1 where match(a, b) against ('\"text search\" \"now support\"' in boolean mode) +select * from t1 where match(a, b) against ('"text search" "now support"' in boolean mode) END INPUT select insert('hello', 1, -18446744073709551616, 'hi'); @@ -8048,7 +8048,7 @@ INPUT select * from t1 where MATCH a,b AGAINST ('"text i"' IN BOOLEAN MODE); END OUTPUT -select * from t1 where match(a, b) against ('\"text i\"' in boolean mode) +select * from t1 where match(a, b) against ('"text i"' in boolean mode) END INPUT select column_name,data_type,CHARACTER_OCTET_LENGTH, CHARACTER_MAXIMUM_LENGTH from information_schema.columns where table_name='t1' order by column_name; @@ -8144,7 +8144,7 @@ INPUT select * from t1 where MATCH a,b AGAINST ('"support now"' IN BOOLEAN MODE); END OUTPUT -select * from t1 where match(a, b) against ('\"support now\"' in boolean mode) +select * from t1 where match(a, b) against ('"support now"' in boolean mode) END INPUT select hex(inet_aton('127.1.1')); @@ -12212,7 +12212,7 @@ INPUT select * from t1 where MATCH a,b AGAINST ('"text search" +"now support"' IN BOOLEAN MODE); END OUTPUT -select * from t1 where match(a, b) against ('\"text search\" +\"now support\"' in boolean mode) +select * from t1 where match(a, b) against ('"text search" +"now support"' in boolean mode) END INPUT select trigger_name from information_schema.triggers where event_object_table='t1'; @@ -14750,7 +14750,7 @@ INPUT select * from t1 where MATCH a,b AGAINST('"space model' IN BOOLEAN MODE); END OUTPUT -select * from t1 where match(a, b) against ('\"space model' in boolean mode) +select * from t1 where match(a, b) against ('"space model' in boolean mode) END INPUT select @ujis4 = CONVERT(@utf84 USING ujis); @@ -14906,7 +14906,7 @@ INPUT select * from t1 where MATCH a,b AGAINST ('"text search" -"now support"' IN BOOLEAN MODE); END OUTPUT -select * from t1 where match(a, b) against ('\"text search\" -\"now support\"' in boolean mode) +select * from t1 where match(a, b) against ('"text search" -"now support"' in boolean mode) END INPUT select _rowid,t1._rowid,skey,sval from t1; @@ -17612,7 +17612,7 @@ INPUT select 'aaa','aa''a',"aa""a"; END OUTPUT -select 'aaa', 'aa\'a', 'aa\"a' from dual +select 'aaa', 'aa\'a', 'aa"a' from dual END INPUT select cast('18446744073709551615' as signed); @@ -18542,7 +18542,7 @@ INPUT select * from t1 where MATCH a,b AGAINST ('"now support"' IN BOOLEAN MODE); END OUTPUT -select * from t1 where match(a, b) against ('\"now support\"' in boolean mode) +select * from t1 where match(a, b) against ('"now support"' in boolean mode) END INPUT select date_add("1997-12-31 23:59:59",INTERVAL "10000:99:99" HOUR_SECOND); @@ -22148,7 +22148,7 @@ INPUT select * from t1 where MATCH a,b AGAINST ('"Now sUPPort"' IN BOOLEAN MODE); END OUTPUT -select * from t1 where match(a, b) against ('\"Now sUPPort\"' in boolean mode) +select * from t1 where match(a, b) against ('"Now sUPPort"' in boolean mode) END INPUT select sum(all a),count(all a),avg(all a),std(all a),variance(all a),bit_or(all 
a),bit_and(all a),min(all a),max(all a),min(all c),max(all c) from t1; diff --git a/go/vt/vtgate/planbuilder/testdata/select_cases.json b/go/vt/vtgate/planbuilder/testdata/select_cases.json index 38f0cb3044f..1e33194db7e 100644 --- a/go/vt/vtgate/planbuilder/testdata/select_cases.json +++ b/go/vt/vtgate/planbuilder/testdata/select_cases.json @@ -3195,8 +3195,8 @@ "Name": "main", "Sharded": false }, - "FieldQuery": "select json_quote('null'), json_quote('\\\"null\\\"'), json_object(BIN(1), 2, 'abc', ASCII(4)), json_array(1, 'abc', null, true, curtime()) from dual where 1 != 1", - "Query": "select json_quote('null'), json_quote('\\\"null\\\"'), json_object(BIN(1), 2, 'abc', ASCII(4)), json_array(1, 'abc', null, true, curtime()) from dual", + "FieldQuery": "select json_quote('null'), json_quote('\"null\"'), json_object(BIN(1), 2, 'abc', ASCII(4)), json_array(1, 'abc', null, true, curtime()) from dual where 1 != 1", + "Query": "select json_quote('null'), json_quote('\"null\"'), json_object(BIN(1), 2, 'abc', ASCII(4)), json_array(1, 'abc', null, true, curtime()) from dual", "Table": "dual" }, "TablesUsed": [ @@ -3296,8 +3296,8 @@ "Name": "main", "Sharded": false }, - "FieldQuery": "select json_schema_valid('{\\\"type\\\":\\\"string\\\",\\\"pattern\\\":\\\"(\\\"}', '\\\"abc\\\"'), json_schema_validation_report('{\\\"type\\\":\\\"string\\\",\\\"pattern\\\":\\\"(\\\"}', '\\\"abc\\\"') from dual where 1 != 1", - "Query": "select json_schema_valid('{\\\"type\\\":\\\"string\\\",\\\"pattern\\\":\\\"(\\\"}', '\\\"abc\\\"'), json_schema_validation_report('{\\\"type\\\":\\\"string\\\",\\\"pattern\\\":\\\"(\\\"}', '\\\"abc\\\"') from dual", + "FieldQuery": "select json_schema_valid('{\"type\":\"string\",\"pattern\":\"(\"}', '\"abc\"'), json_schema_validation_report('{\"type\":\"string\",\"pattern\":\"(\"}', '\"abc\"') from dual where 1 != 1", + "Query": "select json_schema_valid('{\"type\":\"string\",\"pattern\":\"(\"}', '\"abc\"'), json_schema_validation_report('{\"type\":\"string\",\"pattern\":\"(\"}', '\"abc\"') from dual", "Table": "dual" }, "TablesUsed": [ @@ -3318,8 +3318,8 @@ "Name": "main", "Sharded": false }, - "FieldQuery": "select json_contains('{\\\"a\\\": 1, \\\"b\\\": 2, \\\"c\\\": {\\\"d\\\": 4}}', '1'), json_contains_path('{\\\"a\\\": 1, \\\"b\\\": 2, \\\"c\\\": {\\\"d\\\": 4}}', 'one', '$.a', '$.e'), json_extract('[10, 20, [30, 40]]', '$[1]'), json_unquote(json_extract('[\\\"a\\\",\\\"b\\\"]', '$[1]')), json_keys('{\\\"a\\\": 1, \\\"b\\\": {\\\"c\\\": 30}}'), json_overlaps('[1,3,5,7]', '[2,5,7]'), json_search('[\\\"abc\\\"]', 'one', 'abc'), json_value('{\\\"fname\\\": \\\"Joe\\\", \\\"lname\\\": \\\"Palmer\\\"}', '$.fname'), json_array(4, 5) member of ('[[3,4],[4,5]]') from dual where 1 != 1", - "Query": "select json_contains('{\\\"a\\\": 1, \\\"b\\\": 2, \\\"c\\\": {\\\"d\\\": 4}}', '1'), json_contains_path('{\\\"a\\\": 1, \\\"b\\\": 2, \\\"c\\\": {\\\"d\\\": 4}}', 'one', '$.a', '$.e'), json_extract('[10, 20, [30, 40]]', '$[1]'), json_unquote(json_extract('[\\\"a\\\",\\\"b\\\"]', '$[1]')), json_keys('{\\\"a\\\": 1, \\\"b\\\": {\\\"c\\\": 30}}'), json_overlaps('[1,3,5,7]', '[2,5,7]'), json_search('[\\\"abc\\\"]', 'one', 'abc'), json_value('{\\\"fname\\\": \\\"Joe\\\", \\\"lname\\\": \\\"Palmer\\\"}', '$.fname'), json_array(4, 5) member of ('[[3,4],[4,5]]') from dual", + "FieldQuery": "select json_contains('{\"a\": 1, \"b\": 2, \"c\": {\"d\": 4}}', '1'), json_contains_path('{\"a\": 1, \"b\": 2, \"c\": {\"d\": 4}}', 'one', '$.a', '$.e'), json_extract('[10, 20, [30, 40]]', '$[1]'), 
json_unquote(json_extract('[\"a\",\"b\"]', '$[1]')), json_keys('{\"a\": 1, \"b\": {\"c\": 30}}'), json_overlaps('[1,3,5,7]', '[2,5,7]'), json_search('[\"abc\"]', 'one', 'abc'), json_value('{\"fname\": \"Joe\", \"lname\": \"Palmer\"}', '$.fname'), json_array(4, 5) member of ('[[3,4],[4,5]]') from dual where 1 != 1", + "Query": "select json_contains('{\"a\": 1, \"b\": 2, \"c\": {\"d\": 4}}', '1'), json_contains_path('{\"a\": 1, \"b\": 2, \"c\": {\"d\": 4}}', 'one', '$.a', '$.e'), json_extract('[10, 20, [30, 40]]', '$[1]'), json_unquote(json_extract('[\"a\",\"b\"]', '$[1]')), json_keys('{\"a\": 1, \"b\": {\"c\": 30}}'), json_overlaps('[1,3,5,7]', '[2,5,7]'), json_search('[\"abc\"]', 'one', 'abc'), json_value('{\"fname\": \"Joe\", \"lname\": \"Palmer\"}', '$.fname'), json_array(4, 5) member of ('[[3,4],[4,5]]') from dual", "Table": "dual" }, "TablesUsed": [ @@ -3384,8 +3384,8 @@ "Name": "main", "Sharded": false }, - "FieldQuery": "select json_depth('{}'), json_length('{\\\"a\\\": 1, \\\"b\\\": {\\\"c\\\": 30}}', '$.b'), json_type(json_extract('{\\\"a\\\": [10, true]}', '$.a')), json_valid('{\\\"a\\\": 1}') from dual where 1 != 1", - "Query": "select json_depth('{}'), json_length('{\\\"a\\\": 1, \\\"b\\\": {\\\"c\\\": 30}}', '$.b'), json_type(json_extract('{\\\"a\\\": [10, true]}', '$.a')), json_valid('{\\\"a\\\": 1}') from dual", + "FieldQuery": "select json_depth('{}'), json_length('{\"a\": 1, \"b\": {\"c\": 30}}', '$.b'), json_type(json_extract('{\"a\": [10, true]}', '$.a')), json_valid('{\"a\": 1}') from dual where 1 != 1", + "Query": "select json_depth('{}'), json_length('{\"a\": 1, \"b\": {\"c\": 30}}', '$.b'), json_type(json_extract('{\"a\": [10, true]}', '$.a')), json_valid('{\"a\": 1}') from dual", "Table": "dual" }, "TablesUsed": [ @@ -3406,8 +3406,8 @@ "Name": "main", "Sharded": false }, - "FieldQuery": "select json_array_append('{\\\"a\\\": 1}', '$', 'z'), json_array_insert('[\\\"a\\\", {\\\"b\\\": [1, 2]}, [3, 4]]', '$[0]', 'x', '$[2][1]', 'y'), json_insert('{ \\\"a\\\": 1, \\\"b\\\": [2, 3]}', '$.a', 10, '$.c', cast('[true, false]' as JSON)) from dual where 1 != 1", - "Query": "select json_array_append('{\\\"a\\\": 1}', '$', 'z'), json_array_insert('[\\\"a\\\", {\\\"b\\\": [1, 2]}, [3, 4]]', '$[0]', 'x', '$[2][1]', 'y'), json_insert('{ \\\"a\\\": 1, \\\"b\\\": [2, 3]}', '$.a', 10, '$.c', cast('[true, false]' as JSON)) from dual", + "FieldQuery": "select json_array_append('{\"a\": 1}', '$', 'z'), json_array_insert('[\"a\", {\"b\": [1, 2]}, [3, 4]]', '$[0]', 'x', '$[2][1]', 'y'), json_insert('{ \"a\": 1, \"b\": [2, 3]}', '$.a', 10, '$.c', cast('[true, false]' as JSON)) from dual where 1 != 1", + "Query": "select json_array_append('{\"a\": 1}', '$', 'z'), json_array_insert('[\"a\", {\"b\": [1, 2]}, [3, 4]]', '$[0]', 'x', '$[2][1]', 'y'), json_insert('{ \"a\": 1, \"b\": [2, 3]}', '$.a', 10, '$.c', cast('[true, false]' as JSON)) from dual", "Table": "dual" }, "TablesUsed": [ @@ -3428,8 +3428,8 @@ "Name": "main", "Sharded": false }, - "FieldQuery": "select json_merge('[1, 2]', '[true, false]'), json_merge_patch('{\\\"name\\\": \\\"x\\\"}', '{\\\"id\\\": 47}'), json_merge_preserve('[1, 2]', '{\\\"id\\\": 47}') from dual where 1 != 1", - "Query": "select json_merge('[1, 2]', '[true, false]'), json_merge_patch('{\\\"name\\\": \\\"x\\\"}', '{\\\"id\\\": 47}'), json_merge_preserve('[1, 2]', '{\\\"id\\\": 47}') from dual", + "FieldQuery": "select json_merge('[1, 2]', '[true, false]'), json_merge_patch('{\"name\": \"x\"}', '{\"id\": 47}'), json_merge_preserve('[1, 2]', '{\"id\": 47}') from 
dual where 1 != 1", + "Query": "select json_merge('[1, 2]', '[true, false]'), json_merge_patch('{\"name\": \"x\"}', '{\"id\": 47}'), json_merge_preserve('[1, 2]', '{\"id\": 47}') from dual", "Table": "dual" }, "TablesUsed": [ @@ -3450,8 +3450,8 @@ "Name": "main", "Sharded": false }, - "FieldQuery": "select json_remove('[1, [2, 3], 4]', '$[1]'), json_replace('{ \\\"a\\\": 1, \\\"b\\\": [2, 3]}', '$.a', 10, '$.c', '[true, false]'), json_set('{ \\\"a\\\": 1, \\\"b\\\": [2, 3]}', '$.a', 10, '$.c', '[true, false]'), json_unquote('\\\"abc\\\"') from dual where 1 != 1", - "Query": "select json_remove('[1, [2, 3], 4]', '$[1]'), json_replace('{ \\\"a\\\": 1, \\\"b\\\": [2, 3]}', '$.a', 10, '$.c', '[true, false]'), json_set('{ \\\"a\\\": 1, \\\"b\\\": [2, 3]}', '$.a', 10, '$.c', '[true, false]'), json_unquote('\\\"abc\\\"') from dual", + "FieldQuery": "select json_remove('[1, [2, 3], 4]', '$[1]'), json_replace('{ \"a\": 1, \"b\": [2, 3]}', '$.a', 10, '$.c', '[true, false]'), json_set('{ \"a\": 1, \"b\": [2, 3]}', '$.a', 10, '$.c', '[true, false]'), json_unquote('\"abc\"') from dual where 1 != 1", + "Query": "select json_remove('[1, [2, 3], 4]', '$[1]'), json_replace('{ \"a\": 1, \"b\": [2, 3]}', '$.a', 10, '$.c', '[true, false]'), json_set('{ \"a\": 1, \"b\": [2, 3]}', '$.a', 10, '$.c', '[true, false]'), json_unquote('\"abc\"') from dual", "Table": "dual" }, "TablesUsed": [ diff --git a/go/vt/vttablet/tabletmanager/rpc_vreplication_test.go b/go/vt/vttablet/tabletmanager/rpc_vreplication_test.go index f1211c87c8f..7ab959c1e17 100644 --- a/go/vt/vttablet/tabletmanager/rpc_vreplication_test.go +++ b/go/vt/vttablet/tabletmanager/rpc_vreplication_test.go @@ -71,7 +71,7 @@ const ( getMaxValForSequence = "select max(`id`) as maxval from `vt_%s`.`%s`" initSequenceTable = "insert into %a.%a (id, next_id, cache) values (0, %d, 1000) on duplicate key update next_id = if(next_id < %d, %d, next_id)" deleteWorkflow = "delete from _vt.vreplication where db_name = 'vt_%s' and workflow = '%s'" - updatePickedSourceTablet = `update _vt.vreplication set message='Picked source tablet: cell:\"%s\" uid:%d' where id=%d` + updatePickedSourceTablet = `update _vt.vreplication set message='Picked source tablet: cell:"%s" uid:%d' where id=%d` getRowsCopied = "SELECT rows_copied FROM _vt.vreplication WHERE id=%d" hasWorkflows = "select if(count(*) > 0, 1, 0) as has_workflows from _vt.vreplication where db_name = '%s'" readAllWorkflows = "select workflow, id, source, pos, stop_pos, max_tps, max_replication_lag, cell, tablet_types, time_updated, transaction_timestamp, state, message, db_name, rows_copied, tags, time_heartbeat, workflow_type, time_throttled, component_throttled, workflow_sub_type, defer_secondary_keys, options from _vt.vreplication where db_name = '%s'%s group by workflow, id order by workflow, id" @@ -135,7 +135,7 @@ func TestCreateVReplicationWorkflow(t *testing.T) { Cells: tenv.cells, AllTables: true, }, - query: fmt.Sprintf(`%s values ('%s', 'keyspace:\"%s\" shard:\"%s\" filter:{rules:{match:\"t1\" filter:\"select * from t1\"}}', '', 0, 0, '%s', '', now(), 0, 'Stopped', '%s', 1, 0, 0, '{}')`, + query: fmt.Sprintf(`%s values ('%s', 'keyspace:"%s" shard:"%s" filter:{rules:{match:"t1" filter:"select * from t1"}}', '', 0, 0, '%s', '', now(), 0, 'Stopped', '%s', 1, 0, 0, '{}')`, insertVReplicationPrefix, wf, sourceKs, shard, tenv.cells[0], tenv.dbName), }, { @@ -170,7 +170,7 @@ func TestCreateVReplicationWorkflow(t *testing.T) { DeferSecondaryKeys: true, AutoStart: true, }, - query: fmt.Sprintf(`%s values ('%s', 
'keyspace:\"%s\" shard:\"%s\" filter:{rules:{match:\"t1\" filter:\"select * from t1\"}} on_ddl:EXEC stop_after_copy:true source_time_zone:\"EDT\" target_time_zone:\"UTC\"', '', 0, 0, '%s', '', now(), 0, 'Stopped', '%s', 1, 0, 1, '{}')`, + query: fmt.Sprintf(`%s values ('%s', 'keyspace:"%s" shard:"%s" filter:{rules:{match:"t1" filter:"select * from t1"}} on_ddl:EXEC stop_after_copy:true source_time_zone:"EDT" target_time_zone:"UTC"', '', 0, 0, '%s', '', now(), 0, 'Stopped', '%s', 1, 0, 1, '{}')`, insertVReplicationPrefix, wf, sourceKs, shard, tenv.cells[0], tenv.dbName), }, { @@ -210,7 +210,7 @@ func TestCreateVReplicationWorkflow(t *testing.T) { DeferSecondaryKeys: true, AutoStart: true, }, - query: fmt.Sprintf(`%s values ('%s', 'keyspace:\"%s\" shard:\"%s\" filter:{rules:{match:\"t1\" filter:\"select * from t1\"} rules:{match:\"wut\" filter:\"select * from wut\"} rules:{match:\"zt\" filter:\"select * from zt\"}} on_ddl:EXEC stop_after_copy:true source_time_zone:\"EDT\" target_time_zone:\"UTC\"', '', 0, 0, '%s', '', now(), 0, 'Stopped', '%s', 1, 0, 1, '{}')`, + query: fmt.Sprintf(`%s values ('%s', 'keyspace:"%s" shard:"%s" filter:{rules:{match:"t1" filter:"select * from t1"} rules:{match:"wut" filter:"select * from wut"} rules:{match:"zt" filter:"select * from zt"}} on_ddl:EXEC stop_after_copy:true source_time_zone:"EDT" target_time_zone:"UTC"', '', 0, 0, '%s', '', now(), 0, 'Stopped', '%s', 1, 0, 1, '{}')`, insertVReplicationPrefix, wf, sourceKs, shard, tenv.cells[0], tenv.dbName), }, { @@ -250,7 +250,7 @@ func TestCreateVReplicationWorkflow(t *testing.T) { DeferSecondaryKeys: true, AutoStart: true, }, - query: fmt.Sprintf(`%s values ('%s', 'keyspace:\"%s\" shard:\"%s\" filter:{rules:{match:\"t1\" filter:\"select * from t1\"} rules:{match:\"wut\" filter:\"select * from wut\"} rules:{match:\"zt\" filter:\"select * from zt\"}} on_ddl:EXEC stop_after_copy:true source_time_zone:\"EDT\" target_time_zone:\"UTC\"', '', 0, 0, '%s', '', now(), 0, 'Stopped', '%s', 1, 0, 1, '{}')`, + query: fmt.Sprintf(`%s values ('%s', 'keyspace:"%s" shard:"%s" filter:{rules:{match:"t1" filter:"select * from t1"} rules:{match:"wut" filter:"select * from wut"} rules:{match:"zt" filter:"select * from zt"}} on_ddl:EXEC stop_after_copy:true source_time_zone:"EDT" target_time_zone:"UTC"', '', 0, 0, '%s', '', now(), 0, 'Stopped', '%s', 1, 0, 1, '{}')`, insertVReplicationPrefix, wf, sourceKs, shard, tenv.cells[0], tenv.dbName), }, } @@ -394,7 +394,7 @@ func TestMoveTables(t *testing.T) { ftc.vrdbClient.AddInvariant(getCopyStateQuery, &sqltypes.Result{}) tenv.tmc.setVReplicationExecResults(ftc.tablet, getCopyState, &sqltypes.Result{}) ftc.vrdbClient.ExpectRequest(fmt.Sprintf(readAllWorkflows, tenv.dbName, ""), &sqltypes.Result{}, nil) - insert := fmt.Sprintf(`%s values ('%s', 'keyspace:\"%s\" shard:\"%s\" filter:{rules:{match:\"t1\" filter:\"select * from t1 where in_keyrange(id, \'%s.hash\', \'%s\')\"}}', '', 0, 0, '%s', 'primary,replica,rdonly', now(), 0, 'Stopped', '%s', %d, 0, 0, '{}')`, + insert := fmt.Sprintf(`%s values ('%s', 'keyspace:"%s" shard:"%s" filter:{rules:{match:"t1" filter:"select * from t1 where in_keyrange(id, \'%s.hash\', \'%s\')"}}', '', 0, 0, '%s', 'primary,replica,rdonly', now(), 0, 'Stopped', '%s', %d, 0, 0, '{}')`, insertVReplicationPrefix, wf, sourceKs, sourceShard, targetKs, ftc.tablet.Shard, tenv.cells[0], tenv.dbName, vreplID) ftc.vrdbClient.ExpectRequest(insert, &sqltypes.Result{InsertID: 1}, nil) ftc.vrdbClient.ExpectRequest(getAutoIncrementStep, &sqltypes.Result{}, nil) @@ -428,7 +428,7 @@ 
func TestMoveTables(t *testing.T) { fmt.Sprintf("%d|%s|||Stopped|", vreplID, bls), ), nil) ftc.vrdbClient.ExpectRequest(idQuery, idRes, nil) - ftc.vrdbClient.ExpectRequest(fmt.Sprintf(updateWorkflow, binlogdatapb.VReplicationWorkflowState_Running.String(), strings.Replace(bls, `"`, `\"`, -1), "", "", vreplID), &sqltypes.Result{}, nil) + ftc.vrdbClient.ExpectRequest(fmt.Sprintf(updateWorkflow, binlogdatapb.VReplicationWorkflowState_Running.String(), bls, "", "", vreplID), &sqltypes.Result{}, nil) ftc.vrdbClient.ExpectRequest(fmt.Sprintf(getVReplicationRecord, vreplID), sqltypes.MakeTestResult( sqltypes.MakeTestFields( @@ -609,7 +609,7 @@ func TestUpdateVReplicationWorkflow(t *testing.T) { Cells: []string{"zone2"}, // TabletTypes is an empty value, so the current value should be cleared }, - query: fmt.Sprintf(`update _vt.vreplication set state = 'Running', source = 'keyspace:\"%s\" shard:\"%s\" filter:{rules:{match:\"corder\" filter:\"select * from corder\"} rules:{match:\"customer\" filter:\"select * from customer\"}}', cell = '%s', tablet_types = '' where id in (%d)`, + query: fmt.Sprintf(`update _vt.vreplication set state = 'Running', source = 'keyspace:"%s" shard:"%s" filter:{rules:{match:"corder" filter:"select * from corder"} rules:{match:"customer" filter:"select * from customer"}}', cell = '%s', tablet_types = '' where id in (%d)`, keyspace, shard, "zone2", vreplID), }, { @@ -620,7 +620,7 @@ func TestUpdateVReplicationWorkflow(t *testing.T) { Cells: []string{"zone3"}, TabletTypes: []topodatapb.TabletType{topodatapb.TabletType(textutil.SimulatedNullInt)}, // So keep the current value of replica }, - query: fmt.Sprintf(`update _vt.vreplication set state = 'Running', source = 'keyspace:\"%s\" shard:\"%s\" filter:{rules:{match:\"corder\" filter:\"select * from corder\"} rules:{match:\"customer\" filter:\"select * from customer\"}}', cell = '%s', tablet_types = '%s' where id in (%d)`, + query: fmt.Sprintf(`update _vt.vreplication set state = 'Running', source = 'keyspace:"%s" shard:"%s" filter:{rules:{match:"corder" filter:"select * from corder"} rules:{match:"customer" filter:"select * from customer"}}', cell = '%s', tablet_types = '%s' where id in (%d)`, keyspace, shard, "zone3", tabletTypes[0], vreplID), }, { @@ -631,7 +631,7 @@ func TestUpdateVReplicationWorkflow(t *testing.T) { TabletSelectionPreference: tabletmanagerdatapb.TabletSelectionPreference_INORDER, TabletTypes: []topodatapb.TabletType{topodatapb.TabletType_RDONLY, topodatapb.TabletType_REPLICA}, }, - query: fmt.Sprintf(`update _vt.vreplication set state = 'Running', source = 'keyspace:\"%s\" shard:\"%s\" filter:{rules:{match:\"corder\" filter:\"select * from corder\"} rules:{match:\"customer\" filter:\"select * from customer\"}}', cell = '', tablet_types = '%s' where id in (%d)`, + query: fmt.Sprintf(`update _vt.vreplication set state = 'Running', source = 'keyspace:"%s" shard:"%s" filter:{rules:{match:"corder" filter:"select * from corder"} rules:{match:"customer" filter:"select * from customer"}}', cell = '', tablet_types = '%s' where id in (%d)`, keyspace, shard, "in_order:rdonly,replica", vreplID), }, { @@ -642,7 +642,7 @@ func TestUpdateVReplicationWorkflow(t *testing.T) { Cells: textutil.SimulatedNullStringSlice, // So keep the current value of zone1 TabletTypes: []topodatapb.TabletType{topodatapb.TabletType_RDONLY}, }, - query: fmt.Sprintf(`update _vt.vreplication set state = 'Running', source = 'keyspace:\"%s\" shard:\"%s\" filter:{rules:{match:\"corder\" filter:\"select * from corder\"} rules:{match:\"customer\" 
filter:\"select * from customer\"}}', cell = '%s', tablet_types = '%s' where id in (%d)`, + query: fmt.Sprintf(`update _vt.vreplication set state = 'Running', source = 'keyspace:"%s" shard:"%s" filter:{rules:{match:"corder" filter:"select * from corder"} rules:{match:"customer" filter:"select * from customer"}}', cell = '%s', tablet_types = '%s' where id in (%d)`, keyspace, shard, cells[0], "rdonly", vreplID), }, { @@ -652,7 +652,7 @@ func TestUpdateVReplicationWorkflow(t *testing.T) { State: binlogdatapb.VReplicationWorkflowState(textutil.SimulatedNullInt), OnDdl: binlogdatapb.OnDDLAction_EXEC, }, - query: fmt.Sprintf(`update _vt.vreplication set state = 'Running', source = 'keyspace:\"%s\" shard:\"%s\" filter:{rules:{match:\"corder\" filter:\"select * from corder\"} rules:{match:\"customer\" filter:\"select * from customer\"}} on_ddl:%s', cell = '', tablet_types = '' where id in (%d)`, + query: fmt.Sprintf(`update _vt.vreplication set state = 'Running', source = 'keyspace:"%s" shard:"%s" filter:{rules:{match:"corder" filter:"select * from corder"} rules:{match:"customer" filter:"select * from customer"}} on_ddl:%s', cell = '', tablet_types = '' where id in (%d)`, keyspace, shard, binlogdatapb.OnDDLAction_EXEC.String(), vreplID), }, { @@ -664,7 +664,7 @@ func TestUpdateVReplicationWorkflow(t *testing.T) { TabletTypes: []topodatapb.TabletType{topodatapb.TabletType_RDONLY, topodatapb.TabletType_REPLICA, topodatapb.TabletType_PRIMARY}, OnDdl: binlogdatapb.OnDDLAction_EXEC_IGNORE, }, - query: fmt.Sprintf(`update _vt.vreplication set state = 'Running', source = 'keyspace:\"%s\" shard:\"%s\" filter:{rules:{match:\"corder\" filter:\"select * from corder\"} rules:{match:\"customer\" filter:\"select * from customer\"}} on_ddl:%s', cell = '%s', tablet_types = '%s' where id in (%d)`, + query: fmt.Sprintf(`update _vt.vreplication set state = 'Running', source = 'keyspace:"%s" shard:"%s" filter:{rules:{match:"corder" filter:"select * from corder"} rules:{match:"customer" filter:"select * from customer"}} on_ddl:%s', cell = '%s', tablet_types = '%s' where id in (%d)`, keyspace, shard, binlogdatapb.OnDDLAction_EXEC_IGNORE.String(), "zone1,zone2,zone3", "rdonly,replica,primary", vreplID), }, { @@ -676,7 +676,7 @@ func TestUpdateVReplicationWorkflow(t *testing.T) { TabletTypes: []topodatapb.TabletType{topodatapb.TabletType(textutil.SimulatedNullInt)}, OnDdl: binlogdatapb.OnDDLAction(textutil.SimulatedNullInt), }, - query: fmt.Sprintf(`update _vt.vreplication set state = '%s', source = 'keyspace:\"%s\" shard:\"%s\" filter:{rules:{match:\"corder\" filter:\"select * from corder\"} rules:{match:\"customer\" filter:\"select * from customer\"}}', cell = '%s', tablet_types = '%s' where id in (%d)`, + query: fmt.Sprintf(`update _vt.vreplication set state = '%s', source = 'keyspace:"%s" shard:"%s" filter:{rules:{match:"corder" filter:"select * from corder"} rules:{match:"customer" filter:"select * from customer"}}', cell = '%s', tablet_types = '%s' where id in (%d)`, binlogdatapb.VReplicationWorkflowState_Stopped.String(), keyspace, shard, cells[0], tabletTypes[0], vreplID), }, { @@ -689,7 +689,7 @@ func TestUpdateVReplicationWorkflow(t *testing.T) { OnDdl: binlogdatapb.OnDDLAction(textutil.SimulatedNullInt), }, isCopying: true, - query: fmt.Sprintf(`update _vt.vreplication set state = 'Copying', source = 'keyspace:\"%s\" shard:\"%s\" filter:{rules:{match:\"corder\" filter:\"select * from corder\"} rules:{match:\"customer\" filter:\"select * from customer\"}}', cell = '%s', tablet_types = '%s' where id in (%d)`, + 
query: fmt.Sprintf(`update _vt.vreplication set state = 'Copying', source = 'keyspace:"%s" shard:"%s" filter:{rules:{match:"corder" filter:"select * from corder"} rules:{match:"customer" filter:"select * from customer"}}', cell = '%s', tablet_types = '%s' where id in (%d)`, keyspace, shard, cells[0], tabletTypes[0], vreplID), }, } @@ -1033,7 +1033,7 @@ func TestSourceShardSelection(t *testing.T) { } tt.vrdbClient.ExpectRequest(fmt.Sprintf("use %s", sidecar.GetIdentifier()), &sqltypes.Result{}, nil) tt.vrdbClient.ExpectRequest( - fmt.Sprintf(`%s values ('%s', 'keyspace:\"%s\" shard:\"%s\" filter:{rules:{match:\"t1\" filter:\"select * from t1 where in_keyrange(id, \'%s.hash\', \'%s\')\"}}', '', 0, 0, '%s', '', now(), 0, 'Stopped', '%s', 1, 0, 0, '{}')`, + fmt.Sprintf(`%s values ('%s', 'keyspace:"%s" shard:"%s" filter:{rules:{match:"t1" filter:"select * from t1 where in_keyrange(id, \'%s.hash\', \'%s\')"}}', '', 0, 0, '%s', '', now(), 0, 'Stopped', '%s', 1, 0, 0, '{}')`, insertVReplicationPrefix, wf, sourceKs, sourceShard, targetKs, tt.tablet.Shard, tenv.cells[0], tenv.dbName), &sqltypes.Result{InsertID: uint64(i + 1)}, err, @@ -1117,7 +1117,7 @@ func TestFailedMoveTablesCreateCleanup(t *testing.T) { targetTablet.vrdbClient.ExpectRequest( fmt.Sprintf("%s %s", insertVReplicationPrefix, - fmt.Sprintf(`values ('%s', 'keyspace:\"%s\" shard:\"%s\" filter:{rules:{match:\"%s\" filter:\"select * from %s\"}} source_time_zone:\"%s\" target_time_zone:\"UTC\"', '', 0, 0, '%s', 'primary', now(), 0, 'Stopped', '%s', 1, 0, 0, '{}')`, + fmt.Sprintf(`values ('%s', 'keyspace:"%s" shard:"%s" filter:{rules:{match:"%s" filter:"select * from %s"}} source_time_zone:"%s" target_time_zone:"UTC"', '', 0, 0, '%s', 'primary', now(), 0, 'Stopped', '%s', 1, 0, 0, '{}')`, wf, sourceKs, shard, table, table, invalidTimeZone, strings.Join(tenv.cells, ","), tenv.dbName), ), &sqltypes.Result{ @@ -1793,7 +1793,7 @@ func TestMaterializerOneToOne(t *testing.T) { // the test with an error as at this point we've tested what // we wanted to test. 
 	insert := insertVReplicationPrefix +
-		fmt.Sprintf(` values ('%s', 'keyspace:\"%s\" shard:\"%s\" filter:{rules:{match:\"t1\" filter:\"select * from t1\"} rules:{match:\"t2\" filter:\"select * from t3\"} rules:{match:\"t4\"}}', '', 0, 0, '%s', 'primary,rdonly', now(), 0, 'Stopped', '%s', 0, 0, 0, '{}')`,
+		fmt.Sprintf(` values ('%s', 'keyspace:"%s" shard:"%s" filter:{rules:{match:"t1" filter:"select * from t1"} rules:{match:"t2" filter:"select * from t3"} rules:{match:"t4"}}', '', 0, 0, '%s', 'primary,rdonly', now(), 0, 'Stopped', '%s', 0, 0, 0, '{}')`,
 			wf, sourceKs, shard, tenv.cells[0], tenv.dbName)
 	targetTablet.vrdbClient.ExpectRequest(insert, &sqltypes.Result{}, errShortCircuit)
@@ -1858,7 +1858,7 @@ func TestMaterializerManyToOne(t *testing.T) {
 		bls := fmt.Sprintf("keyspace:\"%s\" shard:\"%s\" filter:{rules:{match:\"t1\" filter:\"select * from t1\"} rules:{match:\"t2\" filter:\"select * from t3\"}}", sourceKs, sourceShard)
 		insert := insertVReplicationPrefix +
-			fmt.Sprintf(` values ('%s', 'keyspace:\"%s\" shard:\"%s\" filter:{rules:{match:\"t1\" filter:\"select * from t1\"} rules:{match:\"t2\" filter:\"select * from t3\"}}', '', 0, 0, '%s', 'primary,rdonly', now(), 0, 'Stopped', '%s', 0, 0, 0, '{}')`,
+			fmt.Sprintf(` values ('%s', 'keyspace:"%s" shard:"%s" filter:{rules:{match:"t1" filter:"select * from t1"} rules:{match:"t2" filter:"select * from t3"}}', '', 0, 0, '%s', 'primary,rdonly', now(), 0, 'Stopped', '%s', 0, 0, 0, '{}')`,
 				wf, sourceKs, sourceShard, tenv.cells[0], tenv.dbName)
 		if vreplID == 1 {
 			targetTablet.vrdbClient.ExpectRequest(insert, &sqltypes.Result{InsertID: uint64(vreplID)}, nil)
@@ -1954,7 +1954,7 @@ func TestMaterializerOneToMany(t *testing.T) {
 		bls := fmt.Sprintf("keyspace:\"%s\" shard:\"%s\" filter:{rules:{match:\"t1\" filter:\"select * from t1 where in_keyrange(c1, '%s.xxhash', '%s')\"}}", sourceKs, sourceShard, targetKs, targetShard)
 		insert := insertVReplicationPrefix +
-			fmt.Sprintf(` values ('%s', 'keyspace:\"%s\" shard:\"%s\" filter:{rules:{match:\"t1\" filter:\"select * from t1 where in_keyrange(c1, \'%s.xxhash\', \'%s\')\"}}', '', 0, 0, '%s', 'primary,rdonly', now(), 0, 'Stopped', '%s', 0, 0, 0, '{}')`,
+			fmt.Sprintf(` values ('%s', 'keyspace:"%s" shard:"%s" filter:{rules:{match:"t1" filter:"select * from t1 where in_keyrange(c1, \'%s.xxhash\', \'%s\')"}}', '', 0, 0, '%s', 'primary,rdonly', now(), 0, 'Stopped', '%s', 0, 0, 0, '{}')`,
 				wf, sourceKs, sourceShard, targetKs, targetShard, tenv.cells[0], tenv.dbName)
 		if targetShard == "-80" {
 			targetTablet.vrdbClient.ExpectRequest(insert, &sqltypes.Result{InsertID: uint64(vreplID)}, nil)
@@ -2054,7 +2054,7 @@ func TestMaterializerManyToMany(t *testing.T) {
 		bls := fmt.Sprintf("keyspace:\"%s\" shard:\"%s\" filter:{rules:{match:\"t1\" filter:\"select * from t1 where in_keyrange(c1, '%s.xxhash', '%s')\"}}", sourceKs, sourceShard, targetKs, targetShard)
 		insert := insertVReplicationPrefix +
-			fmt.Sprintf(` values ('%s', 'keyspace:\"%s\" shard:\"%s\" filter:{rules:{match:\"t1\" filter:\"select * from t1 where in_keyrange(c1, \'%s.xxhash\', \'%s\')\"}}', '', 0, 0, '%s', 'primary,rdonly', now(), 0, 'Stopped', '%s', 0, 0, 0, '{}')`,
+			fmt.Sprintf(` values ('%s', 'keyspace:"%s" shard:"%s" filter:{rules:{match:"t1" filter:"select * from t1 where in_keyrange(c1, \'%s.xxhash\', \'%s\')"}}', '', 0, 0, '%s', 'primary,rdonly', now(), 0, 'Stopped', '%s', 0, 0, 0, '{}')`,
 				wf, sourceKs, sourceShard, targetKs, targetShard, tenv.cells[0], tenv.dbName)
 		if targetShard == "80-" && sourceShard == "40-" { // Last insert
 			targetTablet.vrdbClient.ExpectRequest(insert, &sqltypes.Result{InsertID: uint64(vreplID)}, errShortCircuit)
@@ -2155,7 +2155,7 @@ func TestMaterializerMulticolumnVindex(t *testing.T) {
 		bls := fmt.Sprintf("keyspace:\"%s\" shard:\"%s\" filter:{rules:{match:\"t1\" filter:\"select * from t1 where in_keyrange(c1, c2, '%s.region', '%s')\"}}", sourceKs, sourceShard, targetKs, targetShard)
 		insert := insertVReplicationPrefix +
-			fmt.Sprintf(` values ('%s', 'keyspace:\"%s\" shard:\"%s\" filter:{rules:{match:\"t1\" filter:\"select * from t1 where in_keyrange(c1, c2, \'%s.region\', \'%s\')\"}}', '', 0, 0, '%s', 'primary,rdonly', now(), 0, 'Stopped', '%s', 0, 0, 0, '{}')`,
+			fmt.Sprintf(` values ('%s', 'keyspace:"%s" shard:"%s" filter:{rules:{match:"t1" filter:"select * from t1 where in_keyrange(c1, c2, \'%s.region\', \'%s\')"}}', '', 0, 0, '%s', 'primary,rdonly', now(), 0, 'Stopped', '%s', 0, 0, 0, '{}')`,
 				wf, sourceKs, sourceShard, targetKs, targetShard, tenv.cells[0], tenv.dbName)
 		if targetShard == "-80" {
 			targetTablet.vrdbClient.ExpectRequest(insert, &sqltypes.Result{InsertID: uint64(vreplID)}, nil)
@@ -2239,7 +2239,7 @@ func TestMaterializerDeploySchema(t *testing.T) {
 	// the test with an error as at this point we've tested what
 	// we wanted to test.
 	insert := insertVReplicationPrefix +
-		fmt.Sprintf(` values ('%s', 'keyspace:\"%s\" shard:\"%s\" filter:{rules:{match:\"t1\" filter:\"select * from t1\"} rules:{match:\"t2\" filter:\"select * from t3\"}}', '', 0, 0, '%s', 'primary,rdonly', now(), 0, 'Stopped', '%s', 0, 0, 0, '{}')`,
+		fmt.Sprintf(` values ('%s', 'keyspace:"%s" shard:"%s" filter:{rules:{match:"t1" filter:"select * from t1"} rules:{match:"t2" filter:"select * from t3"}}', '', 0, 0, '%s', 'primary,rdonly', now(), 0, 'Stopped', '%s', 0, 0, 0, '{}')`,
 			wf, sourceKs, shard, tenv.cells[0], tenv.dbName)
 	targetTablet.vrdbClient.ExpectRequest(insert, &sqltypes.Result{}, errShortCircuit)
@@ -2309,7 +2309,7 @@ func TestMaterializerCopySchema(t *testing.T) {
 	// the test with an error as at this point we've tested what
 	// we wanted to test.
 	insert := insertVReplicationPrefix +
-		fmt.Sprintf(` values ('%s', 'keyspace:\"%s\" shard:\"%s\" filter:{rules:{match:\"t1\" filter:\"select * from t1\"} rules:{match:\"t2\" filter:\"select * from t3\"}}', '', 0, 0, '%s', 'primary,rdonly', now(), 0, 'Stopped', '%s', 0, 0, 0, '{}')`,
+		fmt.Sprintf(` values ('%s', 'keyspace:"%s" shard:"%s" filter:{rules:{match:"t1" filter:"select * from t1"} rules:{match:"t2" filter:"select * from t3"}}', '', 0, 0, '%s', 'primary,rdonly', now(), 0, 'Stopped', '%s', 0, 0, 0, '{}')`,
 			wf, sourceKs, shard, tenv.cells[0], tenv.dbName)
 	targetTablet.vrdbClient.ExpectRequest(insert, &sqltypes.Result{}, errShortCircuit)
@@ -2395,7 +2395,7 @@ func TestMaterializerExplicitColumns(t *testing.T) {
 		bls := fmt.Sprintf("keyspace:\"%s\" shard:\"%s\" filter:{rules:{match:\"t1\" filter:\"select c1, c1 + c2, c2 from t1 where in_keyrange(c1, c2, '%s.region', '%s')\"}}", sourceKs, sourceShard, targetKs, targetShard)
 		insert := insertVReplicationPrefix +
-			fmt.Sprintf(` values ('%s', 'keyspace:\"%s\" shard:\"%s\" filter:{rules:{match:\"t1\" filter:\"select c1, c1 + c2, c2 from t1 where in_keyrange(c1, c2, \'%s.region\', \'%s\')\"}}', '', 0, 0, '%s', 'primary,rdonly', now(), 0, 'Stopped', '%s', 0, 0, 0, '{}')`,
+			fmt.Sprintf(` values ('%s', 'keyspace:"%s" shard:"%s" filter:{rules:{match:"t1" filter:"select c1, c1 + c2, c2 from t1 where in_keyrange(c1, c2, \'%s.region\', \'%s\')"}}', '', 0, 0, '%s', 'primary,rdonly', now(), 0, 'Stopped', '%s', 0, 0, 0, '{}')`,
 				wf, sourceKs, sourceShard, targetKs, targetShard, tenv.cells[0], tenv.dbName)
 		if targetShard == "-80" {
 			targetTablet.vrdbClient.ExpectRequest(insert, &sqltypes.Result{InsertID: uint64(vreplID)}, nil)
@@ -2495,7 +2495,7 @@ func TestMaterializerRenamedColumns(t *testing.T) {
 		bls := fmt.Sprintf("keyspace:\"%s\" shard:\"%s\" filter:{rules:{match:\"t1\" filter:\"select c3 as c1, c1 + c2, c4 as c2 from t1 where in_keyrange(c3, c4, '%s.region', '%s')\"}}", sourceKs, sourceShard, targetKs, targetShard)
 		insert := insertVReplicationPrefix +
-			fmt.Sprintf(` values ('%s', 'keyspace:\"%s\" shard:\"%s\" filter:{rules:{match:\"t1\" filter:\"select c3 as c1, c1 + c2, c4 as c2 from t1 where in_keyrange(c3, c4, \'%s.region\', \'%s\')\"}}', '', 0, 0, '%s', 'primary,rdonly', now(), 0, 'Stopped', '%s', 0, 0, 0, '{}')`,
+			fmt.Sprintf(` values ('%s', 'keyspace:"%s" shard:"%s" filter:{rules:{match:"t1" filter:"select c3 as c1, c1 + c2, c4 as c2 from t1 where in_keyrange(c3, c4, \'%s.region\', \'%s\')"}}', '', 0, 0, '%s', 'primary,rdonly', now(), 0, 'Stopped', '%s', 0, 0, 0, '{}')`,
 				wf, sourceKs, sourceShard, targetKs, targetShard, tenv.cells[0], tenv.dbName)
 		if targetShard == "-80" {
 			targetTablet.vrdbClient.ExpectRequest(insert, &sqltypes.Result{InsertID: uint64(vreplID)}, nil)
@@ -2570,7 +2570,7 @@ func TestMaterializerStopAfterCopy(t *testing.T) {
 	// the test with an error as at this point we've tested what
 	// we wanted to test.
 	insert := insertVReplicationPrefix +
-		fmt.Sprintf(` values ('%s', 'keyspace:\"%s\" shard:\"%s\" filter:{rules:{match:\"t1\" filter:\"select * from t1\"} rules:{match:\"t2\" filter:\"select * from t3\"}} stop_after_copy:true', '', 0, 0, '%s', 'primary,rdonly', now(), 0, 'Stopped', '%s', 0, 0, 0, '{}')`,
+		fmt.Sprintf(` values ('%s', 'keyspace:"%s" shard:"%s" filter:{rules:{match:"t1" filter:"select * from t1"} rules:{match:"t2" filter:"select * from t3"}} stop_after_copy:true', '', 0, 0, '%s', 'primary,rdonly', now(), 0, 'Stopped', '%s', 0, 0, 0, '{}')`,
 			wf, sourceKs, shard, tenv.cells[0], tenv.dbName)
 	targetTablet.vrdbClient.ExpectRequest(insert, &sqltypes.Result{}, errShortCircuit)
diff --git a/go/vt/vttablet/tabletmanager/vdiff/action_test.go b/go/vt/vttablet/tabletmanager/vdiff/action_test.go
index 4676238cf69..6949b411f13 100644
--- a/go/vt/vttablet/tabletmanager/vdiff/action_test.go
+++ b/go/vt/vttablet/tabletmanager/vdiff/action_test.go
@@ -86,7 +86,7 @@ func TestPerformVDiffAction(t *testing.T) {
 					query: fmt.Sprintf("select id as id from _vt.vdiff where vdiff_uuid = %s", encodeString(uuid)),
 				},
 				{
-					query: fmt.Sprintf(`insert into _vt.vdiff(keyspace, workflow, state, options, shard, db_name, vdiff_uuid) values('', '', 'pending', '{\"picker_options\":{\"source_cell\":\"cell1,zone100_test\",\"target_cell\":\"cell1,zone100_test\"}}', '0', 'vt_vttest', %s)`, encodeString(uuid)),
+					query: fmt.Sprintf(`insert into _vt.vdiff(keyspace, workflow, state, options, shard, db_name, vdiff_uuid) values('', '', 'pending', '{"picker_options":{"source_cell":"cell1,zone100_test","target_cell":"cell1,zone100_test"}}', '0', 'vt_vttest', %s)`, encodeString(uuid)),
 				},
 			},
 			postFunc: func() error {
@@ -120,7 +120,7 @@ func TestPerformVDiffAction(t *testing.T) {
 					query: fmt.Sprintf("select id as id from _vt.vdiff where vdiff_uuid = %s", encodeString(uuid)),
 				},
 				{
-					query: fmt.Sprintf(`insert into _vt.vdiff(keyspace, workflow, state, options, shard, db_name, vdiff_uuid) values('', '', 'pending', '{\"picker_options\":{\"source_cell\":\"all\",\"target_cell\":\"all\"}}', '0', 'vt_vttest', %s)`, encodeString(uuid)),
+					query: fmt.Sprintf(`insert into _vt.vdiff(keyspace, workflow, state, options, shard, db_name, vdiff_uuid) values('', '', 'pending', '{"picker_options":{"source_cell":"all","target_cell":"all"}}', '0', 'vt_vttest', %s)`, encodeString(uuid)),
 				},
 			},
 			postFunc: func() error {
diff --git a/go/vt/vttablet/tabletmanager/vdiff/engine_test.go b/go/vt/vttablet/tabletmanager/vdiff/engine_test.go
index e6c9a84d9e2..ef3c673cc8f 100644
--- a/go/vt/vttablet/tabletmanager/vdiff/engine_test.go
+++ b/go/vt/vttablet/tabletmanager/vdiff/engine_test.go
@@ -187,8 +187,8 @@ func TestVDiff(t *testing.T) {
 		),
 		`fields:{name:"c1" type:INT64 table:"t1" org_table:"t1" database:"vt_customer" org_name:"c1" column_length:20 charset:63 flags:53251} rows:{lengths:1 values:"1"}|0|{}`,
 	), nil)
-	vdenv.dbClient.ExpectRequest(`update _vt.vdiff_table set rows_compared = 0, report = '{\"TableName\":\"t1\",\"ProcessedRows\":0,\"MatchingRows\":0,\"MismatchedRows\":0,\"ExtraRowsSource\":0,\"ExtraRowsTarget\":0}' where vdiff_id = 1 and table_name = 't1'`, singleRowAffected, nil)
-	vdenv.dbClient.ExpectRequest(`update _vt.vdiff_table set state = 'completed', rows_compared = 0, report = '{\"TableName\":\"t1\",\"ProcessedRows\":0,\"MatchingRows\":0,\"MismatchedRows\":0,\"ExtraRowsSource\":0,\"ExtraRowsTarget\":0}' where vdiff_id = 1 and table_name = 't1'`, singleRowAffected, nil)
+	vdenv.dbClient.ExpectRequest(`update _vt.vdiff_table set rows_compared = 0, report = '{"TableName":"t1","ProcessedRows":0,"MatchingRows":0,"MismatchedRows":0,"ExtraRowsSource":0,"ExtraRowsTarget":0}' where vdiff_id = 1 and table_name = 't1'`, singleRowAffected, nil)
+	vdenv.dbClient.ExpectRequest(`update _vt.vdiff_table set state = 'completed', rows_compared = 0, report = '{"TableName":"t1","ProcessedRows":0,"MatchingRows":0,"MismatchedRows":0,"ExtraRowsSource":0,"ExtraRowsTarget":0}' where vdiff_id = 1 and table_name = 't1'`, singleRowAffected, nil)
 	vdenv.dbClient.ExpectRequest(`insert into _vt.vdiff_log(vdiff_id, message) values (1, 'completed: table \'t1\'')`, singleRowAffected, nil)
 	vdenv.dbClient.ExpectRequest("update _vt.vdiff_table set state = 'completed' where vdiff_id = 1 and table_name = 't1'", singleRowAffected, nil)
 	vdenv.dbClient.ExpectRequest(`insert into _vt.vdiff_log(vdiff_id, message) values (1, 'completed: table \'t1\'')`, singleRowAffected, nil)
diff --git a/go/vt/vttablet/tabletmanager/vreplication/insert_generator_test.go b/go/vt/vttablet/tabletmanager/vreplication/insert_generator_test.go
index 2b07308c4c2..92100429963 100644
--- a/go/vt/vttablet/tabletmanager/vreplication/insert_generator_test.go
+++ b/go/vt/vttablet/tabletmanager/vreplication/insert_generator_test.go
@@ -29,10 +29,10 @@ func TestInsertGenerator(t *testing.T) {
 	ig.now = 111
 	ig.AddRow("b", &binlogdatapb.BinlogSource{Keyspace: "c"}, "d", "e", "f", binlogdatapb.VReplicationWorkflowType_Materialize, binlogdatapb.VReplicationWorkflowSubType_None, false)
 	want := `insert into _vt.vreplication(workflow, source, pos, max_tps, max_replication_lag, cell, tablet_types, time_updated, transaction_timestamp, state, db_name, workflow_type, workflow_sub_type, defer_secondary_keys, options) values ` +
-		`('b', 'keyspace:\"c\"', 'd', 9223372036854775807, 9223372036854775807, 'e', 'f', 111, 0, 'Stopped', 'a', 0, 0, false, '{}')`
+		`('b', 'keyspace:"c"', 'd', 9223372036854775807, 9223372036854775807, 'e', 'f', 111, 0, 'Stopped', 'a', 0, 0, false, '{}')`
 	assert.Equal(t, ig.String(), want)

 	ig.AddRow("g", &binlogdatapb.BinlogSource{Keyspace: "h"}, "i", "j", "k", binlogdatapb.VReplicationWorkflowType_Reshard, binlogdatapb.VReplicationWorkflowSubType_Partial, true)
-	want += `, ('g', 'keyspace:\"h\"', 'i', 9223372036854775807, 9223372036854775807, 'j', 'k', 111, 0, 'Stopped', 'a', 4, 1, true, '{}')`
+	want += `, ('g', 'keyspace:"h"', 'i', 9223372036854775807, 9223372036854775807, 'j', 'k', 111, 0, 'Stopped', 'a', 4, 1, true, '{}')`
 	assert.Equal(t, ig.String(), want)
 }
diff --git a/go/vt/vttablet/tabletmanager/vreplication/journal_test.go b/go/vt/vttablet/tabletmanager/vreplication/journal_test.go
index 18dbe1e7fd8..862f22ab564 100644
--- a/go/vt/vttablet/tabletmanager/vreplication/journal_test.go
+++ b/go/vt/vttablet/tabletmanager/vreplication/journal_test.go
@@ -70,7 +70,7 @@ func TestJournalOneToOne(t *testing.T) {
 	expectDBClientQueries(t, qh.Expect(
 		"begin",
-		`/insert into _vt.vreplication.*workflow, source, pos.*values.*'test', 'keyspace:\\"other_keyspace\\" shard:\\"0\\.*'MySQL56/7b04699f-f5e9-11e9-bf88-9cb6d089e1c3:1-10'`,
+		`/insert into _vt.vreplication.*workflow, source, pos.*values.*'test', 'keyspace:"other_keyspace" shard:"0.*'MySQL56/7b04699f-f5e9-11e9-bf88-9cb6d089e1c3:1-10'`,
 		fmt.Sprintf("delete from _vt.vreplication where id=%d", firstID),
 		"commit",
 		"/update _vt.vreplication set message='Picked source tablet.*",
@@ -133,8 +133,8 @@ func TestJournalOneToMany(t *testing.T) {
 	expectDBClientQueries(t, qh.Expect(
 		"begin",
-		`/insert into _vt.vreplication.*workflow, source, pos.*values.*'test', 'keyspace:\\"other_keyspace\\" shard:\\"-80\\.*'MySQL56/7b04699f-f5e9-11e9-bf88-9cb6d089e1c3:1-5'`,
-		`/insert into _vt.vreplication.*workflow, source, pos.*values.*'test', 'keyspace:\\"other_keyspace\\" shard:\\"80-\\.*'MySQL56/7b04699f-f5e9-11e9-bf88-9cb6d089e1c3:5-10'`,
+		`/insert into _vt.vreplication.*workflow, source, pos.*values.*'test', 'keyspace:"other_keyspace" shard:"-80.*'MySQL56/7b04699f-f5e9-11e9-bf88-9cb6d089e1c3:1-5'`,
+		`/insert into _vt.vreplication.*workflow, source, pos.*values.*'test', 'keyspace:"other_keyspace" shard:"80-.*'MySQL56/7b04699f-f5e9-11e9-bf88-9cb6d089e1c3:5-10'`,
 		fmt.Sprintf("delete from _vt.vreplication where id=%d", firstID),
 		"commit",
 		"/update _vt.vreplication set message='Picked source tablet.*",
@@ -194,7 +194,7 @@ func TestJournalTablePresent(t *testing.T) {
 	expectDBClientQueries(t, qh.Expect(
 		"begin",
-		`/insert into _vt.vreplication.*workflow, source, pos.*values.*'test', 'keyspace:\\"other_keyspace\\" shard:\\"0\\.*'MySQL56/7b04699f-f5e9-11e9-bf88-9cb6d089e1c3:1-10'`,
+		`/insert into _vt.vreplication.*workflow, source, pos.*values.*'test', 'keyspace:"other_keyspace" shard:"0.*'MySQL56/7b04699f-f5e9-11e9-bf88-9cb6d089e1c3:1-10'`,
 		fmt.Sprintf("delete from _vt.vreplication where id=%d", firstID),
 		"commit",
 		"/update _vt.vreplication set message='Picked source tablet.*",
diff --git a/go/vt/vttablet/tabletmanager/vreplication/vcopier_test.go b/go/vt/vttablet/tabletmanager/vreplication/vcopier_test.go
index 9cbb51e76c6..fda9012c1b5 100644
--- a/go/vt/vttablet/tabletmanager/vreplication/vcopier_test.go
+++ b/go/vt/vttablet/tabletmanager/vreplication/vcopier_test.go
@@ -176,10 +176,10 @@ func testPlayerCopyCharPK(t *testing.T) {
 		"/insert into _vt.copy_state",
 		"/update _vt.vreplication set state='Copying'",
 		"insert into dst(idc,val) values ('a\\0',1)",
-		`/insert into _vt.copy_state \(lastpk, vrepl_id, table_name\) values \('fields:{name:\\"idc\\" type:BINARY charset:63 flags:20611} rows:{lengths:2 values:\\"a\\\\x00\\"}'.*`,
+		`/insert into _vt.copy_state \(lastpk, vrepl_id, table_name\) values \('fields:{name:"idc" type:BINARY charset:63 flags:20611} rows:{lengths:2 values:"a\\\\x00"}'.*`,
 		`update dst set val=3 where idc='a\0' and ('a\0') <= ('a\0')`,
 		"insert into dst(idc,val) values ('c\\0',2)",
-		`/insert into _vt.copy_state \(lastpk, vrepl_id, table_name\) values \('fields:{name:\\"idc\\" type:BINARY charset:63 flags:20611} rows:{lengths:2 values:\\"c\\\\x00\\"}'.*`,
+		`/insert into _vt.copy_state \(lastpk, vrepl_id, table_name\) values \('fields:{name:"idc" type:BINARY charset:63 flags:20611} rows:{lengths:2 values:"c\\\\x00"}'.*`,
 		"/delete cs, pca from _vt.copy_state as cs left join _vt.post_copy_action as pca on cs.vrepl_id=pca.vrepl_id and cs.table_name=pca.table_name.*dst",
 		"/update _vt.vreplication set state='Running",
 	), recvTimeout)
@@ -283,7 +283,7 @@ func testPlayerCopyVarcharPKCaseInsensitive(t *testing.T) {
 		"/update _vt.vreplication set state='Copying'",
 		// Copy mode.
 		"insert into dst(idc,val) values ('a',1)",
-		`/insert into _vt.copy_state \(lastpk, vrepl_id, table_name\) values \('fields:{name:\\"idc\\" type:VARCHAR charset:33 flags:20483} rows:{lengths:1 values:\\"a\\"}'.*`,
+		`/insert into _vt.copy_state \(lastpk, vrepl_id, table_name\) values \('fields:{name:"idc" type:VARCHAR charset:33 flags:20483} rows:{lengths:1 values:"a"}'.*`,
 		// Copy-catchup mode.
 		`/insert into dst\(idc,val\) select 'B', 3 from dual where \( .* 'B' COLLATE .* \) <= \( .* 'a' COLLATE .* \)`,
 	).Then(func(expect qh.ExpectationSequencer) qh.ExpectationSequencer {
 		//upd1 := expect.
 		upd1 := expect.Then(qh.Eventually(
 			"insert into dst(idc,val) values ('B',3)",
-			`/insert into _vt.copy_state \(lastpk, vrepl_id, table_name\) values \('fields:{name:\\"idc\\" type:VARCHAR charset:33 flags:20483} rows:{lengths:1 values:\\"B\\"}'.*`,
+			`/insert into _vt.copy_state \(lastpk, vrepl_id, table_name\) values \('fields:{name:"idc" type:VARCHAR charset:33 flags:20483} rows:{lengths:1 values:"B"}'.*`,
 		))
 		upd2 := expect.Then(qh.Eventually(
 			"insert into dst(idc,val) values ('c',2)",
-			`/insert into _vt.copy_state \(lastpk, vrepl_id, table_name\) values \('fields:{name:\\"idc\\" type:VARCHAR charset:33 flags:20483} rows:{lengths:1 values:\\"c\\"}'.*`,
+			`/insert into _vt.copy_state \(lastpk, vrepl_id, table_name\) values \('fields:{name:"idc" type:VARCHAR charset:33 flags:20483} rows:{lengths:1 values:"c"}'.*`,
 		))
 		upd1.Then(upd2.Eventually())
 		return upd2
@@ -406,12 +406,12 @@ func testPlayerCopyVarcharCompositePKCaseSensitiveCollation(t *testing.T) {
 		"/update _vt.vreplication set state='Copying'",
 		// Copy mode.
 		"insert into dst(id,idc,idc2,val) values (1,'a','a',1)",
-		`/insert into _vt.copy_state \(lastpk, vrepl_id, table_name\) values \('fields:{name:\\"id\\" type:INT32 charset:63 flags:53251} fields:{name:\\"idc\\" type:VARBINARY charset:63 flags:20611} fields:{name:\\"idc2\\" type:VARBINARY charset:63 flags:20611} rows:{lengths:1 lengths:1 lengths:1 values:\\"1aa\\"}'.*`,
+		`/insert into _vt.copy_state \(lastpk, vrepl_id, table_name\) values \('fields:{name:"id" type:INT32 charset:63 flags:53251} fields:{name:"idc" type:VARBINARY charset:63 flags:20611} fields:{name:"idc2" type:VARBINARY charset:63 flags:20611} rows:{lengths:1 lengths:1 lengths:1 values:"1aa"}'.*`,
 		// Copy-catchup mode.
 		`insert into dst(id,idc,idc2,val) select 1, 'B', 'B', 3 from dual where (1,'B','B') <= (1,'a','a')`,
 		// Copy mode.
 		"insert into dst(id,idc,idc2,val) values (1,'c','c',2)",
-		`/insert into _vt.copy_state \(lastpk, vrepl_id, table_name\) values \('fields:{name:\\"id\\" type:INT32 charset:63 flags:53251} fields:{name:\\"idc\\" type:VARBINARY charset:63 flags:20611} fields:{name:\\"idc2\\" type:VARBINARY charset:63 flags:20611} rows:{lengths:1 lengths:1 lengths:1 values:\\"1cc\\"}'.*`,
+		`/insert into _vt.copy_state \(lastpk, vrepl_id, table_name\) values \('fields:{name:"id" type:INT32 charset:63 flags:53251} fields:{name:"idc" type:VARBINARY charset:63 flags:20611} fields:{name:"idc2" type:VARBINARY charset:63 flags:20611} rows:{lengths:1 lengths:1 lengths:1 values:"1cc"}'.*`,
 		// Wrap-up.
 		"/delete cs, pca from _vt.copy_state as cs left join _vt.post_copy_action as pca on cs.vrepl_id=pca.vrepl_id and cs.table_name=pca.table_name.*dst",
 		"/update _vt.vreplication set state='Running'",
@@ -495,7 +495,7 @@ func testPlayerCopyTablesWithFK(t *testing.T) {
 		// Inserts may happen out-of-order. Update happen in-order.
"begin", "insert into dst1(id,id2) values (1,1), (2,2)", - `/insert into _vt.copy_state \(lastpk, vrepl_id, table_name\) values \('fields:{name:\\"id\\" type:INT32 charset:63 flags:53251} rows:{lengths:1 values:\\"2\\"}'.*`, + `/insert into _vt.copy_state \(lastpk, vrepl_id, table_name\) values \('fields:{name:"id" type:INT32 charset:63 flags:53251} rows:{lengths:1 values:"2"}'.*`, "commit", )).Then(qh.Immediately( "set @@session.foreign_key_checks=0", @@ -516,7 +516,7 @@ func testPlayerCopyTablesWithFK(t *testing.T) { // copy dst2 "begin", "insert into dst2(id,id2) values (1,21), (2,22)", - `/insert into _vt.copy_state \(lastpk, vrepl_id, table_name\) values \('fields:{name:\\"id\\" type:INT32 charset:63 flags:53251} rows:{lengths:1 values:\\"2\\"}'.*`, + `/insert into _vt.copy_state \(lastpk, vrepl_id, table_name\) values \('fields:{name:"id" type:INT32 charset:63 flags:53251} rows:{lengths:1 values:"2"}'.*`, "commit", )).Then(qh.Immediately( "set @@session.foreign_key_checks=0", @@ -610,7 +610,7 @@ func testPlayerCopyTables(t *testing.T) { expectDBClientQueries(t, qh.Expect( // Filters should be lexicographically ordered by name. - regexp.QuoteMeta("/insert into _vt.vreplication (workflow, source, pos, max_tps, max_replication_lag, time_updated, transaction_timestamp, state, db_name, workflow_type, workflow_sub_type, options) values ('test', 'keyspace:\\\"vttest\\\" shard:\\\"0\\\" filter:{rules:{match:\\\"ast1\\\" filter:\\\"select * from ast1\\\"} rules:{match:\\\"dst1\\\" filter:\\\"select id, val, val as val2, d, j from src1\\\"} rules:{match:\\\"/yes\\\"}}'"), + regexp.QuoteMeta("/insert into _vt.vreplication (workflow, source, pos, max_tps, max_replication_lag, time_updated, transaction_timestamp, state, db_name, workflow_type, workflow_sub_type, options) values ('test', 'keyspace:\"vttest\" shard:\"0\" filter:{rules:{match:\"ast1\" filter:\"select * from ast1\"} rules:{match:\"dst1\" filter:\"select id, val, val as val2, d, j from src1\"} rules:{match:\"/yes\"}}'"), "/update _vt.vreplication set message='Picked source tablet.*", // Create the list of tables to copy and transition to Copying state. "begin", @@ -629,7 +629,7 @@ func testPlayerCopyTables(t *testing.T) { "commit", "begin", "insert into dst1(id,val,val2,d,j) values (1,'aaa','aaa',0,JSON_ARRAY(123456789012345678901234567890, _utf8mb4'abcd')), (2,'bbb','bbb',1,JSON_OBJECT(_utf8mb4'foo', _utf8mb4'bar')), (3,'ccc','ccc',2,CAST(_utf8mb4'null' as JSON)), (4,'ddd','ddd',3,JSON_OBJECT(_utf8mb4'name', _utf8mb4'matt', _utf8mb4'size', null)), (5,'eee','eee',4,null)", - `/insert into _vt.copy_state \(lastpk, vrepl_id, table_name\) values \('fields:{name:\\"id\\" type:INT32 charset:63 flags:53251} rows:{lengths:1 values:\\"5\\"}'.*`, + `/insert into _vt.copy_state \(lastpk, vrepl_id, table_name\) values \('fields:{name:"id" type:INT32 charset:63 flags:53251} rows:{lengths:1 values:"5"}'.*`, "commit", // copy of dst1 is done: delete from copy_state. "/delete cs, pca from _vt.copy_state as cs left join _vt.post_copy_action as pca on cs.vrepl_id=pca.vrepl_id and cs.table_name=pca.table_name.*dst1", @@ -765,7 +765,7 @@ func testPlayerCopyBigTable(t *testing.T) { // The first fast-forward has no starting point. So, it just saves the current position. 
"/update _vt.vreplication set state='Copying'", "insert into dst(id,val) values (1,'aaa')", - `/insert into _vt.copy_state \(lastpk, vrepl_id, table_name\) values \('fields:{name:\\"id\\" type:INT32 charset:63 flags:53251} rows:{lengths:1 values:\\"1\\"}'.*`, + `/insert into _vt.copy_state \(lastpk, vrepl_id, table_name\) values \('fields:{name:"id" type:INT32 charset:63 flags:53251} rows:{lengths:1 values:"1"}'.*`, // The next catchup executes the new row insert, but will be a no-op. "insert into dst(id,val) select 3, 'ccc' from dual where (3) <= (1)", // fastForward has nothing to add. Just saves position. @@ -775,12 +775,12 @@ func testPlayerCopyBigTable(t *testing.T) { ).Then(func(expect qh.ExpectationSequencer) qh.ExpectationSequencer { ins1 := expect.Then(qh.Eventually("insert into dst(id,val) values (2,'bbb')")) upd1 := ins1.Then(qh.Eventually( - `/insert into _vt.copy_state \(lastpk, vrepl_id, table_name\) values \('fields:{name:\\"id\\" type:INT32 charset:63 flags:53251} rows:{lengths:1 values:\\"2\\"}'.*`, + `/insert into _vt.copy_state \(lastpk, vrepl_id, table_name\) values \('fields:{name:"id" type:INT32 charset:63 flags:53251} rows:{lengths:1 values:"2"}'.*`, )) // Third row copied without going back to catchup state. ins3 := expect.Then(qh.Eventually("insert into dst(id,val) values (3,'ccc')")) upd3 := ins3.Then(qh.Eventually( - `/insert into _vt.copy_state \(lastpk, vrepl_id, table_name\) values \('fields:{name:\\"id\\" type:INT32 charset:63 flags:53251} rows:{lengths:1 values:\\"3\\"}'.*`, + `/insert into _vt.copy_state \(lastpk, vrepl_id, table_name\) values \('fields:{name:"id" type:INT32 charset:63 flags:53251} rows:{lengths:1 values:"3"}'.*`, )) upd1.Then(upd3.Eventually()) return upd3 @@ -894,7 +894,7 @@ func testPlayerCopyWildcardRule(t *testing.T) { "/update _vt.vreplication set state='Copying'", // The first fast-forward has no starting point. So, it just saves the current position. "insert into src(id,val) values (1,'aaa')", - `/insert into _vt.copy_state \(lastpk, vrepl_id, table_name\) values \('fields:{name:\\"id\\" type:INT32 charset:63 flags:53251} rows:{lengths:1 values:\\"1\\"}'.*`, + `/insert into _vt.copy_state \(lastpk, vrepl_id, table_name\) values \('fields:{name:"id" type:INT32 charset:63 flags:53251} rows:{lengths:1 values:"1"}'.*`, // The next catchup executes the new row insert, but will be a no-op. "insert into src(id,val) select 3, 'ccc' from dual where (3) <= (1)", // fastForward has nothing to add. Just saves position. @@ -904,12 +904,12 @@ func testPlayerCopyWildcardRule(t *testing.T) { ).Then(func(expect qh.ExpectationSequencer) qh.ExpectationSequencer { ins1 := expect.Then(qh.Eventually("insert into src(id,val) values (2,'bbb')")) upd1 := ins1.Then(qh.Eventually( - `/insert into _vt.copy_state \(lastpk, vrepl_id, table_name\) values \('fields:{name:\\"id\\" type:INT32 charset:63 flags:53251} rows:{lengths:1 values:\\"2\\"}'.*`, + `/insert into _vt.copy_state \(lastpk, vrepl_id, table_name\) values \('fields:{name:"id" type:INT32 charset:63 flags:53251} rows:{lengths:1 values:"2"}'.*`, )) // Third row copied without going back to catchup state. 
 		ins3 := expect.Then(qh.Eventually("insert into src(id,val) values (3,'ccc')"))
 		upd3 := ins3.Then(qh.Eventually(
-			`/insert into _vt.copy_state \(lastpk, vrepl_id, table_name\) values \('fields:{name:\\"id\\" type:INT32 charset:63 flags:53251} rows:{lengths:1 values:\\"3\\"}'.*`,
+			`/insert into _vt.copy_state \(lastpk, vrepl_id, table_name\) values \('fields:{name:"id" type:INT32 charset:63 flags:53251} rows:{lengths:1 values:"3"}'.*`,
 		))
 		upd1.Then(upd3.Eventually())
 		return upd3
@@ -1068,13 +1068,13 @@ func testPlayerCopyTableContinuation(t *testing.T) {
 	).Then(qh.Immediately(
 		"insert into dst1(id,val) values (7,'insert out'), (8,'no change'), (10,'updated'), (12,'move out')",
 	)).Then(qh.Eventually(
-		`/insert into _vt.copy_state \(lastpk, vrepl_id, table_name\) values \('fields:{name:\\"id1\\" type:INT32 charset:63 flags:53251} fields:{name:\\"id2\\" type:INT32 charset:63 flags:53251} rows:{lengths:2 lengths:1 values:\\"126\\"}'.*`,
+		`/insert into _vt.copy_state \(lastpk, vrepl_id, table_name\) values \('fields:{name:"id1" type:INT32 charset:63 flags:53251} fields:{name:"id2" type:INT32 charset:63 flags:53251} rows:{lengths:2 lengths:1 values:"126"}'.*`,
 	)).Then(qh.Immediately(
 		"/delete cs, pca from _vt.copy_state as cs left join _vt.post_copy_action as pca on cs.vrepl_id=pca.vrepl_id and cs.table_name=pca.table_name.*dst1",
 		"insert into not_copied(id,val) values (1,'bbb')",
 	)).Then(qh.Eventually(
 		// Copy again. There should be no events for catchup.
-		`/insert into _vt.copy_state \(lastpk, vrepl_id, table_name\) values \('fields:{name:\\\"id\\\" type:INT32 charset:63 flags:53251} rows:{lengths:1 values:\\\"1\\\"}'.*`,
+		`/insert into _vt.copy_state \(lastpk, vrepl_id, table_name\) values \('fields:{name:"id" type:INT32 charset:63 flags:53251} rows:{lengths:1 values:"1"}'.*`,
 	)).Then(qh.Immediately(
 		"/delete cs, pca from _vt.copy_state as cs left join _vt.post_copy_action as pca on cs.vrepl_id=pca.vrepl_id and cs.table_name=pca.table_name.*not_copied",
 		"/update _vt.vreplication set state='Running'",
@@ -1390,7 +1390,7 @@ func testPlayerCopyTablesStopAfterCopy(t *testing.T) {
 	).Then(qh.Eventually(
 		"begin",
 		"insert into dst1(id,val) values (1,'aaa'), (2,'bbb')",
-		`/insert into _vt.copy_state \(lastpk, vrepl_id, table_name\) values \('fields:{name:\\"id\\" type:INT32 charset:63 flags:53251} rows:{lengths:1 values:\\"2\\"}'.*`,
+		`/insert into _vt.copy_state \(lastpk, vrepl_id, table_name\) values \('fields:{name:"id" type:INT32 charset:63 flags:53251} rows:{lengths:1 values:"2"}'.*`,
 		"commit",
 	)).Then(qh.Immediately(
 		// copy of dst1 is done: delete from copy_state.
@@ -1478,7 +1478,7 @@ func testPlayerCopyTablesGIPK(t *testing.T) {
 	).Then(qh.Eventually(
 		"begin",
 		"insert into dst1(my_row_id,val) values (1,'aaa'), (2,'bbb')",
-		`/insert into _vt.copy_state \(lastpk, vrepl_id, table_name\) values \('fields:{name:\\"my_row_id\\" type:UINT64 charset:63 flags:49699} rows:{lengths:1 values:\\"2\\"}'.*`,
+		`/insert into _vt.copy_state \(lastpk, vrepl_id, table_name\) values \('fields:{name:"my_row_id" type:UINT64 charset:63 flags:49699} rows:{lengths:1 values:"2"}'.*`,
 		"commit",
 	)).Then(qh.Immediately(
 		// copy of dst1 is done: delete from copy_state.
@@ -1489,7 +1489,7 @@ func testPlayerCopyTablesGIPK(t *testing.T) {
 		"commit",
 		"begin",
 		"insert into dst2(my_row_id,val) values (1,'aaa'), (2,'bbb')",
-		`/insert into _vt.copy_state \(lastpk, vrepl_id, table_name\) values \('fields:{name:\\"my_row_id\\" type:UINT64 charset:63 flags:49699} rows:{lengths:1 values:\\"2\\"}'.*`,
+		`/insert into _vt.copy_state \(lastpk, vrepl_id, table_name\) values \('fields:{name:"my_row_id" type:UINT64 charset:63 flags:49699} rows:{lengths:1 values:"2"}'.*`,
 		"commit",
 	)).Then(qh.Immediately(
 		// copy of dst2 is done: delete from copy_state.
@@ -1578,7 +1578,7 @@ func testPlayerCopyTableCancel(t *testing.T) {
 	).Then(qh.Eventually(
 		"begin",
 		"insert into dst1(id,val) values (1,'aaa'), (2,'bbb')",
-		`/insert into _vt.copy_state \(lastpk, vrepl_id, table_name\) values \('fields:{name:\\"id\\" type:INT32 charset:63 flags:53251} rows:{lengths:1 values:\\"2\\"}'.*`,
+		`/insert into _vt.copy_state \(lastpk, vrepl_id, table_name\) values \('fields:{name:"id" type:INT32 charset:63 flags:53251} rows:{lengths:1 values:"2"}'.*`,
 		"commit",
 	)).Then(qh.Immediately(
 		// copy of dst1 is done: delete from copy_state.
@@ -1651,11 +1651,11 @@ func testPlayerCopyTablesWithGeneratedColumn(t *testing.T) {
 		"/update _vt.vreplication set state",
 		// The first fast-forward has no starting point. So, it just saves the current position.
 		"insert into dst1(id,val,val3,id2) values (1,'aaa','aaa1',10), (2,'bbb','bbb2',20)",
-		`/insert into _vt.copy_state \(lastpk, vrepl_id, table_name\) values \('fields:{name:\\"id\\" type:INT32 charset:63 flags:53251} rows:{lengths:1 values:\\"2\\"}'.*`,
+		`/insert into _vt.copy_state \(lastpk, vrepl_id, table_name\) values \('fields:{name:"id" type:INT32 charset:63 flags:53251} rows:{lengths:1 values:"2"}'.*`,
 		// copy of dst1 is done: delete from copy_state.
 		"/delete cs, pca from _vt.copy_state as cs left join _vt.post_copy_action as pca on cs.vrepl_id=pca.vrepl_id and cs.table_name=pca.table_name.*dst1",
 		"insert into dst2(val3,val,id2) values ('aaa1','aaa',10), ('bbb2','bbb',20)",
-		`/insert into _vt.copy_state \(lastpk, vrepl_id, table_name\) values \('fields:{name:\\"id\\" type:INT32 charset:63 flags:53251} rows:{lengths:1 values:\\"2\\"}'.*`,
+		`/insert into _vt.copy_state \(lastpk, vrepl_id, table_name\) values \('fields:{name:"id" type:INT32 charset:63 flags:53251} rows:{lengths:1 values:"2"}'.*`,
 		// copy of dst2 is done: delete from copy_state.
 		"/delete cs, pca from _vt.copy_state as cs left join _vt.post_copy_action as pca on cs.vrepl_id=pca.vrepl_id and cs.table_name=pca.table_name.*dst2",
 		"/update _vt.vreplication set state",
@@ -1732,7 +1732,7 @@ func testCopyTablesWithInvalidDates(t *testing.T) {
 	).Then(qh.Eventually(
 		"begin",
 		"insert into dst1(id,dt) values (1,'2020-01-12'), (2,'0000-00-00')",
-		`/insert into _vt.copy_state \(lastpk, vrepl_id, table_name\) values \('fields:{name:\\"id\\" type:INT32 charset:63 flags:53251} rows:{lengths:1 values:\\"2\\"}'.*`,
+		`/insert into _vt.copy_state \(lastpk, vrepl_id, table_name\) values \('fields:{name:"id" type:INT32 charset:63 flags:53251} rows:{lengths:1 values:"2"}'.*`,
 		"commit",
 	)).Then(qh.Immediately(
 		// copy of dst1 is done: delete from copy_state.
@@ -1822,7 +1822,7 @@ func testCopyInvisibleColumns(t *testing.T) {
 		"/update _vt.vreplication set state='Copying'",
 		// The first fast-forward has no starting point. So, it just saves the current position.
"insert into dst1(id,id2,inv1,inv2) values (1,10,100,1000), (2,20,200,2000)", - `/insert into _vt.copy_state \(lastpk, vrepl_id, table_name\) values \('fields:{name:\\"id\\" type:INT32 charset:63 flags:53251} fields:{name:\\"inv1\\" type:INT32 charset:63 flags:53251} rows:{lengths:1 lengths:3 values:\\"2200\\"}'.*`, + `/insert into _vt.copy_state \(lastpk, vrepl_id, table_name\) values \('fields:{name:"id" type:INT32 charset:63 flags:53251} fields:{name:"inv1" type:INT32 charset:63 flags:53251} rows:{lengths:1 lengths:3 values:"2200"}'.*`, // copy of dst1 is done: delete from copy_state. "/delete cs, pca from _vt.copy_state as cs left join _vt.post_copy_action as pca on cs.vrepl_id=pca.vrepl_id and cs.table_name=pca.table_name.*dst1", "/update _vt.vreplication set state='Running'", diff --git a/go/vt/wrangler/materializer_test.go b/go/vt/wrangler/materializer_test.go index a506d52d511..1728ba6efc2 100644 --- a/go/vt/wrangler/materializer_test.go +++ b/go/vt/wrangler/materializer_test.go @@ -1929,11 +1929,11 @@ func TestMaterializerOneToOne(t *testing.T) { insertPrefix+ `\(`+ `'workflow', `+ - (`'keyspace:\\"sourceks\\" shard:\\"0\\" `+ + (`'keyspace:"sourceks" shard:"0" `+ `filter:{`+ - `rules:{match:\\"t1\\" filter:\\"select.*t1\\"} `+ - `rules:{match:\\"t2\\" filter:\\"select.*t3\\"} `+ - `rules:{match:\\"t4\\"}`+ + `rules:{match:"t1" filter:"select.*t1"} `+ + `rules:{match:"t2" filter:"select.*t3"} `+ + `rules:{match:"t4"}`+ `}', `)+ `'', [0-9]*, [0-9]*, 'zone1', 'primary,rdonly', [0-9]*, 0, 'Stopped', 'vt_targetks', 0, 0, false, '{}'`+ `\)`+eol, @@ -1968,9 +1968,9 @@ func TestMaterializerManyToOne(t *testing.T) { env.tmc.expectVRQuery( 200, insertPrefix+ - `\('workflow', 'keyspace:\\"sourceks\\" shard:\\"-80\\" filter:{rules:{match:\\"t1\\" filter:\\"select.*t1\\"} rules:{match:\\"t2\\" filter:\\"select.*t3\\"}}', '', [0-9]*, [0-9]*, '', '', [0-9]*, 0, 'Stopped', 'vt_targetks', 0, 0, false, '{}'\)`+ + `\('workflow', 'keyspace:"sourceks" shard:"-80" filter:{rules:{match:"t1" filter:"select.*t1"} rules:{match:"t2" filter:"select.*t3"}}', '', [0-9]*, [0-9]*, '', '', [0-9]*, 0, 'Stopped', 'vt_targetks', 0, 0, false, '{}'\)`+ `, `+ - `\('workflow', 'keyspace:\\"sourceks\\" shard:\\"80-\\" filter:{rules:{match:\\"t1\\" filter:\\"select.*t1\\"} rules:{match:\\"t2\\" filter:\\"select.*t3\\"}}', '', [0-9]*, [0-9]*, '', '', [0-9]*, 0, 'Stopped', 'vt_targetks', 0, 0, false, '{}'\)`+ + `\('workflow', 'keyspace:"sourceks" shard:"80-" filter:{rules:{match:"t1" filter:"select.*t1"} rules:{match:"t2" filter:"select.*t3"}}', '', [0-9]*, [0-9]*, '', '', [0-9]*, 0, 'Stopped', 'vt_targetks', 0, 0, false, '{}'\)`+ eol, &sqltypes.Result{}, ) @@ -2021,13 +2021,13 @@ func TestMaterializerOneToMany(t *testing.T) { env.tmc.expectVRQuery( 200, insertPrefix+ - `.*shard:\\"0\\" filter:{rules:{match:\\"t1\\" filter:\\"select.*t1 where in_keyrange\(c1.*targetks\.xxhash.*-80.*`, + `.*shard:"0" filter:{rules:{match:"t1" filter:"select.*t1 where in_keyrange\(c1.*targetks\.xxhash.*-80.*`, &sqltypes.Result{}, ) env.tmc.expectVRQuery( 210, insertPrefix+ - `.*shard:\\"0\\" filter:{rules:{match:\\"t1\\" filter:\\"select.*t1 where in_keyrange\(c1.*targetks\.xxhash.*80-.*`, + `.*shard:"0" filter:{rules:{match:"t1" filter:"select.*t1 where in_keyrange\(c1.*targetks\.xxhash.*80-.*`, &sqltypes.Result{}, ) env.tmc.expectVRQuery(200, mzUpdateQuery, &sqltypes.Result{}) @@ -2078,15 +2078,15 @@ func TestMaterializerManyToMany(t *testing.T) { env.tmc.expectVRQuery( 200, insertPrefix+ - `.*shard:\\"-40\\" filter:{rules:{match:\\"t1\\" 
filter:\\"select.*t1 where in_keyrange\(c1.*targetks\.xxhash.*-80.*`+ - `.*shard:\\"40-\\" filter:{rules:{match:\\"t1\\" filter:\\"select.*t1 where in_keyrange\(c1.*targetks\.xxhash.*-80.*`, + `.*shard:"-40" filter:{rules:{match:"t1" filter:"select.*t1 where in_keyrange\(c1.*targetks\.xxhash.*-80.*`+ + `.*shard:"40-" filter:{rules:{match:"t1" filter:"select.*t1 where in_keyrange\(c1.*targetks\.xxhash.*-80.*`, &sqltypes.Result{}, ) env.tmc.expectVRQuery( 210, insertPrefix+ - `.*shard:\\"-40\\" filter:{rules:{match:\\"t1\\" filter:\\"select.*t1 where in_keyrange\(c1.*targetks\.xxhash.*80-.*`+ - `.*shard:\\"40-\\" filter:{rules:{match:\\"t1\\" filter:\\"select.*t1 where in_keyrange\(c1.*targetks\.xxhash.*80-.*`, + `.*shard:"-40" filter:{rules:{match:"t1" filter:"select.*t1 where in_keyrange\(c1.*targetks\.xxhash.*80-.*`+ + `.*shard:"40-" filter:{rules:{match:"t1" filter:"select.*t1 where in_keyrange\(c1.*targetks\.xxhash.*80-.*`, &sqltypes.Result{}, ) env.tmc.expectVRQuery(200, mzUpdateQuery, &sqltypes.Result{}) @@ -2139,13 +2139,13 @@ func TestMaterializerMulticolumnVindex(t *testing.T) { env.tmc.expectVRQuery( 200, insertPrefix+ - `.*shard:\\"0\\" filter:{rules:{match:\\"t1\\" filter:\\"select.*t1 where in_keyrange\(c1, c2.*targetks\.region.*-80.*`, + `.*shard:"0" filter:{rules:{match:"t1" filter:"select.*t1 where in_keyrange\(c1, c2.*targetks\.region.*-80.*`, &sqltypes.Result{}, ) env.tmc.expectVRQuery( 210, insertPrefix+ - `.*shard:\\"0\\" filter:{rules:{match:\\"t1\\" filter:\\"select.*t1 where in_keyrange\(c1, c2.*targetks\.region.*80-.*`, + `.*shard:"0" filter:{rules:{match:"t1" filter:"select.*t1 where in_keyrange\(c1, c2.*targetks\.region.*80-.*`, &sqltypes.Result{}, ) env.tmc.expectVRQuery(200, mzUpdateQuery, &sqltypes.Result{}) @@ -2181,7 +2181,7 @@ func TestMaterializerDeploySchema(t *testing.T) { env.tmc.expectVRQuery( 200, insertPrefix+ - `\('workflow', 'keyspace:\\"sourceks\\" shard:\\"0\\" filter:{rules:{match:\\"t1\\" filter:\\"select.*t1\\"} rules:{match:\\"t2\\" filter:\\"select.*t3\\"}}', '', [0-9]*, [0-9]*, '', '', [0-9]*, 0, 'Stopped', 'vt_targetks', 0, 0, false, '{}'\)`+ + `\('workflow', 'keyspace:"sourceks" shard:"0" filter:{rules:{match:"t1" filter:"select.*t1"} rules:{match:"t2" filter:"select.*t3"}}', '', [0-9]*, [0-9]*, '', '', [0-9]*, 0, 'Stopped', 'vt_targetks', 0, 0, false, '{}'\)`+ eol, &sqltypes.Result{}, ) @@ -2219,7 +2219,7 @@ func TestMaterializerCopySchema(t *testing.T) { env.tmc.expectVRQuery( 200, insertPrefix+ - `\('workflow', 'keyspace:\\"sourceks\\" shard:\\"0\\" filter:{rules:{match:\\"t1\\" filter:\\"select.*t1\\"} rules:{match:\\"t2\\" filter:\\"select.*t3\\"}}', '', [0-9]*, [0-9]*, '', '', [0-9]*, 0, 'Stopped', 'vt_targetks', 0, 0, false, '{}'\)`+ + `\('workflow', 'keyspace:"sourceks" shard:"0" filter:{rules:{match:"t1" filter:"select.*t1"} rules:{match:"t2" filter:"select.*t3"}}', '', [0-9]*, [0-9]*, '', '', [0-9]*, 0, 'Stopped', 'vt_targetks', 0, 0, false, '{}'\)`+ eol, &sqltypes.Result{}, ) @@ -2276,13 +2276,13 @@ func TestMaterializerExplicitColumns(t *testing.T) { env.tmc.expectVRQuery( 200, insertPrefix+ - `.*shard:\\"0\\" filter:{rules:{match:\\"t1\\" filter:\\"select.*t1 where in_keyrange\(c1, c2.*targetks\.region.*-80.*`, + `.*shard:"0" filter:{rules:{match:"t1" filter:"select.*t1 where in_keyrange\(c1, c2.*targetks\.region.*-80.*`, &sqltypes.Result{}, ) env.tmc.expectVRQuery( 210, insertPrefix+ - `.*shard:\\"0\\" filter:{rules:{match:\\"t1\\" filter:\\"select.*t1 where in_keyrange\(c1, c2.*targetks\.region.*80-.*`, + `.*shard:"0" 
filter:{rules:{match:"t1" filter:"select.*t1 where in_keyrange\(c1, c2.*targetks\.region.*80-.*`, &sqltypes.Result{}, ) env.tmc.expectVRQuery(200, mzUpdateQuery, &sqltypes.Result{}) @@ -2336,13 +2336,13 @@ func TestMaterializerRenamedColumns(t *testing.T) { env.tmc.expectVRQuery( 200, insertPrefix+ - `.*shard:\\"0\\" filter:{rules:{match:\\"t1\\" filter:\\"select.*t1 where in_keyrange\(c3, c4.*targetks\.region.*-80.*`, + `.*shard:"0" filter:{rules:{match:"t1" filter:"select.*t1 where in_keyrange\(c3, c4.*targetks\.region.*-80.*`, &sqltypes.Result{}, ) env.tmc.expectVRQuery( 210, insertPrefix+ - `.*shard:\\"0\\" filter:{rules:{match:\\"t1\\" filter:\\"select.*t1 where in_keyrange\(c3, c4.*targetks\.region.*80-.*`, + `.*shard:"0" filter:{rules:{match:"t1" filter:"select.*t1 where in_keyrange\(c3, c4.*targetks\.region.*80-.*`, &sqltypes.Result{}, ) env.tmc.expectVRQuery(200, mzUpdateQuery, &sqltypes.Result{}) @@ -2863,7 +2863,7 @@ func TestMaterializerSourceShardSelection(t *testing.T) { } getStreamInsert := func(sourceShard, sourceColumn, targetVindex, targetShard string) string { - return fmt.Sprintf(`.*shard:\\"%s\\" filter:{rules:{match:\\"t1\\" filter:\\"select.*t1 where in_keyrange\(%s.*targetks\.%s.*%s.*`, sourceShard, sourceColumn, targetVindex, targetShard) + return fmt.Sprintf(`.*shard:"%s" filter:{rules:{match:"t1" filter:"select.*t1 where in_keyrange\(%s.*targetks\.%s.*%s.*`, sourceShard, sourceColumn, targetVindex, targetShard) } targetVSchema := &vschemapb.Keyspace{ @@ -3056,7 +3056,7 @@ func TestMaterializerSourceShardSelection(t *testing.T) { // The single target shard streams all data from each source shard // without any keyrange filtering. getStreamInsert: func(sourceShard, _, _, targetShard string) string { - return fmt.Sprintf(`.*shard:\\"%s\\" filter:{rules:{match:\\"t1\\" filter:\\"select.*t1`, sourceShard) + return fmt.Sprintf(`.*shard:"%s" filter:{rules:{match:"t1" filter:"select.*t1`, sourceShard) }, }, { @@ -3611,10 +3611,8 @@ func TestKeyRangesEqualOptimization(t *testing.T) { } blsBytes, err := prototext.Marshal(bls) require.NoError(t, err, "failed to marshal binlog source: %v", err) - // This is also escaped in the SQL statement. - blsStr := strings.ReplaceAll(string(blsBytes), `"`, `\"`) // Escape the string for the regexp comparison. - blsStr = regexp.QuoteMeta(blsStr) + blsStr := regexp.QuoteMeta(string(blsBytes)) // For some reason we end up with an extra slash added by QuoteMeta for the // escaped single quotes in the filter. 
 	blsStr = strings.ReplaceAll(blsStr, `\\\\`, `\\\`)
diff --git a/go/vt/wrangler/resharder_test.go b/go/vt/wrangler/resharder_test.go
index b4a939775ca..4c47b3ebf14 100644
--- a/go/vt/wrangler/resharder_test.go
+++ b/go/vt/wrangler/resharder_test.go
@@ -93,14 +93,14 @@ func TestResharderOneToMany(t *testing.T) {
 		env.tmc.expectVRQuery(
 			200,
 			insertPrefix+
-				`\('resharderTest', 'keyspace:\\"ks\\" shard:\\"0\\" filter:{rules:{match:\\"/.*\\" filter:\\"-80\\"}}', '', [0-9]*, [0-9]*, '`+
+				`\('resharderTest', 'keyspace:"ks" shard:"0" filter:{rules:{match:"/.*" filter:"-80"}}', '', [0-9]*, [0-9]*, '`+
 				tc.cells+`', '`+tc.tabletTypes+`', [0-9]*, 0, 'Stopped', 'vt_ks', 4, 0, false, '{}'\)`+eol,
 			&sqltypes.Result{},
 		)
 		env.tmc.expectVRQuery(
 			210,
 			insertPrefix+
-				`\('resharderTest', 'keyspace:\\"ks\\" shard:\\"0\\" filter:{rules:{match:\\"/.*\\" filter:\\"80-\\"}}', '', [0-9]*, [0-9]*, '`+
+				`\('resharderTest', 'keyspace:"ks" shard:"0" filter:{rules:{match:"/.*" filter:"80-"}}', '', [0-9]*, [0-9]*, '`+
 				tc.cells+`', '`+tc.tabletTypes+`', [0-9]*, 0, 'Stopped', 'vt_ks', 4, 0, false, '{}'\)`+eol,
 			&sqltypes.Result{},
 		)
@@ -137,8 +137,8 @@ func TestResharderManyToOne(t *testing.T) {
 	env.tmc.expectVRQuery(
 		200,
 		insertPrefix+
-			`\('resharderTest', 'keyspace:\\"ks\\" shard:\\"-80\\" filter:{rules:{match:\\"/.*\\" filter:\\"-\\"}}', '', [0-9]*, [0-9]*, '', '', [0-9]*, 0, 'Stopped', 'vt_ks', 4, 0, false, '{}'\).*`+
-			`\('resharderTest', 'keyspace:\\"ks\\" shard:\\"80-\\" filter:{rules:{match:\\"/.*\\" filter:\\"-\\"}}', '', [0-9]*, [0-9]*, '', '', [0-9]*, 0, 'Stopped', 'vt_ks', 4, 0, false, '{}'\)`+
+			`\('resharderTest', 'keyspace:"ks" shard:"-80" filter:{rules:{match:"/.*" filter:"-"}}', '', [0-9]*, [0-9]*, '', '', [0-9]*, 0, 'Stopped', 'vt_ks', 4, 0, false, '{}'\).*`+
+			`\('resharderTest', 'keyspace:"ks" shard:"80-" filter:{rules:{match:"/.*" filter:"-"}}', '', [0-9]*, [0-9]*, '', '', [0-9]*, 0, 'Stopped', 'vt_ks', 4, 0, false, '{}'\)`+
 			eol,
 		&sqltypes.Result{},
 	)
@@ -172,15 +172,15 @@ func TestResharderManyToMany(t *testing.T) {
 	env.tmc.expectVRQuery(
 		200,
 		insertPrefix+
-			`\('resharderTest', 'keyspace:\\"ks\\" shard:\\"-40\\" filter:{rules:{match:\\"/.*\\" filter:\\"-80\\"}}', '', [0-9]*, [0-9]*, '', '', [0-9]*, 0, 'Stopped', 'vt_ks', 4, 0, false, '{}'\).*`+
-			`\('resharderTest', 'keyspace:\\"ks\\" shard:\\"40-\\" filter:{rules:{match:\\"/.*\\" filter:\\"-80\\"}}', '', [0-9]*, [0-9]*, '', '', [0-9]*, 0, 'Stopped', 'vt_ks', 4, 0, false, '{}'\)`+
+			`\('resharderTest', 'keyspace:"ks" shard:"-40" filter:{rules:{match:"/.*" filter:"-80"}}', '', [0-9]*, [0-9]*, '', '', [0-9]*, 0, 'Stopped', 'vt_ks', 4, 0, false, '{}'\).*`+
+			`\('resharderTest', 'keyspace:"ks" shard:"40-" filter:{rules:{match:"/.*" filter:"-80"}}', '', [0-9]*, [0-9]*, '', '', [0-9]*, 0, 'Stopped', 'vt_ks', 4, 0, false, '{}'\)`+
 			eol,
 		&sqltypes.Result{},
 	)
 	env.tmc.expectVRQuery(
 		210,
 		insertPrefix+
-			`\('resharderTest', 'keyspace:\\"ks\\" shard:\\"40-\\" filter:{rules:{match:\\"/.*\\" filter:\\"80-\\"}}', '', [0-9]*, [0-9]*, '', '', [0-9]*, 0, 'Stopped', 'vt_ks', 4, 0, false, '{}'\)`+
+			`\('resharderTest', 'keyspace:"ks" shard:"40-" filter:{rules:{match:"/.*" filter:"80-"}}', '', [0-9]*, [0-9]*, '', '', [0-9]*, 0, 'Stopped', 'vt_ks', 4, 0, false, '{}'\)`+
 			eol,
 		&sqltypes.Result{},
 	)
@@ -228,14 +228,14 @@ func TestResharderOneRefTable(t *testing.T) {
 	env.tmc.expectVRQuery(
 		200,
 		insertPrefix+
-			`\('resharderTest', 'keyspace:\\"ks\\" shard:\\"0\\" filter:{rules:{match:\\"t1\\" filter:\\"exclude\\"} rules:{match:\\"/.*\\" filter:\\"-80\\"}}', '', [0-9]*, [0-9]*, '', '', [0-9]*, 0, 'Stopped', 'vt_ks', 4, 0, false, '{}'\)`+
+			`\('resharderTest', 'keyspace:"ks" shard:"0" filter:{rules:{match:"t1" filter:"exclude"} rules:{match:"/.*" filter:"-80"}}', '', [0-9]*, [0-9]*, '', '', [0-9]*, 0, 'Stopped', 'vt_ks', 4, 0, false, '{}'\)`+
 			eol,
 		&sqltypes.Result{},
 	)
 	env.tmc.expectVRQuery(
 		210,
 		insertPrefix+
-			`\('resharderTest', 'keyspace:\\"ks\\" shard:\\"0\\" filter:{rules:{match:\\"t1\\" filter:\\"exclude\\"} rules:{match:\\"/.*\\" filter:\\"80-\\"}}', '', [0-9]*, [0-9]*, '', '', [0-9]*, 0, 'Stopped', 'vt_ks', 4, 0, false, '{}'\)`+
+			`\('resharderTest', 'keyspace:"ks" shard:"0" filter:{rules:{match:"t1" filter:"exclude"} rules:{match:"/.*" filter:"80-"}}', '', [0-9]*, [0-9]*, '', '', [0-9]*, 0, 'Stopped', 'vt_ks', 4, 0, false, '{}'\)`+
 			eol,
 		&sqltypes.Result{},
 	)
@@ -283,14 +283,14 @@ func TestReshardStopFlags(t *testing.T) {
 	env.tmc.expectVRQuery(
 		200,
 		insertPrefix+
-			`\('resharderTest', 'keyspace:\\"ks\\" shard:\\"0\\" filter:{rules:{match:\\"t1\\" filter:\\"exclude\\"} rules:{match:\\"/.*\\" filter:\\"-80\\"}} stop_after_copy:true', '', [0-9]*, [0-9]*, '', '', [0-9]*, 0, 'Stopped', 'vt_ks', 4, 0, false, '{}'\)`+
+			`\('resharderTest', 'keyspace:"ks" shard:"0" filter:{rules:{match:"t1" filter:"exclude"} rules:{match:"/.*" filter:"-80"}} stop_after_copy:true', '', [0-9]*, [0-9]*, '', '', [0-9]*, 0, 'Stopped', 'vt_ks', 4, 0, false, '{}'\)`+
 			eol,
 		&sqltypes.Result{},
 	)
 	env.tmc.expectVRQuery(
 		210,
 		insertPrefix+
-			`\('resharderTest', 'keyspace:\\"ks\\" shard:\\"0\\" filter:{rules:{match:\\"t1\\" filter:\\"exclude\\"} rules:{match:\\"/.*\\" filter:\\"80-\\"}} stop_after_copy:true', '', [0-9]*, [0-9]*, '', '', [0-9]*, 0, 'Stopped', 'vt_ks', 4, 0, false, '{}'\)`+
+			`\('resharderTest', 'keyspace:"ks" shard:"0" filter:{rules:{match:"t1" filter:"exclude"} rules:{match:"/.*" filter:"80-"}} stop_after_copy:true', '', [0-9]*, [0-9]*, '', '', [0-9]*, 0, 'Stopped', 'vt_ks', 4, 0, false, '{}'\)`+
 			eol,
 		&sqltypes.Result{},
 	)
@@ -347,18 +347,18 @@ func TestResharderOneRefStream(t *testing.T) {
 	)
 	env.tmc.expectVRQuery(100, fmt.Sprintf("select workflow, source, cell, tablet_types from _vt.vreplication where db_name='vt_%s' and message != 'FROZEN'", env.keyspace), result)
 
-	refRow := `\('t1', 'keyspace:\\"ks1\\" shard:\\"0\\" filter:{rules:{match:\\"t1\\"}}', '', [0-9]*, [0-9]*, 'cell1', 'primary,replica', [0-9]*, 0, 'Stopped', 'vt_ks', 4, 0, false, '{}'\)`
+	refRow := `\('t1', 'keyspace:"ks1" shard:"0" filter:{rules:{match:"t1"}}', '', [0-9]*, [0-9]*, 'cell1', 'primary,replica', [0-9]*, 0, 'Stopped', 'vt_ks', 4, 0, false, '{}'\)`
 	env.tmc.expectVRQuery(
 		200,
 		insertPrefix+
-			`\('resharderTest', 'keyspace:\\"ks\\" shard:\\"0\\" filter:{rules:{match:\\"t1\\" filter:\\"exclude\\"} rules:{match:\\"/.*\\" filter:\\"-80\\"}}', '', [0-9]*, [0-9]*, '', '', [0-9]*, 0, 'Stopped', 'vt_ks', 4, 0, false, '{}'\).*`+
+			`\('resharderTest', 'keyspace:"ks" shard:"0" filter:{rules:{match:"t1" filter:"exclude"} rules:{match:"/.*" filter:"-80"}}', '', [0-9]*, [0-9]*, '', '', [0-9]*, 0, 'Stopped', 'vt_ks', 4, 0, false, '{}'\).*`+
 			refRow+eol,
 		&sqltypes.Result{},
 	)
 	env.tmc.expectVRQuery(
 		210,
 		insertPrefix+
-			`\('resharderTest', 'keyspace:\\"ks\\" shard:\\"0\\" filter:{rules:{match:\\"t1\\" filter:\\"exclude\\"} rules:{match:\\"/.*\\" filter:\\"80-\\"}}', '', [0-9]*, [0-9]*, '', '', [0-9]*, 0, 'Stopped', 'vt_ks', 4, 0, false, '{}'\).*`+
+			`\('resharderTest', 'keyspace:"ks" shard:"0" filter:{rules:{match:"t1" filter:"exclude"} rules:{match:"/.*" filter:"80-"}}', '', [0-9]*, [0-9]*, '', '', [0-9]*, 0, 'Stopped', 'vt_ks', 4, 0, false, '{}'\).*`+
 			refRow+eol,
 		&sqltypes.Result{},
 	)
@@ -430,14 +430,14 @@ func TestResharderNoRefStream(t *testing.T) {
 	env.tmc.expectVRQuery(
 		200,
 		insertPrefix+
-			`\('resharderTest', 'keyspace:\\"ks\\" shard:\\"0\\" filter:{rules:{match:\\"/.*\\" filter:\\"-80\\"}}', '', [0-9]*, [0-9]*, '', '', [0-9]*, 0, 'Stopped', 'vt_ks', 4, 0, false, '{}'\)`+
+			`\('resharderTest', 'keyspace:"ks" shard:"0" filter:{rules:{match:"/.*" filter:"-80"}}', '', [0-9]*, [0-9]*, '', '', [0-9]*, 0, 'Stopped', 'vt_ks', 4, 0, false, '{}'\)`+
 			eol,
 		&sqltypes.Result{},
 	)
 	env.tmc.expectVRQuery(
 		210,
 		insertPrefix+
-			`\('resharderTest', 'keyspace:\\"ks\\" shard:\\"0\\" filter:{rules:{match:\\"/.*\\" filter:\\"80-\\"}}', '', [0-9]*, [0-9]*, '', '', [0-9]*, 0, 'Stopped', 'vt_ks', 4, 0, false, '{}'\)`+
+			`\('resharderTest', 'keyspace:"ks" shard:"0" filter:{rules:{match:"/.*" filter:"80-"}}', '', [0-9]*, [0-9]*, '', '', [0-9]*, 0, 'Stopped', 'vt_ks', 4, 0, false, '{}'\)`+
 			eol,
 		&sqltypes.Result{},
 	)
@@ -472,14 +472,14 @@ func TestResharderCopySchema(t *testing.T) {
 	env.tmc.expectVRQuery(
 		200,
 		insertPrefix+
-			`\('resharderTest', 'keyspace:\\"ks\\" shard:\\"0\\" filter:{rules:{match:\\"/.*\\" filter:\\"-80\\"}}', '', [0-9]*, [0-9]*, '', '', [0-9]*, 0, 'Stopped', 'vt_ks', 4, 0, false, '{}'\)`+
+			`\('resharderTest', 'keyspace:"ks" shard:"0" filter:{rules:{match:"/.*" filter:"-80"}}', '', [0-9]*, [0-9]*, '', '', [0-9]*, 0, 'Stopped', 'vt_ks', 4, 0, false, '{}'\)`+
 			eol,
 		&sqltypes.Result{},
 	)
 	env.tmc.expectVRQuery(
 		210,
 		insertPrefix+
-			`\('resharderTest', 'keyspace:\\"ks\\" shard:\\"0\\" filter:{rules:{match:\\"/.*\\" filter:\\"80-\\"}}', '', [0-9]*, [0-9]*, '', '', [0-9]*, 0, 'Stopped', 'vt_ks', 4, 0, false, '{}'\)`+
+			`\('resharderTest', 'keyspace:"ks" shard:"0" filter:{rules:{match:"/.*" filter:"80-"}}', '', [0-9]*, [0-9]*, '', '', [0-9]*, 0, 'Stopped', 'vt_ks', 4, 0, false, '{}'\)`+
 			eol,
 		&sqltypes.Result{},
 	)
diff --git a/go/vt/wrangler/traffic_switcher_test.go b/go/vt/wrangler/traffic_switcher_test.go
index e1ae1ce908f..5bc336f634e 100644
--- a/go/vt/wrangler/traffic_switcher_test.go
+++ b/go/vt/wrangler/traffic_switcher_test.go
@@ -838,8 +838,8 @@ func testTableMigrateOneToMany(t *testing.T, keepData, keepRoutingRules bool) {
 
 	createReverseVReplication := func() {
 		deleteReverseReplication()
-		tme.dbSourceClients[0].addQueryRE(`insert into _vt.vreplication.*test_reverse.*ks2.*-80.*t1.*from `+"`"+"t1`"+`\\".*t2.*from `+"`"+"t2`"+`\\"`, &sqltypes.Result{InsertID: 1}, nil)
-		tme.dbSourceClients[0].addQueryRE(`insert into _vt.vreplication.*test_reverse.*ks2.*80-.*t1.*from `+"`"+"t1`"+`\\".*t2.*from `+"`"+"t2`"+`\\"`, &sqltypes.Result{InsertID: 2}, nil)
+		tme.dbSourceClients[0].addQueryRE(`insert into _vt.vreplication.*test_reverse.*ks2.*-80.*t1.*from `+"`"+"t1`"+`".*t2.*from `+"`"+"t2`"+`"`, &sqltypes.Result{InsertID: 1}, nil)
+		tme.dbSourceClients[0].addQueryRE(`insert into _vt.vreplication.*test_reverse.*ks2.*80-.*t1.*from `+"`"+"t1`"+`".*t2.*from `+"`"+"t2`"+`"`, &sqltypes.Result{InsertID: 2}, nil)
 		tme.dbSourceClients[0].addQuery("select * from _vt.vreplication where id = 1", stoppedResult(1), nil)
 		tme.dbSourceClients[0].addQuery("select * from _vt.vreplication where id = 2", stoppedResult(2), nil)
 	}
@@ -1072,8 +1072,8 @@ func TestTableMigrateOneToManyDryRun(t *testing.T) {
 
 	createReverseVReplication := func() {
 		deleteReverseReplicaion()
-		tme.dbSourceClients[0].addQueryRE(`insert into _vt.vreplication.*test_reverse.*ks2.*-80.*t1.*from t1\\".*t2.*from t2\\"`, &sqltypes.Result{InsertID: 1}, nil)
-		tme.dbSourceClients[0].addQueryRE(`insert into _vt.vreplication.*test_reverse.*ks2.*80-.*t1.*from t1\\".*t2.*from t2\\"`, &sqltypes.Result{InsertID: 2}, nil)
+		tme.dbSourceClients[0].addQueryRE(`insert into _vt.vreplication.*test_reverse.*ks2.*-80.*t1.*from t1".*t2.*from t2"`, &sqltypes.Result{InsertID: 1}, nil)
+		tme.dbSourceClients[0].addQueryRE(`insert into _vt.vreplication.*test_reverse.*ks2.*80-.*t1.*from t1".*t2.*from t2"`, &sqltypes.Result{InsertID: 2}, nil)
 		tme.dbSourceClients[0].addQuery("select * from _vt.vreplication where id = 1", stoppedResult(1), nil)
 		tme.dbSourceClients[0].addQuery("select * from _vt.vreplication where id = 2", stoppedResult(2), nil)
 	}
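
Note for reviewers (illustrative only, not part of the diff): every test change above follows the same pattern. Because the SQL encoder no longer escapes double quotes inside single-quoted strings, the expected-query regexps drop the literal \" and keep only regexp-level escaping. The standalone Go sketch below shows the before/after matching behavior; the row value is a hypothetical, truncated stand-in for a generated _vt.vreplication insert, not output captured from these tests.

package main

import (
	"fmt"
	"regexp"
)

func main() {
	// Hypothetical, truncated stand-in for the source column of a generated
	// _vt.vreplication insert: the protobuf-text double quotes now appear
	// verbatim inside the single-quoted SQL string.
	row := `('resharderTest', 'keyspace:"ks" shard:"0" filter:{rules:{match:"/.*" filter:"-80"}}')`

	// Old expectation: `\\"` in a Go raw string is a literal `\"`, i.e. the
	// regexp required an escaped double quote in the generated SQL.
	oldRE := regexp.MustCompile(`\('resharderTest', 'keyspace:\\"ks\\" shard:\\"0\\" filter:{rules:{match:\\"/.*\\" filter:\\"-80\\"}}'\)`)
	// New expectation: a bare " matches itself; only regexp metacharacters
	// such as ( ) . * still need escaping.
	newRE := regexp.MustCompile(`\('resharderTest', 'keyspace:"ks" shard:"0" filter:{rules:{match:"/.*" filter:"-80"}}'\)`)

	fmt.Println(oldRE.MatchString(row)) // false: the output no longer contains \"
	fmt.Println(newRE.MatchString(row)) // true
}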