diff --git a/.travis.yml b/.travis.yml
index 3560bc35079..c2a648d1351 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -13,6 +13,7 @@ env:
- MAVEN_CLI: '"clean install -B -pl debezium-connector-mongodb -am -Passembly -Dformat.formatter.goal=validate -Dformat.imports.goal=check -Dversion.mongo.server=4.0 -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn"'
- MAVEN_CLI: '"clean install -B -pl debezium-connector-mongodb -am -Passembly -Dformat.formatter.goal=validate -Dformat.imports.goal=check -Dversion.mongo.server=3.2 -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn"'
- MAVEN_CLI: '"clean install -B -pl debezium-quarkus-outbox -U -am -amd -Passembly -Dformat.formatter.goal=validate -Dformat.imports.goal=check -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn"'
+ - MAVEN_CLI: '"clean install -B -pl debezium-server -U -am -Dformat.formatter.goal=validate -Dformat.imports.goal=check -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn"'
- MAVEN_CLI: '"clean install -B -pl debezium-testing -am -amd -Passembly -Dformat.formatter.goal=validate -Dformat.imports.goal=check -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn"'
sudo: required
diff --git a/CHANGELOG.md b/CHANGELOG.md
index d0b84f6890b..b62c0768598 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -2,6 +2,51 @@
All notable changes are documented in this file. Release numbers follow [Semantic Versioning](http://semver.org)
+## 1.2.0.Beta2
+May 19th, 2020 [Detailed release notes](https://issues.redhat.com/secure/ReleaseNote.jspa?projectId=12317320&version=12345708)
+
+### New features since 1.2.0.Beta1
+
+* Add JDBC driver versions to docs [DBZ-2031](https://issues.jboss.org/browse/DBZ-2031)
+* Add a few more log statements for the Cassandra connector [DBZ-2066](https://issues.jboss.org/browse/DBZ-2066)
+* Provide ready-to-use standalone application based on the embedded engine [DBZ-651](https://issues.jboss.org/browse/DBZ-651)
+* Add option to skip LSN timestamp queries [DBZ-1988](https://issues.jboss.org/browse/DBZ-1988)
+* Add option to logical topic router for controlling placement of table information [DBZ-2034](https://issues.jboss.org/browse/DBZ-2034)
+* Add headers and topic name into scripting transforms [DBZ-2074](https://issues.jboss.org/browse/DBZ-2074)
+* Filter and content-based router SMTs should be restrictable to certain topics [DBZ-2024](https://issues.jboss.org/browse/DBZ-2024)
+
+
+### Breaking changes since 1.2.0.Beta1
+
+* Remove deprecated features [DBZ-1828](https://issues.jboss.org/browse/DBZ-1828)
+* Db2: Replace `initial_schema_only` with `schema_only` [DBZ-2051](https://issues.jboss.org/browse/DBZ-2051)
+* DebeziumContainer should allow for custom container images [DBZ-2070](https://issues.jboss.org/browse/DBZ-2070)
+
+
+### Fixes since 1.2.0.Beta1
+
+* Avro schema doesn't change if a column default value changes from 'foo' to 'bar' [DBZ-2061](https://issues.jboss.org/browse/DBZ-2061)
+* DDL statement throws error if compression keyword contains backticks (``) [DBZ-2062](https://issues.jboss.org/browse/DBZ-2062)
+* Error and connector stops when DDL contains algorithm=instant [DBZ-2067](https://issues.jboss.org/browse/DBZ-2067)
+* Debezium Engine advanced record consuming example broken [DBZ-2073](https://issues.jboss.org/browse/DBZ-2073)
+* Unable to parse MySQL ALTER statement with named primary key [DBZ-2080](https://issues.jboss.org/browse/DBZ-2080)
+* Missing schema-serializer dependency for Avro [DBZ-2082](https://issues.jboss.org/browse/DBZ-2082)
+* TinyIntOneToBooleanConverter doesn't seem to work with columns having a default value [DBZ-2085](https://issues.jboss.org/browse/DBZ-2085)
+
+
+### Other changes since 1.2.0.Beta1
+
+* Add ability to insert fields from op field in ExtractNewDocumentState [DBZ-1791](https://issues.jboss.org/browse/DBZ-1791)
+* Test with MySQL 8.0.20 [DBZ-2041](https://issues.jboss.org/browse/DBZ-2041)
+* Update debezium-examples/tutorial README: docker-compose file is missing [DBZ-2059](https://issues.jboss.org/browse/DBZ-2059)
+* Skip tests that are no longer compatible with Kafka 1.x [DBZ-2068](https://issues.jboss.org/browse/DBZ-2068)
+* Remove additional Jackson dependencies as of AK 2.5 [DBZ-2076](https://issues.jboss.org/browse/DBZ-2076)
+* Make EventProcessingFailureHandlingIT resilient against timing issues [DBZ-2078](https://issues.jboss.org/browse/DBZ-2078)
+* Tar packages must use posix format [DBZ-2088](https://issues.jboss.org/browse/DBZ-2088)
+* Remove unused sourceInfo variable [DBZ-2090](https://issues.jboss.org/browse/DBZ-2090)
+
+
+
## 1.2.0.Beta1
May 7th, 2020 [Detailed release notes](https://issues.redhat.com/secure/ReleaseNote.jspa?projectId=12317320&version=12345561)
diff --git a/COPYRIGHT.txt b/COPYRIGHT.txt
index 7a36cb7990f..8799ffc464b 100644
--- a/COPYRIGHT.txt
+++ b/COPYRIGHT.txt
@@ -61,6 +61,7 @@ Ewen Cheslack-Postava
Fabian Aussems
Fabio Cantarini
Fatih Güçlü Akkaya
+Fándly Gergő
Felix Eckhardt
Fintan Bolton
Frank Mormino
@@ -102,6 +103,7 @@ Jos Huiting
Josh Arenberg
Josh Stanfield
Joy Gao
+Juan Antonio Pedraza
Jun Du
Jure Kajzer
Keith Barber
@@ -122,6 +124,7 @@ Mariusz Strzelecki
Matteo Capitanio
Mathieu Rozieres
Matthias Wessendorf
+Max Kaplan
Melissa Winstanley
Mike Graham
Mincong Huang
diff --git a/LICENSE-3rd-PARTIES.txt b/LICENSE-3rd-PARTIES.txt
index d460505ac2b..debc052052b 100644
--- a/LICENSE-3rd-PARTIES.txt
+++ b/LICENSE-3rd-PARTIES.txt
@@ -2267,3 +2267,227 @@ https://github.com/mongodb/mongo-java-driver/blob/master/LICENSE.txt
See the License for the specific language governing permissions and
limitations under the License.
+=====
+
+https://github.com/oracle/graal/blob/master/sdk/LICENSE.md
+The Universal Permissive License (UPL), Version 1.0
+
+Subject to the condition set forth below, permission is hereby granted to any person obtaining a copy of this software, associated documentation and/or data (collectively the "Software"), free of charge and under any and all copyright rights in the Software, and any and all patent rights owned or freely licensable by each licensor hereunder covering either (i) the unmodified Software as contributed to or provided by such licensor, or (ii) the Larger Works (as defined below), to deal in both
+
+(a) the Software, and
+
+(b) any piece of software and/or hardware listed in the lrgrwrks.txt file if one is included with the Software (each a "Larger Work" to which the Software is contributed by such licensors),
+
+without restriction, including without limitation the rights to copy, create derivative works of, display, perform, and distribute the Software and make, use, sell, offer for sale, import, export, have made, and have sold the Software and the Larger Work(s), and to sublicense the foregoing rights on either these or other terms.
+
+This license is subject to the following condition:
+
+The above copyright notice and either this complete permission notice or at a minimum a reference to the UPL must be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+=====
+
+https://github.com/quarkusio/quarkus/blob/master/LICENSE.txt
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/debezium-api/pom.xml b/debezium-api/pom.xml
index 8cfdb782efe..f7dab5f3a5f 100644
--- a/debezium-api/pom.xml
+++ b/debezium-api/pom.xml
@@ -3,7 +3,7 @@
     <parent>
         <groupId>io.debezium</groupId>
         <artifactId>debezium-parent</artifactId>
-        <version>1.2.0-SNAPSHOT</version>
+        <version>1.2.0-SNAPSHOT-cs</version>
         <relativePath>../pom.xml</relativePath>
     </parent>
     <modelVersion>4.0.0</modelVersion>
diff --git a/debezium-api/src/main/java/io/debezium/engine/ChangeEvent.java b/debezium-api/src/main/java/io/debezium/engine/ChangeEvent.java
index af92412d25d..7a655af7d40 100644
--- a/debezium-api/src/main/java/io/debezium/engine/ChangeEvent.java
+++ b/debezium-api/src/main/java/io/debezium/engine/ChangeEvent.java
@@ -19,4 +19,9 @@ public interface ChangeEvent<K, V> {
public K key();
public V value();
+
+ /**
+     * @return The name of the logical destination for which the event is intended
+ */
+ public String destination();
}
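
The new accessor hands embedded-engine consumers the logical topic name directly instead of leaving them to re-derive it from the payload. A minimal consumer sketch under that assumption — `DebeziumEngine`, `Json`, and `ChangeEvent` are the existing `io.debezium.engine` API, while the wrapper class and all connector properties are placeholders:

```java
import java.util.Properties;

import io.debezium.engine.ChangeEvent;
import io.debezium.engine.DebeziumEngine;
import io.debezium.engine.format.Json;

public class DestinationAwareConsumer {
    public static void main(String[] args) throws Exception {
        Properties props = new Properties(); // connector configuration omitted (placeholders)
        try (DebeziumEngine<ChangeEvent<String, String>> engine = DebeziumEngine.create(Json.class)
                .using(props)
                .notifying(event -> {
                    // destination() is the accessor introduced above
                    System.out.println(event.destination() + " -> " + event.value());
                })
                .build()) {
            engine.run(); // blocks; real applications usually submit this to an executor
        }
    }
}
```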
diff --git a/debezium-assembly-descriptors/pom.xml b/debezium-assembly-descriptors/pom.xml
index f94b9e1c195..ba3f580fe32 100644
--- a/debezium-assembly-descriptors/pom.xml
+++ b/debezium-assembly-descriptors/pom.xml
@@ -3,7 +3,7 @@
     <parent>
         <groupId>io.debezium</groupId>
         <artifactId>debezium-parent</artifactId>
-        <version>1.2.0-SNAPSHOT</version>
+        <version>1.2.0-SNAPSHOT-cs</version>
         <relativePath>../pom.xml</relativePath>
     </parent>
     <modelVersion>4.0.0</modelVersion>
diff --git a/debezium-assembly-descriptors/src/main/resources/assemblies/connector-distribution-no-drivers.xml b/debezium-assembly-descriptors/src/main/resources/assemblies/connector-distribution-no-drivers.xml
index e57097511e5..6167a81316e 100644
--- a/debezium-assembly-descriptors/src/main/resources/assemblies/connector-distribution-no-drivers.xml
+++ b/debezium-assembly-descriptors/src/main/resources/assemblies/connector-distribution-no-drivers.xml
@@ -14,9 +14,14 @@
         <unpack>false</unpack>
         <scope>runtime</scope>
         <useProjectArtifact>false</useProjectArtifact>
+        <useTransitiveFiltering>true</useTransitiveFiltering>
         <excludes>
+            <exclude>com.fasterxml.jackson.core:jackson-core:*</exclude>
+            <exclude>com.fasterxml.jackson.core:jackson-databind:*</exclude>
+            <exclude>com.fasterxml.jackson.core:jackson-annotations:*</exclude>
+            <exclude>com.fasterxml.jackson.datatype:jackson-datatype-jsr310:*</exclude>
+            <exclude>org.reflections:reflections:*</exclude>
             <exclude>${assembly.exclude.1}</exclude>
diff --git a/debezium-assembly-descriptors/src/main/resources/assemblies/connector-distribution.xml b/debezium-assembly-descriptors/src/main/resources/assemblies/connector-distribution.xml
index e8207623942..4edf31a9f40 100644
--- a/debezium-assembly-descriptors/src/main/resources/assemblies/connector-distribution.xml
+++ b/debezium-assembly-descriptors/src/main/resources/assemblies/connector-distribution.xml
@@ -14,12 +14,14 @@
         <unpack>false</unpack>
         <scope>runtime</scope>
         <useProjectArtifact>false</useProjectArtifact>
+        <useTransitiveFiltering>true</useTransitiveFiltering>
         <excludes>
             <exclude>com.fasterxml.jackson.core:jackson-core:*</exclude>
             <exclude>com.fasterxml.jackson.core:jackson-databind:*</exclude>
             <exclude>com.fasterxml.jackson.core:jackson-annotations:*</exclude>
             <exclude>com.fasterxml.jackson.datatype:jackson-datatype-jsr310:*</exclude>
+            <exclude>org.reflections:reflections:*</exclude>
diff --git a/debezium-assembly-descriptors/src/main/resources/assemblies/server-distribution.xml b/debezium-assembly-descriptors/src/main/resources/assemblies/server-distribution.xml
new file mode 100644
index 00000000000..a23d437bd67
--- /dev/null
+++ b/debezium-assembly-descriptors/src/main/resources/assemblies/server-distribution.xml
@@ -0,0 +1,60 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<assembly>
+    <id>distribution</id>
+    <formats>
+        <format>tar.gz</format>
+        <format>zip</format>
+    </formats>
+    <includeBaseDirectory>false</includeBaseDirectory>
+    <dependencySets>
+        <dependencySet>
+            <outputDirectory>${project.artifactId}/lib</outputDirectory>
+            <unpack>false</unpack>
+            <scope>runtime</scope>
+            <useProjectArtifact>false</useProjectArtifact>
+            <useTransitiveFiltering>true</useTransitiveFiltering>
+            <excludes>
+                <exclude>org.apache.kafka:kafka-tools:*</exclude>
+                <exclude>com.github.luben:zstd-jni:*</exclude>
+                <exclude>org.lz4:lz4-java:*</exclude>
+                <exclude>org.xerial.snappy:snappy-java:*</exclude>
+                <exclude>javax.ws.rs:javax.ws.rs-api:*</exclude>
+                <exclude>org.apache.kafka:connect-file:*</exclude>
+                <exclude>org.glassfish.jersey.*:*:*</exclude>
+                <exclude>org.eclipse.jetty:*:*</exclude>
+                <exclude>org.apache.maven:*:*</exclude>
+            </excludes>
+        </dependencySet>
+    </dependencySets>
+    <fileSets>
+        <fileSet>
+            <directory>${project.basedir}/..</directory>
+            <outputDirectory>${project.artifactId}</outputDirectory>
+            <includes>
+                <include>README*</include>
+                <include>CHANGELOG*</include>
+                <include>CONTRIBUTE*</include>
+                <include>COPYRIGHT*</include>
+                <include>LICENSE*</include>
+            </includes>
+            <useDefaultExcludes>true</useDefaultExcludes>
+        </fileSet>
+        <fileSet>
+            <directory>${project.build.directory}</directory>
+            <outputDirectory>${project.artifactId}</outputDirectory>
+            <includes>
+                <include>${project.artifactId}-${project.version}-runner.jar</include>
+            </includes>
+        </fileSet>
+        <fileSet>
+            <directory>${project.build.sourceDirectory}/../distro</directory>
+            <outputDirectory>${project.artifactId}</outputDirectory>
+            <includes>
+                <include>**/*</include>
+            </includes>
+        </fileSet>
+    </fileSets>
+</assembly>
diff --git a/debezium-connector-mongodb/pom.xml b/debezium-connector-mongodb/pom.xml
index 31b53971291..e73af582947 100644
--- a/debezium-connector-mongodb/pom.xml
+++ b/debezium-connector-mongodb/pom.xml
@@ -3,7 +3,7 @@
     <parent>
         <groupId>io.debezium</groupId>
         <artifactId>debezium-parent</artifactId>
-        <version>1.2.0-SNAPSHOT</version>
+        <version>1.2.0-SNAPSHOT-cs</version>
         <relativePath>../pom.xml</relativePath>
     </parent>
     <modelVersion>4.0.0</modelVersion>
@@ -272,6 +272,7 @@
             <descriptorRefs>
                 <descriptorRef>${assembly.descriptor}</descriptorRef>
             </descriptorRefs>
+            <tarLongFileMode>posix</tarLongFileMode>
diff --git a/debezium-connector-mongodb/src/main/java/io/debezium/connector/mongodb/transforms/ExtractNewDocumentState.java b/debezium-connector-mongodb/src/main/java/io/debezium/connector/mongodb/transforms/ExtractNewDocumentState.java
index de40b50d6ee..9030aabe82d 100644
--- a/debezium-connector-mongodb/src/main/java/io/debezium/connector/mongodb/transforms/ExtractNewDocumentState.java
+++ b/debezium-connector-mongodb/src/main/java/io/debezium/connector/mongodb/transforms/ExtractNewDocumentState.java
@@ -5,10 +5,15 @@
*/
package io.debezium.connector.mongodb.transforms;
+import java.util.Arrays;
+import java.util.Collections;
import java.util.HashMap;
+import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
+import java.util.regex.Pattern;
+import java.util.stream.Collectors;
import org.apache.kafka.common.config.ConfigDef;
import org.apache.kafka.common.config.ConfigDef.Importance;
@@ -20,9 +25,12 @@
import org.apache.kafka.connect.data.SchemaBuilder;
import org.apache.kafka.connect.data.Struct;
import org.apache.kafka.connect.errors.ConnectException;
+import org.apache.kafka.connect.header.ConnectHeaders;
+import org.apache.kafka.connect.header.Headers;
import org.apache.kafka.connect.transforms.ExtractField;
import org.apache.kafka.connect.transforms.Flatten;
import org.apache.kafka.connect.transforms.Transformation;
+import org.apache.kafka.connect.transforms.util.SchemaUtil;
import org.bson.BsonBoolean;
import org.bson.BsonDocument;
import org.bson.BsonNull;
@@ -34,10 +42,13 @@
import io.debezium.config.EnumeratedValue;
import io.debezium.config.Field;
import io.debezium.data.Envelope;
+import io.debezium.data.Envelope.Operation;
+import io.debezium.pipeline.txmetadata.TransactionMonitor;
import io.debezium.schema.FieldNameSelector;
import io.debezium.transforms.ExtractNewRecordStateConfigDefinition;
import io.debezium.transforms.ExtractNewRecordStateConfigDefinition.DeleteHandling;
import io.debezium.transforms.SmtManager;
+import io.debezium.util.Strings;
/**
* Debezium Mongo Connector generates the CDC records in String format. Sink connectors usually are not able to parse
@@ -101,6 +112,7 @@ public static ArrayEncoding parse(String value, String defaultValue) {
}
private static final Logger LOGGER = LoggerFactory.getLogger(ExtractNewDocumentState.class);
+ private static final Pattern FIELD_SEPARATOR = Pattern.compile("\\.");
private static final Field ARRAY_ENCODING = Field.create("array.encoding")
.withDisplayName("Array encoding")
@@ -137,6 +149,26 @@ public static ArrayEncoding parse(String value, String defaultValue) {
.withDescription("Whether field names will be sanitized to Avro naming conventions")
.withDefault(Boolean.FALSE);
+ public static final Field ADD_SOURCE_FIELDS = Field.create("add.source.fields")
+            .withDisplayName("Adds the specified fields from the 'source' field of the payload if they exist.")
+ .withType(ConfigDef.Type.LIST)
+ .withWidth(ConfigDef.Width.LONG)
+ .withImportance(ConfigDef.Importance.LOW)
+ .withDefault("")
+ .withDescription("DEPRECATED. Please use the 'add.fields' option instead. "
+ + "Adds each field listed from the 'source' element of the payload, prefixed with __ "
+ + "Example: 'version,connector' would add __version and __connector fields");
+
+ public static final Field OPERATION_HEADER = Field.create("operation.header")
+ .withDisplayName("Adds a message header representing the applied operation")
+ .withType(Type.BOOLEAN)
+ .withWidth(Width.SHORT)
+ .withImportance(ConfigDef.Importance.LOW)
+ .withDefault(false)
+ .withDescription("DEPRECATED. Please use the 'add.fields' option instead. "
+ + "Adds the operation type of the change event as a header."
+ + "Its key is '" + ExtractNewRecordStateConfigDefinition.DEBEZIUM_OPERATION_HEADER_KEY + "'");
+
    private final ExtractField<R> afterExtractor = new ExtractField.Value<>();
    private final ExtractField<R> patchExtractor = new ExtractField.Value<>();
    private final ExtractField<R> keyExtractor = new ExtractField.Key<>();
@@ -145,7 +177,9 @@ public static ArrayEncoding parse(String value, String defaultValue) {
    private final Flatten<R> recordFlattener = new Flatten.Value<>();
private boolean addOperationHeader;
- private String[] addSourceFields;
+    private List<String> addSourceFields;
+    private List<FieldReference> additionalHeaders;
+    private List<FieldReference> additionalFields;
private boolean flattenStruct;
private String delimiter;
@@ -171,8 +205,13 @@ public R apply(R record) {
LOGGER.trace("Tombstone {} arrived and requested to be dropped", record.key());
return null;
}
- if (addOperationHeader) {
- record.headers().addString(ExtractNewRecordStateConfigDefinition.DEBEZIUM_OPERATION_HEADER_KEY, Envelope.Operation.DELETE.code());
+ if (!additionalHeaders.isEmpty()) {
+ Headers headersToAdd = makeHeaders(additionalHeaders, (Struct) record.value());
+ headersToAdd.forEach(h -> record.headers().add(h));
+ }
+ else if (addOperationHeader) {
+ LOGGER.warn("operation.header has been deprecated and is scheduled for removal. Use add.headers instead.");
+ record.headers().addString(ExtractNewRecordStateConfigDefinition.DEBEZIUM_OPERATION_HEADER_KEY, Operation.DELETE.code());
}
return newRecord(record, keyDocument, valueDocument);
}
@@ -184,7 +223,12 @@ public R apply(R record) {
final R afterRecord = afterExtractor.apply(record);
final R patchRecord = patchExtractor.apply(record);
- if (addOperationHeader) {
+ if (!additionalHeaders.isEmpty()) {
+ Headers headersToAdd = makeHeaders(additionalHeaders, (Struct) record.value());
+ headersToAdd.forEach(h -> record.headers().add(h));
+ }
+ else if (addOperationHeader) {
+ LOGGER.warn("operation.header has been deprecated and is scheduled for removal. Use add.headers instead.");
record.headers().addString(ExtractNewRecordStateConfigDefinition.DEBEZIUM_OPERATION_HEADER_KEY, ((Struct) record.value()).get("op").toString());
}
@@ -258,6 +302,10 @@ private R newRecord(R record, BsonDocument keyDocument, BsonDocument valueDocume
addSourceFieldsSchema(addSourceFields, record, valueSchemaBuilder);
}
+ if (!additionalFields.isEmpty()) {
+ addAdditionalFieldsSchema(additionalFields, record, valueSchemaBuilder);
+ }
+
finalValueSchema = valueSchemaBuilder.build();
finalValueStruct = new Struct(finalValueSchema);
    for (Entry<String, BsonValue> valuePairsForStruct : valuePairs) {
@@ -276,6 +324,10 @@ private R newRecord(R record, BsonDocument keyDocument, BsonDocument valueDocume
if (addSourceFields != null) {
addSourceFieldsValue(addSourceFields, record, finalValueStruct);
}
+
+ if (!additionalFields.isEmpty()) {
+ addFields(additionalFields, record, finalValueStruct);
+ }
}
R newRecord = record.newRecord(record.topic(), record.kafkaPartition(), finalKeySchema,
@@ -288,7 +340,7 @@ private R newRecord(R record, BsonDocument keyDocument, BsonDocument valueDocume
return newRecord;
}
- private void addSourceFieldsSchema(String[] addSourceFields, R originalRecord, SchemaBuilder valueSchemaBuilder) {
+    private void addSourceFieldsSchema(List<String> addSourceFields, R originalRecord, SchemaBuilder valueSchemaBuilder) {
Schema sourceSchema = originalRecord.valueSchema().field("source").schema();
for (String sourceField : addSourceFields) {
if (sourceSchema.field(sourceField) == null) {
@@ -299,7 +351,14 @@ private void addSourceFieldsSchema(String[] addSourceFields, R originalRecord, S
}
}
- private void addSourceFieldsValue(String[] addSourceFields, R originalRecord, Struct valueStruct) {
+    private void addAdditionalFieldsSchema(List<FieldReference> additionalFields, R originalRecord, SchemaBuilder valueSchemaBuilder) {
+ Schema sourceSchema = originalRecord.valueSchema();
+ for (FieldReference fieldReference : additionalFields) {
+ valueSchemaBuilder.field(fieldReference.newFieldName, fieldReference.getSchema(sourceSchema));
+ }
+ }
+
+    private void addSourceFieldsValue(List<String> addSourceFields, R originalRecord, Struct valueStruct) {
Struct sourceValue = ((Struct) originalRecord.value()).getStruct("source");
for (String sourceField : addSourceFields) {
valueStruct.put(ExtractNewRecordStateConfigDefinition.METADATA_FIELD_PREFIX + sourceField,
@@ -307,6 +366,15 @@ private void addSourceFieldsValue(String[] addSourceFields, R originalRecord, St
}
}
+    private void addFields(List<FieldReference> additionalFields, R originalRecord, Struct value) {
+ Struct originalRecordValue = (Struct) originalRecord.value();
+
+ // Update the value with the new fields
+ for (FieldReference fieldReference : additionalFields) {
+ value.put(fieldReference.newFieldName, fieldReference.getValue(originalRecordValue));
+ }
+ }
+
private BsonDocument getUpdateDocument(R patchRecord, BsonDocument keyDocument) {
BsonDocument valueDocument = new BsonDocument();
BsonDocument document = BsonDocument.parse(patchRecord.value().toString());
@@ -360,6 +428,24 @@ private BsonDocument getInsertDocument(R record, BsonDocument key) {
return valueDocument;
}
+    private Headers makeHeaders(List<FieldReference> additionalHeaders, Struct originalRecordValue) {
+ Headers headers = new ConnectHeaders();
+
+ for (FieldReference fieldReference : additionalHeaders) {
+ // add "d" operation header to tombstone events
+ if (originalRecordValue == null) {
+ if (Envelope.FieldName.OPERATION.equals(fieldReference.field)) {
+ headers.addString(fieldReference.newFieldName, Operation.DELETE.code());
+ }
+ continue;
+ }
+ headers.add(fieldReference.getNewFieldName(), fieldReference.getValue(originalRecordValue),
+ fieldReference.getSchema(originalRecordValue.schema()));
+ }
+
+ return headers;
+ }
+
@Override
public ConfigDef config() {
final ConfigDef config = new ConfigDef();
@@ -381,9 +467,12 @@ public void configure(final Map<String, ?> map) {
smtManager = new SmtManager<>(config);
final Field.Set configFields = Field.setOf(ARRAY_ENCODING, FLATTEN_STRUCT, DELIMITER,
- ExtractNewRecordStateConfigDefinition.OPERATION_HEADER,
+ OPERATION_HEADER,
+ ADD_SOURCE_FIELDS,
ExtractNewRecordStateConfigDefinition.HANDLE_DELETES,
ExtractNewRecordStateConfigDefinition.DROP_TOMBSTONES,
+ ExtractNewRecordStateConfigDefinition.ADD_HEADERS,
+ ExtractNewRecordStateConfigDefinition.ADD_FIELDS,
SANITIZE_FIELD_NAMES);
if (!config.validateAndRecord(configFields, LOGGER::error)) {
@@ -394,10 +483,12 @@ public void configure(final Map<String, ?> map) {
ArrayEncoding.parse(config.getString(ARRAY_ENCODING)),
FieldNameSelector.defaultNonRelationalSelector(config.getBoolean(SANITIZE_FIELD_NAMES)));
- addOperationHeader = config.getBoolean(ExtractNewRecordStateConfigDefinition.OPERATION_HEADER);
+ addOperationHeader = config.getBoolean(OPERATION_HEADER);
+
+ addSourceFields = determineAdditionalSourceField(config.getString(ADD_SOURCE_FIELDS));
- addSourceFields = config.getString(ExtractNewRecordStateConfigDefinition.ADD_SOURCE_FIELDS).isEmpty() ? null
- : config.getString(ExtractNewRecordStateConfigDefinition.ADD_SOURCE_FIELDS).split(",");
+ additionalHeaders = FieldReference.fromConfiguration(config.getString(ExtractNewRecordStateConfigDefinition.ADD_HEADERS));
+ additionalFields = FieldReference.fromConfiguration(config.getString(ExtractNewRecordStateConfigDefinition.ADD_FIELDS));
flattenStruct = config.getBoolean(FLATTEN_STRUCT);
delimiter = config.getString(DELIMITER);
@@ -420,4 +511,107 @@ public void configure(final Map<String, ?> map) {
delegateConfig.put("delimiter", delimiter);
recordFlattener.configure(delegateConfig);
}
+
+    private static List<String> determineAdditionalSourceField(String addSourceFieldsConfig) {
+ if (Strings.isNullOrEmpty(addSourceFieldsConfig)) {
+ return Collections.emptyList();
+ }
+ return Arrays.stream(addSourceFieldsConfig.split(",")).map(String::trim).collect(Collectors.toList());
+ }
+
+ /**
+ * Represents a field that should be added to the outgoing record as a header attribute or struct field.
+ */
+ // todo: refactor with ExtractNewRecordState
+ private static class FieldReference {
+ /**
+ * The struct ("source", "transaction") hosting the given field, or {@code null} for "op" and "ts_ms".
+ */
+ private final String struct;
+
+ /**
+ * The simple field name.
+ */
+ private final String field;
+
+ /**
+ * The name for the outgoing attribute/field, e.g. "__op" or "__source_ts_ms".
+ */
+ private final String newFieldName;
+
+ private FieldReference(String field) {
+ String[] parts = FIELD_SEPARATOR.split(field);
+
+ if (parts.length == 1) {
+ this.struct = determineStruct(parts[0]);
+ this.field = parts[0];
+ this.newFieldName = ExtractNewRecordStateConfigDefinition.METADATA_FIELD_PREFIX + field;
+ }
+ else if (parts.length == 2) {
+ this.struct = parts[0];
+
+ if (!(this.struct.equals(Envelope.FieldName.SOURCE) || this.struct.equals(Envelope.FieldName.TRANSACTION))) {
+ throw new IllegalArgumentException("Unexpected field name: " + field);
+ }
+
+ this.field = parts[1];
+ this.newFieldName = ExtractNewRecordStateConfigDefinition.METADATA_FIELD_PREFIX + this.struct + "_" + this.field;
+ }
+ else {
+ throw new IllegalArgumentException("Unexpected field value: " + field);
+ }
+ }
+
+ /**
+ * Determine the struct hosting the given unqualified field.
+ */
+ private static String determineStruct(String simpleFieldName) {
+ if (simpleFieldName.equals(Envelope.FieldName.OPERATION) || simpleFieldName.equals(Envelope.FieldName.TIMESTAMP)) {
+ return null;
+ }
+ else if (simpleFieldName.equals(TransactionMonitor.DEBEZIUM_TRANSACTION_ID_KEY) ||
+ simpleFieldName.equals(TransactionMonitor.DEBEZIUM_TRANSACTION_DATA_COLLECTION_ORDER_KEY) ||
+ simpleFieldName.equals(TransactionMonitor.DEBEZIUM_TRANSACTION_TOTAL_ORDER_KEY)) {
+ return Envelope.FieldName.TRANSACTION;
+ }
+ else {
+ return Envelope.FieldName.SOURCE;
+ }
+ }
+
+    static List<FieldReference> fromConfiguration(String addHeadersConfig) {
+ if (Strings.isNullOrEmpty(addHeadersConfig)) {
+ return Collections.emptyList();
+ }
+ else {
+ return Arrays.stream(addHeadersConfig.split(","))
+ .map(String::trim)
+ .map(FieldReference::new)
+ .collect(Collectors.toList());
+ }
+ }
+
+ String getNewFieldName() {
+ return newFieldName;
+ }
+
+ Object getValue(Struct originalRecordValue) {
+ Struct parentStruct = struct != null ? (Struct) originalRecordValue.get(struct) : originalRecordValue;
+
+ // transaction is optional; e.g. not present during snapshotting atm.
+ return parentStruct != null ? parentStruct.get(field) : null;
+ }
+
+ Schema getSchema(Schema originalRecordSchema) {
+ Schema parentSchema = struct != null ? originalRecordSchema.field(struct).schema() : originalRecordSchema;
+
+ org.apache.kafka.connect.data.Field schemaField = parentSchema.field(field);
+
+ if (schemaField == null) {
+ throw new IllegalArgumentException("Unexpected field name: " + field);
+ }
+
+ return SchemaUtil.copySchemaBasics(schemaField.schema()).optional().build();
+ }
+ }
}
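
With this change the MongoDB SMT accepts the same generic `add.fields` and `add.headers` options as the relational ExtractNewRecordState, keeping `operation.header` and `add.source.fields` only as deprecated aliases. A configuration sketch in the style of the integration tests below — the wrapper class and the chosen field lists are illustrative:

```java
import java.util.HashMap;
import java.util.Map;

import org.apache.kafka.connect.source.SourceRecord;

import io.debezium.connector.mongodb.transforms.ExtractNewDocumentState;

public class SmtConfigSketch {
    public static ExtractNewDocumentState<SourceRecord> configuredTransform() {
        ExtractNewDocumentState<SourceRecord> transform = new ExtractNewDocumentState<>();
        Map<String, String> props = new HashMap<>();
        props.put("add.fields", "op,ts_ms,source.collection"); // surfaces as __op, __ts_ms, __source_collection
        props.put("add.headers", "op,source.rs");              // adds __op and __source_rs record headers
        transform.configure(props);
        return transform;
    }
}
```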
diff --git a/debezium-connector-mongodb/src/test/java/io/debezium/connector/mongodb/transforms/ExtractNewDocumentStateTest.java b/debezium-connector-mongodb/src/test/java/io/debezium/connector/mongodb/transforms/ExtractNewDocumentStateTest.java
index 5a04a568c5d..8dfd0abee7a 100644
--- a/debezium-connector-mongodb/src/test/java/io/debezium/connector/mongodb/transforms/ExtractNewDocumentStateTest.java
+++ b/debezium-connector-mongodb/src/test/java/io/debezium/connector/mongodb/transforms/ExtractNewDocumentStateTest.java
@@ -47,12 +47,6 @@
public class ExtractNewDocumentStateTest {
private static final String SERVER_NAME = "serverX";
- private static final String FLATTEN_STRUCT = "flatten.struct";
- private static final String DELIMITER = "flatten.struct.delimiter";
- private static final String OPERATION_HEADER = "operation.header";
- private static final String HANDLE_DELETES = "delete.handling.mode";
- private static final String DROP_TOMBSTONE = "drop.tombstones";
- private static final String ADD_SOURCE_FIELDS = "add.source.fields";
private Filters filters;
private SourceInfo source;
diff --git a/debezium-connector-mongodb/src/test/java/io/debezium/connector/mongodb/transforms/ExtractNewDocumentStateTestIT.java b/debezium-connector-mongodb/src/test/java/io/debezium/connector/mongodb/transforms/ExtractNewDocumentStateTestIT.java
index f9e8befea1f..ccd54d92b74 100644
--- a/debezium-connector-mongodb/src/test/java/io/debezium/connector/mongodb/transforms/ExtractNewDocumentStateTestIT.java
+++ b/debezium-connector-mongodb/src/test/java/io/debezium/connector/mongodb/transforms/ExtractNewDocumentStateTestIT.java
@@ -29,6 +29,7 @@
import org.junit.Test;
import io.debezium.data.Envelope;
+import io.debezium.data.Envelope.Operation;
import io.debezium.data.SchemaUtil;
import io.debezium.doc.FixFor;
import io.debezium.transforms.ExtractNewRecordStateConfigDefinition;
@@ -52,6 +53,8 @@ public class ExtractNewDocumentStateTestIT extends AbstractExtractNewDocumentSta
private static final String OPERATION_HEADER = "operation.header";
private static final String DROP_TOMBSTONE = "drop.tombstones";
private static final String ADD_SOURCE_FIELDS = "add.source.fields";
+ private static final String ADD_HEADERS = "add.headers";
+ private static final String ADD_FIELDS = "add.fields";
@Override
protected String getCollectionName() {
@@ -77,7 +80,7 @@ public void shouldDropTombstoneByDefault() throws InterruptedException {
});
// First delete record to arrive is coming from the oplog
- SourceRecord firstRecord = getRecordByOperation(Envelope.Operation.DELETE);
+ SourceRecord firstRecord = getRecordByOperation(Operation.DELETE);
final SourceRecord transformedDelete = transformation.apply(firstRecord);
assertThat(transformedDelete).isNull();
@@ -274,7 +277,7 @@ public void shouldAddSourceFields() throws InterruptedException {
waitForStreamingRunning();
        final Map<String, String> props = new HashMap<>();
- props.put(ADD_SOURCE_FIELDS, "h,ts_ms,ord,db,rs");
+ props.put(ADD_SOURCE_FIELDS, "h,ts_ms,ord , db,rs");
transformation.configure(props);
// insert
@@ -400,7 +403,7 @@ public void shouldTransformRecordForInsertEvent() throws InterruptedException {
// then assert operation header is insert
        Iterator<Header> operationHeader = transformed.headers().allWithName(ExtractNewRecordStateConfigDefinition.DEBEZIUM_OPERATION_HEADER_KEY);
assertThat((operationHeader).hasNext()).isTrue();
- assertThat(operationHeader.next().value().toString()).isEqualTo(Envelope.Operation.CREATE.code());
+ assertThat(operationHeader.next().value().toString()).isEqualTo(Operation.CREATE.code());
// acquire key and value Structs
Struct key = (Struct) transformed.key();
@@ -520,7 +523,7 @@ public void shouldGenerateRecordForUpdateEvent() throws InterruptedException {
// then assert operation header is update
        Iterator<Header> operationHeader = transformed.headers().allWithName(ExtractNewRecordStateConfigDefinition.DEBEZIUM_OPERATION_HEADER_KEY);
assertThat((operationHeader).hasNext()).isTrue();
- assertThat(operationHeader.next().value().toString()).isEqualTo(Envelope.Operation.UPDATE.code());
+ assertThat(operationHeader.next().value().toString()).isEqualTo(Operation.UPDATE.code());
// acquire key and value Structs
Struct key = (Struct) transformed.key();
@@ -746,7 +749,7 @@ public void shouldGenerateRecordHeaderForTombstone() throws InterruptedException
// then assert operation header is delete
        Iterator<Header> operationHeader = transformed.headers().allWithName(ExtractNewRecordStateConfigDefinition.DEBEZIUM_OPERATION_HEADER_KEY);
assertThat((operationHeader).hasNext()).isTrue();
- assertThat(operationHeader.next().value().toString()).isEqualTo(Envelope.Operation.DELETE.code());
+ assertThat(operationHeader.next().value().toString()).isEqualTo(Operation.DELETE.code());
assertThat(value).isNull();
}
@@ -918,7 +921,7 @@ public void shouldGenerateRecordForDeleteEvent() throws InterruptedException {
// then assert operation header is delete
        Iterator<Header> operationHeader = transformed.headers().allWithName(ExtractNewRecordStateConfigDefinition.DEBEZIUM_OPERATION_HEADER_KEY);
assertThat((operationHeader).hasNext()).isTrue();
- assertThat(operationHeader.next().value().toString()).isEqualTo(Envelope.Operation.DELETE.code());
+ assertThat(operationHeader.next().value().toString()).isEqualTo(Operation.DELETE.code());
// acquire key and value Structs
Struct key = (Struct) transformed.key();
@@ -1195,7 +1198,248 @@ public void shouldFlattenWithDelimiterTransformRecordForUpdateEvent() throws Int
assertThat(value.schema().fields()).hasSize(4);
}
+ @Test
+ @FixFor("DBZ-1791")
+ public void testAddHeader() throws Exception {
+ waitForStreamingRunning();
+
+        final Map<String, String> props = new HashMap<>();
+ props.put(ADD_HEADERS, "op");
+ transformation.configure(props);
+
+ final SourceRecord createRecord = createCreateRecord();
+ final SourceRecord transformed = transformation.apply(createRecord);
+ assertThat(transformed.headers()).hasSize(1);
+ assertThat(getSourceRecordHeaderByKey(transformed, "__op")).isEqualTo(Operation.CREATE.code());
+ }
+
+ @Test
+ @FixFor("DBZ-1791")
+ public void testAddHeadersForMissingOrInvalidFields() throws Exception {
+ waitForStreamingRunning();
+
+        final Map<String, String> props = new HashMap<>();
+ props.put(ADD_HEADERS, "op,id");
+ transformation.configure(props);
+
+ final SourceRecord createRecord = createCreateRecord();
+ final SourceRecord transformed = transformation.apply(createRecord);
+ assertThat(transformed.headers()).hasSize(2);
+ assertThat(getSourceRecordHeaderByKey(transformed, "__op")).isEqualTo(Operation.CREATE.code());
+ assertThat(getSourceRecordHeaderByKey(transformed, "__id")).isNull();
+ }
+
+ @Test
+ @FixFor("DBZ-1791")
+ public void testAddHeadersSpecifyingStruct() throws Exception {
+ waitForStreamingRunning();
+
+        final Map<String, String> props = new HashMap<>();
+ props.put(ADD_HEADERS, "op,source.rs,source.collection");
+ transformation.configure(props);
+
+ final SourceRecord createRecord = createCreateRecord();
+ final SourceRecord transformed = transformation.apply(createRecord);
+ assertThat(transformed.headers()).hasSize(3);
+ assertThat(getSourceRecordHeaderByKey(transformed, "__op")).isEqualTo(Operation.CREATE.code());
+ assertThat(getSourceRecordHeaderByKey(transformed, "__source_rs")).isEqualTo("rs0");
+ assertThat(getSourceRecordHeaderByKey(transformed, "__source_collection")).isEqualTo(getCollectionName());
+ }
+
+ @Test
+ @FixFor("DBZ-1791")
+ public void testAddField() throws Exception {
+ waitForStreamingRunning();
+
+        final Map<String, String> props = new HashMap<>();
+ props.put(ADD_FIELDS, "op");
+ transformation.configure(props);
+
+ final SourceRecord createRecord = createCreateRecord();
+ final SourceRecord transformed = transformation.apply(createRecord);
+ assertThat(((Struct) transformed.value()).get("__op")).isEqualTo(Operation.CREATE.code());
+ }
+
+ @Test
+ @FixFor("DBZ-1791")
+ public void testAddFields() throws Exception {
+ waitForStreamingRunning();
+
+        final Map<String, String> props = new HashMap<>();
+ props.put(ADD_FIELDS, "op , ts_ms");
+ transformation.configure(props);
+
+ final SourceRecord createRecord = createCreateRecord();
+ final SourceRecord transformed = transformation.apply(createRecord);
+ assertThat(((Struct) transformed.value()).get("__op")).isEqualTo(Operation.CREATE.code());
+ assertThat(((Struct) transformed.value()).get("__ts_ms")).isNotNull();
+ }
+
+ @Test
+ @FixFor("DBZ-1791")
+ public void testAddFieldsForMissingOptionalField() throws Exception {
+ waitForStreamingRunning();
+
+        final Map<String, String> props = new HashMap<>();
+ props.put(ADD_FIELDS, "op,id");
+ transformation.configure(props);
+
+ final SourceRecord createRecord = createCreateRecord();
+ final SourceRecord transformed = transformation.apply(createRecord);
+ assertThat(((Struct) transformed.value()).get("__op")).isEqualTo(Operation.CREATE.code());
+ assertThat(((Struct) transformed.value()).get("__id")).isNull();
+ }
+
+ @Test
+ @FixFor("DBZ-1791")
+ public void testAddFieldsSpecifyStruct() throws Exception {
+ waitForStreamingRunning();
+
+        final Map<String, String> props = new HashMap<>();
+ props.put(ADD_FIELDS, "op,source.rs,source.collection");
+ transformation.configure(props);
+
+ final SourceRecord createRecord = createCreateRecord();
+ final SourceRecord transformed = transformation.apply(createRecord);
+ assertThat(((Struct) transformed.value()).get("__op")).isEqualTo(Operation.CREATE.code());
+ assertThat(((Struct) transformed.value()).get("__source_rs")).isEqualTo("rs0");
+ assertThat(((Struct) transformed.value()).get("__source_collection")).isEqualTo(getCollectionName());
+ }
+
+ @Test
+ @FixFor("DBZ-1791")
+ public void testAddFieldHandleDeleteRewrite() throws Exception {
+ waitForStreamingRunning();
+
+        final Map<String, String> props = new HashMap<>();
+ props.put(HANDLE_DELETES, "rewrite");
+ props.put(ADD_FIELDS, "op");
+ transformation.configure(props);
+
+ final SourceRecord deleteRecord = createDeleteRecordWithTombstone().allRecordsInOrder().get(0);
+ final SourceRecord transformed = transformation.apply(deleteRecord);
+ assertThat(((Struct) transformed.value()).get("__deleted")).isEqualTo(true);
+ assertThat(((Struct) transformed.value()).get("__op")).isEqualTo(Operation.DELETE.code());
+ }
+
+ @Test
+ @FixFor("DBZ-1791")
+    public void testAddFieldsHandleDeleteRewrite() throws Exception {
+ waitForStreamingRunning();
+
+        final Map<String, String> props = new HashMap<>();
+ props.put(HANDLE_DELETES, "rewrite");
+ props.put(ADD_FIELDS, "op,ts_ms");
+ transformation.configure(props);
+
+ final SourceRecord deleteRecord = createDeleteRecordWithTombstone().allRecordsInOrder().get(0);
+ final SourceRecord transformed = transformation.apply(deleteRecord);
+ assertThat(((Struct) transformed.value()).get("__deleted")).isEqualTo(true);
+ assertThat(((Struct) transformed.value()).get("__op")).isEqualTo(Operation.DELETE.code());
+ assertThat(((Struct) transformed.value()).get("__ts_ms")).isNotNull();
+ }
+
+ @Test
+ @FixFor("DBZ-1791")
+ public void testAddFieldsSpecifyStructHandleDeleteRewrite() throws Exception {
+ waitForStreamingRunning();
+
+        final Map<String, String> props = new HashMap<>();
+ props.put(HANDLE_DELETES, "rewrite");
+ props.put(ADD_FIELDS, "op,source.rs,source.collection");
+ transformation.configure(props);
+
+ final SourceRecord deleteRecord = createDeleteRecordWithTombstone().allRecordsInOrder().get(0);
+ final SourceRecord transformed = transformation.apply(deleteRecord);
+ assertThat(((Struct) transformed.value()).get("__deleted")).isEqualTo(true);
+ assertThat(((Struct) transformed.value()).get("__op")).isEqualTo(Operation.DELETE.code());
+ assertThat(((Struct) transformed.value()).get("__source_rs")).isEqualTo("rs0");
+ assertThat(((Struct) transformed.value()).get("__source_collection")).isEqualTo(getCollectionName());
+ }
+
+ @Test
+ @FixFor("DBZ-1791")
+ public void testAddFieldsHandleDeleteRewriteAndTombstone() throws Exception {
+ waitForStreamingRunning();
+
+        final Map<String, String> props = new HashMap<>();
+ props.put(HANDLE_DELETES, "rewrite");
+ props.put(ADD_FIELDS, "op,ts_ms");
+ props.put(DROP_TOMBSTONE, "false");
+ transformation.configure(props);
+
+ final SourceRecords records = createDeleteRecordWithTombstone();
+
+ final SourceRecord deleteRecord = records.allRecordsInOrder().get(0);
+ final SourceRecord deleteTransformed = transformation.apply(deleteRecord);
+ assertThat(((Struct) deleteTransformed.value()).get("__deleted")).isEqualTo(true);
+ assertThat(((Struct) deleteTransformed.value()).get("__op")).isEqualTo(Operation.DELETE.code());
+ assertThat(((Struct) deleteTransformed.value()).get("__ts_ms")).isNotNull();
+
+ final SourceRecord tombstoneRecord = records.allRecordsInOrder().get(1);
+ final SourceRecord tombstoneTransformed = transformation.apply(tombstoneRecord);
+ assertThat(tombstoneTransformed.value()).isNull();
+ }
+
+ private SourceRecord createCreateRecord() throws Exception {
+ ObjectId objId = new ObjectId();
+ Document obj = new Document()
+ .append("_id", objId)
+ .append("name", "Sally")
+ .append("address", new Document()
+ .append("struct", "Morris Park Ave")
+ .append("zipcode", "10462"));
+
+ primary().execute("insert", client -> {
+ client.getDatabase(DB_NAME).getCollection(getCollectionName()).insertOne(obj);
+ });
+
+ final SourceRecords records = consumeRecordsByTopic(1);
+ assertThat(records.recordsForTopic(topicName()).size()).isEqualTo(1);
+ assertNoRecordsToConsume();
+
+ return records.allRecordsInOrder().get(0);
+ }
+
+ private SourceRecords createDeleteRecordWithTombstone() throws Exception {
+ ObjectId objId = new ObjectId();
+ Document obj = new Document()
+ .append("_id", objId)
+ .append("name", "Sally")
+ .append("address", new Document()
+ .append("struct", "Morris Park Ave")
+ .append("zipcode", "10462"));
+
+ primary().execute("insert", client -> {
+ client.getDatabase(DB_NAME).getCollection(getCollectionName()).insertOne(obj);
+ });
+
+ final SourceRecords createRecords = consumeRecordsByTopic(1);
+ assertThat(createRecords.recordsForTopic(topicName()).size()).isEqualTo(1);
+ assertNoRecordsToConsume();
+
+ primary().execute("delete", client -> {
+ Document filter = Document.parse("{\"_id\": {\"$oid\": \"" + objId + "\"}}");
+ client.getDatabase(DB_NAME).getCollection(getCollectionName()).deleteOne(filter);
+ });
+
+ final SourceRecords deleteRecords = consumeRecordsByTopic(2);
+ assertThat(deleteRecords.recordsForTopic(topicName()).size()).isEqualTo(2);
+ assertNoRecordsToConsume();
+
+ return deleteRecords;
+ }
+
private static void waitForStreamingRunning() throws InterruptedException {
waitForStreamingRunning("mongodb", SERVER_NAME);
}
+
+ private String getSourceRecordHeaderByKey(SourceRecord record, String headerKey) {
+        Iterator<Header> headers = record.headers().allWithName(headerKey);
+ if (!headers.hasNext()) {
+ return null;
+ }
+ Object value = headers.next().value();
+ return value != null ? value.toString() : null;
+ }
}
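
Downstream code can read the headers produced by `add.headers` through the standard Connect header API. A small helper sketch — the wrapper class is illustrative, and the `__op` key assumes `add.headers=op` with the default `__` prefix:

```java
import org.apache.kafka.connect.header.Header;
import org.apache.kafka.connect.source.SourceRecord;

public class OperationHeaderSketch {
    // Returns the operation code ("c", "u", "d") carried in the __op header, or null if absent
    static String operationOf(SourceRecord record) {
        Header header = record.headers().lastWithName("__op");
        return header != null && header.value() != null ? header.value().toString() : null;
    }
}
```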
diff --git a/debezium-connector-mysql/pom.xml b/debezium-connector-mysql/pom.xml
index 7a83b4d53d7..92c81f262d7 100644
--- a/debezium-connector-mysql/pom.xml
+++ b/debezium-connector-mysql/pom.xml
@@ -3,7 +3,7 @@
     <parent>
         <groupId>io.debezium</groupId>
         <artifactId>debezium-parent</artifactId>
-        <version>1.2.0-SNAPSHOT</version>
+        <version>1.2.0-SNAPSHOT-cs</version>
         <relativePath>../pom.xml</relativePath>
     </parent>
     <modelVersion>4.0.0</modelVersion>
@@ -432,6 +432,7 @@
             <descriptorRefs>
                 <descriptorRef>${assembly.descriptor}</descriptorRef>
             </descriptorRefs>
+            <tarLongFileMode>posix</tarLongFileMode>
diff --git a/debezium-connector-mysql/src/main/java/io/debezium/connector/mysql/MySqlConnectorConfig.java b/debezium-connector-mysql/src/main/java/io/debezium/connector/mysql/MySqlConnectorConfig.java
index 14c71d32a81..d34c8300786 100644
--- a/debezium-connector-mysql/src/main/java/io/debezium/connector/mysql/MySqlConnectorConfig.java
+++ b/debezium-connector-mysql/src/main/java/io/debezium/connector/mysql/MySqlConnectorConfig.java
@@ -486,6 +486,7 @@ public static GtidNewChannelPosition parse(String value, String defaultValue) {
private static final String TABLE_WHITELIST_NAME = "table.whitelist";
private static final String TABLE_BLACKLIST_NAME = "table.blacklist";
private static final String TABLE_IGNORE_BUILTIN_NAME = "table.ignore.builtin";
+ private static final String TABLE_IGNORE_BUILTIN_DDL_NAME = "table.ignore.builtin.ddl";
/**
* Default size of the binlog buffer used for examining transactions and
@@ -618,6 +619,15 @@ public static GtidNewChannelPosition parse(String value, String defaultValue) {
.withDependents(DATABASE_WHITELIST_NAME)
.withDescription("Flag specifying whether built-in tables should be ignored.");
+ public static final Field TABLES_IGNORE_BUILTIN_DDL = Field.create(TABLE_IGNORE_BUILTIN_DDL_NAME)
+ .withDisplayName("Ignore DDL of system databases and tables")
+ .withType(Type.BOOLEAN)
+ .withWidth(Width.SHORT)
+ .withImportance(Importance.LOW)
+ .withDefault(false)
+ .withValidation(Field::isBoolean)
+ .withDescription("Flag specifying whether DDL of built-in tables should be ignored.");
+
public static final Field JDBC_DRIVER = Field.create("database.jdbc.driver")
.withDisplayName("Jdbc Driver Class Name")
.withType(Type.CLASS)
diff --git a/debezium-connector-mysql/src/main/java/io/debezium/connector/mysql/MySqlSchema.java b/debezium-connector-mysql/src/main/java/io/debezium/connector/mysql/MySqlSchema.java
index 4a301eabcf0..8ed8e1b7694 100644
--- a/debezium-connector-mysql/src/main/java/io/debezium/connector/mysql/MySqlSchema.java
+++ b/debezium-connector-mysql/src/main/java/io/debezium/connector/mysql/MySqlSchema.java
@@ -76,6 +76,7 @@ public class MySqlSchema extends RelationalDatabaseSchema {
private final boolean skipUnparseableDDL;
private final boolean storeOnlyMonitoredTablesDdl;
private boolean recoveredTables;
+    private final boolean ignoreBuiltInTablesDdl;
/**
* Create a schema component given the supplied {@link MySqlConnectorConfig MySQL connector configuration}.
@@ -118,6 +119,7 @@ public MySqlSchema(MySqlConnectorConfig configuration,
.build();
this.skipUnparseableDDL = dbHistoryConfig.getBoolean(DatabaseHistory.SKIP_UNPARSEABLE_DDL_STATEMENTS);
this.storeOnlyMonitoredTablesDdl = dbHistoryConfig.getBoolean(DatabaseHistory.STORE_ONLY_MONITORED_TABLES_DDL);
+        this.ignoreBuiltInTablesDdl = config.getBoolean(MySqlConnectorConfig.TABLES_IGNORE_BUILTIN_DDL);
this.ddlParser = new MySqlAntlrDdlParser(getValueConverters(configuration), getTableFilter());
this.ddlChanges = this.ddlParser.getDdlChanges();
@@ -361,7 +363,17 @@ else if (filters.databaseFilter().test(databaseName) || databaseName == null ||
// - all DDLs if configured
// - or global SET variables
// - or DDLs for monitored objects
- if (!storeOnlyMonitoredTablesDdl || isGlobalSetVariableStatement(ddlStatements, databaseName) || changes.stream().anyMatch(filters().tableFilter()::test)) {
+ if (!storeOnlyMonitoredTablesDdl) {
+            if (isIgnoreBuiltInTablesDdl()) {
+ if (!filters.builtInDatabaseFilter().test(databaseName)) {
+ dbHistory.record(source.partition(), source.offset(), databaseName, ddlStatements);
+ }
+ }
+ else {
+ dbHistory.record(source.partition(), source.offset(), databaseName, ddlStatements);
+ }
+ }
+ else if (isGlobalSetVariableStatement(ddlStatements, databaseName) || changes.stream().anyMatch(filters().tableFilter()::test)) {
dbHistory.record(source.partition(), source.offset(), databaseName, ddlStatements);
}
}
@@ -393,6 +405,14 @@ public boolean isStoreOnlyMonitoredTablesDdl() {
return storeOnlyMonitoredTablesDdl;
}
+ /**
+     * @return true if DDL statements of built-in tables should be ignored in the database history,
+     * false if the DDL of all tables should be stored
+     */
+    public boolean isIgnoreBuiltInTablesDdl() {
+        return ignoreBuiltInTablesDdl;
+ }
+
@Override
public boolean tableInformationComplete() {
return recoveredTables;
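
With the flag consulted both when recording streamed DDL (above) and when snapshotting table schemas (SnapshotReader, next), DDL of MySQL system tables can be kept out of the database history. A configuration sketch — the wrapper class and the connection settings are placeholders:

```java
import io.debezium.config.Configuration;
import io.debezium.connector.mysql.MySqlConnectorConfig;

public class HistoryConfigSketch {
    static Configuration mysqlConfig() {
        return Configuration.create()
                .with("database.hostname", "localhost") // placeholder connection settings
                .with("database.server.name", "dbserver1")
                .with(MySqlConnectorConfig.TABLES_IGNORE_BUILTIN_DDL.name(), "true") // skip DDL of built-in tables
                .build();
    }
}
```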
diff --git a/debezium-connector-mysql/src/main/java/io/debezium/connector/mysql/SnapshotReader.java b/debezium-connector-mysql/src/main/java/io/debezium/connector/mysql/SnapshotReader.java
index d5965dcd873..df561a593e2 100644
--- a/debezium-connector-mysql/src/main/java/io/debezium/connector/mysql/SnapshotReader.java
+++ b/debezium-connector-mysql/src/main/java/io/debezium/connector/mysql/SnapshotReader.java
@@ -867,7 +867,10 @@ private void readTableSchema(final AtomicReference<String> sql, final JdbcConnec
}
private boolean shouldRecordTableSchema(final MySqlSchema schema, final Filters filters, TableId id) {
- return !schema.isStoreOnlyMonitoredTablesDdl() || filters.tableFilter().test(id);
+ if (!schema.isStoreOnlyMonitoredTablesDdl()) {
+ return !schema.isIgnoreBuiltInTablesDdl() || !filters.builtInTableFilter().test(id);
+ }
+ return filters.tableFilter().test(id);
}
protected void readBinlogPosition(int step, SourceInfo source, JdbcConnection mysql, AtomicReference<String> sql) throws SQLException {
diff --git a/debezium-connector-mysql/src/main/java/io/debezium/connector/mysql/antlr/listener/AlterTableParserListener.java b/debezium-connector-mysql/src/main/java/io/debezium/connector/mysql/antlr/listener/AlterTableParserListener.java
index 355be0787fb..ceca2eac8af 100644
--- a/debezium-connector-mysql/src/main/java/io/debezium/connector/mysql/antlr/listener/AlterTableParserListener.java
+++ b/debezium-connector-mysql/src/main/java/io/debezium/connector/mysql/antlr/listener/AlterTableParserListener.java
@@ -39,7 +39,9 @@ public class AlterTableParserListener extends MySqlParserBaseListener {
private final List<ParseTreeListener> listeners;
private TableEditor tableEditor;
+ private ColumnEditor defaultValueColumnEditor;
private ColumnDefinitionParserListener columnDefinitionListener;
+ private DefaultValueParserListener defaultValueListener;
private List<ColumnEditor> columnEditors;
private int parsingColumnIndex = STARTING_INDEX;
@@ -80,7 +82,7 @@ public void enterAlterByAddColumn(MySqlParser.AlterByAddColumnContext ctx) {
parser.runIfNotNull(() -> {
String columnName = parser.parseName(ctx.uid(0));
ColumnEditor columnEditor = Column.editor().name(columnName);
- columnDefinitionListener = new ColumnDefinitionParserListener(tableEditor, columnEditor, parser.dataTypeResolver(), parser.getConverters());
+ columnDefinitionListener = new ColumnDefinitionParserListener(tableEditor, columnEditor, parser, listeners);
listeners.add(columnDefinitionListener);
}, tableEditor);
super.exitAlterByAddColumn(ctx);
@@ -114,7 +116,7 @@ public void enterAlterByAddColumns(MySqlParser.AlterByAddColumnsContext ctx) {
String columnName = parser.parseName(uidContext);
columnEditors.add(Column.editor().name(columnName));
}
- columnDefinitionListener = new ColumnDefinitionParserListener(tableEditor, columnEditors.get(0), parser.dataTypeResolver(), parser.getConverters());
+ columnDefinitionListener = new ColumnDefinitionParserListener(tableEditor, columnEditors.get(0), parser, listeners);
listeners.add(columnDefinitionListener);
}, tableEditor);
super.enterAlterByAddColumns(ctx);
@@ -164,7 +166,7 @@ public void enterAlterByChangeColumn(MySqlParser.AlterByChangeColumnContext ctx)
ColumnEditor columnEditor = existingColumn.edit();
columnEditor.unsetDefaultValue();
- columnDefinitionListener = new ColumnDefinitionParserListener(tableEditor, columnEditor, parser.dataTypeResolver(), parser.getConverters());
+ columnDefinitionListener = new ColumnDefinitionParserListener(tableEditor, columnEditor, parser, listeners);
listeners.add(columnDefinitionListener);
}
else {
@@ -205,7 +207,7 @@ public void enterAlterByModifyColumn(MySqlParser.AlterByModifyColumnContext ctx)
ColumnEditor columnEditor = existingColumn.edit();
columnEditor.unsetDefaultValue();
- columnDefinitionListener = new ColumnDefinitionParserListener(tableEditor, columnEditor, parser.dataTypeResolver(), parser.getConverters());
+ columnDefinitionListener = new ColumnDefinitionParserListener(tableEditor, columnEditor, parser, listeners);
listeners.add(columnDefinitionListener);
}
else {
@@ -261,13 +263,26 @@ public void enterAlterByChangeDefault(MySqlParser.AlterByChangeDefaultContext ct
String columnName = parser.parseName(ctx.uid());
Column column = tableEditor.columnWithName(columnName);
if (column != null) {
- ColumnEditor columnEditor = column.edit();
- columnEditor.generated(ctx.DROP() != null);
+ defaultValueColumnEditor = column.edit();
+ if (ctx.SET() != null) {
+ defaultValueListener = new DefaultValueParserListener(defaultValueColumnEditor, parser.getConverters(), column.isOptional(), true);
+ listeners.add(defaultValueListener);
+ }
}
}, tableEditor);
super.enterAlterByChangeDefault(ctx);
}
+ @Override
+ public void exitAlterByChangeDefault(MySqlParser.AlterByChangeDefaultContext ctx) {
+ parser.runIfNotNull(() -> {
+ tableEditor.updateColumn(defaultValueColumnEditor.create());
+ listeners.remove(defaultValueListener);
+ defaultValueColumnEditor = null;
+ }, defaultValueColumnEditor);
+ super.exitAlterByChangeDefault(ctx);
+ }
+
@Override
public void enterAlterByAddPrimaryKey(MySqlParser.AlterByAddPrimaryKeyContext ctx) {
parser.runIfNotNull(() -> {
@@ -309,7 +324,7 @@ public void enterAlterByRenameColumn(MySqlParser.AlterByRenameColumnContext ctx)
ColumnEditor columnEditor = existingColumn.edit();
// columnEditor.unsetDefaultValue();
- columnDefinitionListener = new ColumnDefinitionParserListener(tableEditor, columnEditor, parser.dataTypeResolver(), parser.getConverters());
+ columnDefinitionListener = new ColumnDefinitionParserListener(tableEditor, columnEditor, parser, listeners);
listeners.add(columnDefinitionListener);
}
else {
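
Editorial aside, not part of the patch: the new enter/exit pair means that for `ALTER TABLE ... ALTER COLUMN ... SET DEFAULT` a `DefaultValueParserListener` is attached only while the statement's subtree is walked, and the edited column is written back on exit. A hedged usage sketch, essentially what the DBZ-2061 test further below verifies (assumes the no-arg `MySqlAntlrDdlParser` constructor):

```java
import io.debezium.connector.mysql.antlr.MySqlAntlrDdlParser;
import io.debezium.relational.Table;
import io.debezium.relational.Tables;

public class ChangeDefaultSketch {
    public static void main(String[] args) {
        MySqlAntlrDdlParser parser = new MySqlAntlrDdlParser();
        Tables tables = new Tables();

        parser.parse("CREATE TABLE mytable (id INT PRIMARY KEY, last_val INT DEFAULT 5);", tables);
        parser.parse("ALTER TABLE mytable ALTER COLUMN last_val SET DEFAULT 10;", tables);

        // With the new listener wiring, the column now carries the changed default
        Table table = tables.forTable(null, null, "mytable");
        System.out.println(table.columnWithName("last_val").defaultValue());
    }
}
```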
diff --git a/debezium-connector-mysql/src/main/java/io/debezium/connector/mysql/antlr/listener/ColumnDefinitionParserListener.java b/debezium-connector-mysql/src/main/java/io/debezium/connector/mysql/antlr/listener/ColumnDefinitionParserListener.java
index 3dcd3c59b22..0d2fb8b3991 100644
--- a/debezium-connector-mysql/src/main/java/io/debezium/connector/mysql/antlr/listener/ColumnDefinitionParserListener.java
+++ b/debezium-connector-mysql/src/main/java/io/debezium/connector/mysql/antlr/listener/ColumnDefinitionParserListener.java
@@ -10,13 +10,12 @@
import java.util.List;
import java.util.stream.Collectors;
+import org.antlr.v4.runtime.tree.ParseTreeListener;
+
import io.debezium.antlr.AntlrDdlParser;
import io.debezium.antlr.DataTypeResolver;
-import io.debezium.connector.mysql.MySqlDefaultValueConverter;
-import io.debezium.connector.mysql.MySqlValueConverters;
+import io.debezium.connector.mysql.antlr.MySqlAntlrDdlParser;
import io.debezium.ddl.parser.mysql.generated.MySqlParser;
-import io.debezium.ddl.parser.mysql.generated.MySqlParser.CurrentTimestampContext;
-import io.debezium.ddl.parser.mysql.generated.MySqlParser.DefaultValueContext;
import io.debezium.ddl.parser.mysql.generated.MySqlParserBaseListener;
import io.debezium.relational.Column;
import io.debezium.relational.ColumnEditor;
@@ -30,13 +29,15 @@
*/
public class ColumnDefinitionParserListener extends MySqlParserBaseListener {
+ private final MySqlAntlrDdlParser parser;
private final DataTypeResolver dataTypeResolver;
private final TableEditor tableEditor;
private ColumnEditor columnEditor;
private boolean uniqueColumn;
private Boolean optionalColumn;
+ private DefaultValueParserListener defaultValueListener;
- private final MySqlDefaultValueConverter defaultValueConverter;
+ private final List<ParseTreeListener> listeners;
/**
* Whether to convert the column's default value into the corresponding schema type or not. This is done for column
@@ -46,17 +47,19 @@ public class ColumnDefinitionParserListener extends MySqlParserBaseListener {
*/
private final boolean convertDefault;
- public ColumnDefinitionParserListener(TableEditor tableEditor, ColumnEditor columnEditor, DataTypeResolver dataTypeResolver, MySqlValueConverters converters,
- boolean convertDefault) {
+ public ColumnDefinitionParserListener(TableEditor tableEditor, ColumnEditor columnEditor, MySqlAntlrDdlParser parser,
+ List<ParseTreeListener> listeners, boolean convertDefault) {
this.tableEditor = tableEditor;
this.columnEditor = columnEditor;
- this.dataTypeResolver = dataTypeResolver;
+ this.parser = parser;
+ this.dataTypeResolver = parser.dataTypeResolver();
+ this.listeners = listeners;
this.convertDefault = convertDefault;
- this.defaultValueConverter = new MySqlDefaultValueConverter(converters);
}
- public ColumnDefinitionParserListener(TableEditor tableEditor, ColumnEditor columnEditor, DataTypeResolver dataTypeResolver, MySqlValueConverters converters) {
- this(tableEditor, columnEditor, dataTypeResolver, converters, true);
+ public ColumnDefinitionParserListener(TableEditor tableEditor, ColumnEditor columnEditor, MySqlAntlrDdlParser parser,
+ List<ParseTreeListener> listeners) {
+ this(tableEditor, columnEditor, parser, listeners, true);
}
public void setColumnEditor(ColumnEditor columnEditor) {
@@ -76,6 +79,10 @@ public void enterColumnDefinition(MySqlParser.ColumnDefinitionContext ctx) {
uniqueColumn = false;
optionalColumn = null;
resolveColumnDataType(ctx.dataType());
+ parser.runIfNotNull(() -> {
+ defaultValueListener = new DefaultValueParserListener(columnEditor, parser.getConverters(), optionalColumn, convertDefault);
+ listeners.add(defaultValueListener);
+ }, tableEditor);
super.enterColumnDefinition(ctx);
}
@@ -89,6 +96,9 @@ public void exitColumnDefinition(MySqlParser.ColumnDefinitionContext ctx) {
tableEditor.addColumn(columnEditor.create());
tableEditor.setPrimaryKeyNames(columnEditor.name());
}
+ parser.runIfNotNull(() -> {
+ listeners.remove(defaultValueListener);
+ }, tableEditor);
super.exitColumnDefinition(ctx);
}
@@ -114,50 +124,6 @@ public void enterNullNotnull(MySqlParser.NullNotnullContext ctx) {
super.enterNullNotnull(ctx);
}
- @Override
- public void enterDefaultValue(DefaultValueContext ctx) {
- String sign = "";
- if (ctx.NULL_LITERAL() != null) {
- return;
- }
- if (ctx.unaryOperator() != null) {
- sign = ctx.unaryOperator().getText();
- }
- if (ctx.constant() != null) {
- if (ctx.constant().stringLiteral() != null) {
- columnEditor.defaultValue(sign + unquote(ctx.constant().stringLiteral().getText()));
- }
- else if (ctx.constant().decimalLiteral() != null) {
- columnEditor.defaultValue(sign + ctx.constant().decimalLiteral().getText());
- }
- else if (ctx.constant().BIT_STRING() != null) {
- columnEditor.defaultValue(unquoteBinary(ctx.constant().BIT_STRING().getText()));
- }
- else if (ctx.constant().booleanLiteral() != null) {
- columnEditor.defaultValue(ctx.constant().booleanLiteral().getText());
- }
- else if (ctx.constant().REAL_LITERAL() != null) {
- columnEditor.defaultValue(ctx.constant().REAL_LITERAL().getText());
- }
- }
- else if (ctx.currentTimestamp() != null && !ctx.currentTimestamp().isEmpty()) {
- if (ctx.currentTimestamp().size() > 1 || (ctx.ON() == null && ctx.UPDATE() == null)) {
- final CurrentTimestampContext currentTimestamp = ctx.currentTimestamp(0);
- if (currentTimestamp.CURRENT_TIMESTAMP() != null || currentTimestamp.NOW() != null) {
- columnEditor.defaultValue("1970-01-01 00:00:00");
- }
- else {
- columnEditor.defaultValue(currentTimestamp.getText());
- }
- }
- }
- // For CREATE TABLE are all column default values converted only after charset is known
- if (convertDefault) {
- convertDefaultValueToSchemaType(columnEditor);
- }
- super.enterDefaultValue(ctx);
- }
-
@Override
public void enterAutoIncrementColumnConstraint(MySqlParser.AutoIncrementColumnConstraintContext ctx) {
columnEditor.autoIncremented(true);
@@ -297,20 +263,4 @@ private void serialColumn() {
columnEditor.autoIncremented(true);
columnEditor.generated(true);
}
-
- private void convertDefaultValueToSchemaType(ColumnEditor columnEditor) {
- if (optionalColumn != null) {
- columnEditor.optional(optionalColumn.booleanValue());
- }
-
- defaultValueConverter.setColumnDefaultValue(columnEditor);
- }
-
- private String unquote(String stringLiteral) {
- return stringLiteral.substring(1, stringLiteral.length() - 1);
- }
-
- private String unquoteBinary(String stringLiteral) {
- return stringLiteral.substring(2, stringLiteral.length() - 1);
- }
}
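
Editorial aside, not part of the patch: the constructor change above is what enables the transient default-value listener. `ColumnDefinitionParserListener` now receives the parser (for converters and `runIfNotNull`) and the shared listener list, so it can attach a `DefaultValueParserListener` on `enterColumnDefinition` and detach it on `exitColumnDefinition`. A generic sketch of that attach/detach pattern, with hypothetical names:

```java
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;

import org.antlr.v4.runtime.tree.ParseTreeListener;

// A child listener that is active only while the enclosing rule's subtree is walked.
final class TransientListenerSketch {
    private final List<ParseTreeListener> listeners = new CopyOnWriteArrayList<>();

    void enterEnclosingRule(ParseTreeListener child) {
        listeners.add(child);    // callbacks now fire for the current column only
    }

    void exitEnclosingRule(ParseTreeListener child) {
        listeners.remove(child); // the next column gets a fresh child listener
    }
}
```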
diff --git a/debezium-connector-mysql/src/main/java/io/debezium/connector/mysql/antlr/listener/CreateTableParserListener.java b/debezium-connector-mysql/src/main/java/io/debezium/connector/mysql/antlr/listener/CreateTableParserListener.java
index 637e8735d76..8a3fd423eb1 100644
--- a/debezium-connector-mysql/src/main/java/io/debezium/connector/mysql/antlr/listener/CreateTableParserListener.java
+++ b/debezium-connector-mysql/src/main/java/io/debezium/connector/mysql/antlr/listener/CreateTableParserListener.java
@@ -97,7 +97,7 @@ public void enterColumnDeclaration(MySqlParser.ColumnDeclarationContext ctx) {
String columnName = parser.parseName(ctx.uid());
ColumnEditor columnEditor = Column.editor().name(columnName);
if (columnDefinitionListener == null) {
- columnDefinitionListener = new ColumnDefinitionParserListener(tableEditor, columnEditor, parser.dataTypeResolver(), parser.getConverters(), false);
+ columnDefinitionListener = new ColumnDefinitionParserListener(tableEditor, columnEditor, parser, listeners, false);
listeners.add(columnDefinitionListener);
}
else {
diff --git a/debezium-connector-mysql/src/main/java/io/debezium/connector/mysql/antlr/listener/DefaultValueParserListener.java b/debezium-connector-mysql/src/main/java/io/debezium/connector/mysql/antlr/listener/DefaultValueParserListener.java
new file mode 100644
index 00000000000..6c078b51079
--- /dev/null
+++ b/debezium-connector-mysql/src/main/java/io/debezium/connector/mysql/antlr/listener/DefaultValueParserListener.java
@@ -0,0 +1,103 @@
+/*
+ * Copyright Debezium Authors.
+ *
+ * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
+ */
+
+package io.debezium.connector.mysql.antlr.listener;
+
+import io.debezium.connector.mysql.MySqlDefaultValueConverter;
+import io.debezium.connector.mysql.MySqlValueConverters;
+import io.debezium.ddl.parser.mysql.generated.MySqlParser.CurrentTimestampContext;
+import io.debezium.ddl.parser.mysql.generated.MySqlParser.DefaultValueContext;
+import io.debezium.ddl.parser.mysql.generated.MySqlParserBaseListener;
+import io.debezium.relational.ColumnEditor;
+
+/**
+ * Parser listener that parses the default value definition part of MySQL statements.
+ *
+ * @author Jiri Pechanec
+ */
+public class DefaultValueParserListener extends MySqlParserBaseListener {
+
+ private final ColumnEditor columnEditor;
+ private final Boolean optionalColumn;
+
+ private final MySqlDefaultValueConverter defaultValueConverter;
+
+ /**
+ * Whether to convert the column's default value into the corresponding schema type or not. This is done for column
+ * definitions of ALTER TABLE statements but not for CREATE TABLE. In case of the latter, the default value
+ * conversion is handled by the CREATE TABLE statement listener itself, as a default character set given at the
+ * table level might have to be applied.
+ */
+ private final boolean convertDefault;
+
+ public DefaultValueParserListener(ColumnEditor columnEditor, MySqlValueConverters converters,
+ Boolean optionalColumn, boolean convertDefault) {
+ this.columnEditor = columnEditor;
+ this.defaultValueConverter = new MySqlDefaultValueConverter(converters);
+ this.optionalColumn = optionalColumn;
+ this.convertDefault = convertDefault;
+ }
+
+ @Override
+ public void enterDefaultValue(DefaultValueContext ctx) {
+ String sign = "";
+ if (ctx.NULL_LITERAL() != null) {
+ return;
+ }
+ if (ctx.unaryOperator() != null) {
+ sign = ctx.unaryOperator().getText();
+ }
+ if (ctx.constant() != null) {
+ if (ctx.constant().stringLiteral() != null) {
+ columnEditor.defaultValue(sign + unquote(ctx.constant().stringLiteral().getText()));
+ }
+ else if (ctx.constant().decimalLiteral() != null) {
+ columnEditor.defaultValue(sign + ctx.constant().decimalLiteral().getText());
+ }
+ else if (ctx.constant().BIT_STRING() != null) {
+ columnEditor.defaultValue(unquoteBinary(ctx.constant().BIT_STRING().getText()));
+ }
+ else if (ctx.constant().booleanLiteral() != null) {
+ columnEditor.defaultValue(ctx.constant().booleanLiteral().getText());
+ }
+ else if (ctx.constant().REAL_LITERAL() != null) {
+ columnEditor.defaultValue(ctx.constant().REAL_LITERAL().getText());
+ }
+ }
+ else if (ctx.currentTimestamp() != null && !ctx.currentTimestamp().isEmpty()) {
+ if (ctx.currentTimestamp().size() > 1 || (ctx.ON() == null && ctx.UPDATE() == null)) {
+ final CurrentTimestampContext currentTimestamp = ctx.currentTimestamp(0);
+ if (currentTimestamp.CURRENT_TIMESTAMP() != null || currentTimestamp.NOW() != null) {
+ columnEditor.defaultValue("1970-01-01 00:00:00");
+ }
+ else {
+ columnEditor.defaultValue(currentTimestamp.getText());
+ }
+ }
+ }
+ // For CREATE TABLE, all column default values are converted only after the charset is known
+ if (convertDefault) {
+ convertDefaultValueToSchemaType(columnEditor);
+ }
+ super.enterDefaultValue(ctx);
+ }
+
+ private void convertDefaultValueToSchemaType(ColumnEditor columnEditor) {
+ if (optionalColumn != null) {
+ columnEditor.optional(optionalColumn.booleanValue());
+ }
+
+ defaultValueConverter.setColumnDefaultValue(columnEditor);
+ }
+
+ private String unquote(String stringLiteral) {
+ return stringLiteral.substring(1, stringLiteral.length() - 1);
+ }
+
+ private String unquoteBinary(String stringLiteral) {
+ return stringLiteral.substring(2, stringLiteral.length() - 1);
+ }
+}
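
For quick reference, the branches of `enterDefaultValue` above map DEFAULT clauses to raw column defaults as follows (derived directly from the code):

```java
// DEFAULT 'abc'             -> defaultValue("abc")    (string literal, quotes stripped)
// DEFAULT -5                -> defaultValue("-5")     (unary sign + decimal literal)
// DEFAULT b'101'            -> defaultValue("101")    (BIT_STRING, b'...' wrapper stripped)
// DEFAULT TRUE              -> defaultValue("TRUE")   (boolean literal, verbatim)
// DEFAULT 1.5               -> defaultValue("1.5")    (REAL_LITERAL, verbatim)
// DEFAULT CURRENT_TIMESTAMP -> defaultValue("1970-01-01 00:00:00")  (fixed placeholder)
// DEFAULT NULL              -> no default recorded (the method returns early)
```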
diff --git a/debezium-connector-mysql/src/test/java/io/debezium/connector/mysql/MySqlAntlrDdlParserTest.java b/debezium-connector-mysql/src/test/java/io/debezium/connector/mysql/MySqlAntlrDdlParserTest.java
index 8c2db53f696..7f70099525b 100644
--- a/debezium-connector-mysql/src/test/java/io/debezium/connector/mysql/MySqlAntlrDdlParserTest.java
+++ b/debezium-connector-mysql/src/test/java/io/debezium/connector/mysql/MySqlAntlrDdlParserTest.java
@@ -58,6 +58,28 @@ public void beforeEach() {
tables = new Tables();
}
+ @Test
+ @FixFor("DBZ-2061")
+ public void shouldUpdateSchemaForChangeDefaultValue() {
+ String ddl = "CREATE TABLE mytable (id INT PRIMARY KEY, val1 INT);"
+ + "ALTER TABLE mytable ADD COLUMN last_val INT DEFAULT 5;";
+ parser.parse(ddl, tables);
+ assertThat(((MySqlAntlrDdlParser) parser).getParsingExceptionsFromWalker().size()).isEqualTo(0);
+ assertThat(tables.size()).isEqualTo(1);
+
+ parser.parse("ALTER TABLE mytable ALTER COLUMN last_val SET DEFAULT 10;", tables);
+ assertThat(((MySqlAntlrDdlParser) parser).getParsingExceptionsFromWalker().size()).isEqualTo(0);
+ assertThat(tables.size()).isEqualTo(1);
+
+ Table table = tables.forTable(null, null, "mytable");
+ assertThat(table.columns()).hasSize(3);
+ assertThat(table.columnWithName("id")).isNotNull();
+ assertThat(table.columnWithName("val1")).isNotNull();
+ assertThat(table.columnWithName("last_val")).isNotNull();
+ assertThat(table.columnWithName("last_val").defaultValue()).isEqualTo(10);
+ }
+
+ @Test
@FixFor("DBZ-1833")
public void shouldNotUpdateExistingTable() {
String ddl = "CREATE TABLE mytable (id INT PRIMARY KEY, val1 INT)";
@@ -210,6 +232,16 @@ public void shouldNotGetExceptionOnParseAlterStatementsWithoutCreate() {
assertThat(tables.size()).isEqualTo(0);
}
+ @Test
+ @FixFor("DBZ-2067")
+ public void shouldSupportInstantAlgoOnAlterStatements() {
+ final String ddl = "CREATE TABLE foo (id SERIAL, c1 INT);" +
+ "ALTER TABLE foo ADD COLUMN c2 INT, ALGORITHM=INSTANT;";
+ parser.parse(ddl, tables);
+
+ assertThat(((MySqlAntlrDdlParser) parser).getParsingExceptionsFromWalker().size()).isEqualTo(0);
+ }
+
@Test
@FixFor("DBZ-1220")
public void shouldParseFloatVariants() {
@@ -942,14 +974,20 @@ public void shouldParseThirdPartyStorageEngine() {
+ " c1 INTEGER NOT NULL, " + System.lineSeparator()
+ " c2 VARCHAR(22) " + System.lineSeparator()
+ ") engine=Aria;";
- parser.parse(ddl1 + ddl2 + ddl3, tables);
- assertThat(tables.size()).isEqualTo(3);
+ String ddl4 = "CREATE TABLE escaped_foo ( " + System.lineSeparator()
+ + " c1 INTEGER NOT NULL, " + System.lineSeparator()
+ + " c2 VARCHAR(22) " + System.lineSeparator()
+ + ") engine=TokuDB `compression`=tokudb_zlib;";
+ parser.parse(ddl1 + ddl2 + ddl3 + ddl4, tables);
+ assertThat(tables.size()).isEqualTo(4);
listener.assertNext().createTableNamed("foo").ddlStartsWith("CREATE TABLE foo (");
listener.assertNext().createTableNamed("bar").ddlStartsWith("CREATE TABLE bar (");
listener.assertNext().createTableNamed("baz").ddlStartsWith("CREATE TABLE baz (");
+ listener.assertNext().createTableNamed("escaped_foo").ddlStartsWith("CREATE TABLE escaped_foo (");
parser.parse("DROP TABLE foo", tables);
parser.parse("DROP TABLE bar", tables);
parser.parse("DROP TABLE baz", tables);
+ parser.parse("DROP TABLE escaped_foo", tables);
assertThat(tables.size()).isEqualTo(0);
}
diff --git a/debezium-connector-mysql/src/test/java/io/debezium/connector/mysql/MySqlConnectorIT.java b/debezium-connector-mysql/src/test/java/io/debezium/connector/mysql/MySqlConnectorIT.java
index f003e69beca..295a4ff4758 100644
--- a/debezium-connector-mysql/src/test/java/io/debezium/connector/mysql/MySqlConnectorIT.java
+++ b/debezium-connector-mysql/src/test/java/io/debezium/connector/mysql/MySqlConnectorIT.java
@@ -1084,7 +1084,7 @@ private void dropDatabases() throws SQLException {
connection.query("SHOW DATABASES", rs -> {
while (rs.next()) {
final String dbName = rs.getString(1);
- if (!Filters.isBuiltInDatabase(dbName) && !dbName.equals(DATABASE.getDatabaseName())) {
+ if (!Filters.isBuiltInDatabase(dbName) && !dbName.equals(DATABASE.getDatabaseName()) && !dbName.equals("emptydb")) {
connection.execute("DROP DATABASE IF EXISTS " + dbName);
}
}
diff --git a/debezium-connector-mysql/src/test/java/io/debezium/connector/mysql/MySqlTinyIntIT.java b/debezium-connector-mysql/src/test/java/io/debezium/connector/mysql/MySqlTinyIntIT.java
index c02443d8c15..93a80d58cfd 100644
--- a/debezium-connector-mysql/src/test/java/io/debezium/connector/mysql/MySqlTinyIntIT.java
+++ b/debezium-connector-mysql/src/test/java/io/debezium/connector/mysql/MySqlTinyIntIT.java
@@ -60,18 +60,13 @@ public void shouldHandleTinyIntAsNumber() throws SQLException, InterruptedExcept
// Use the DB configuration to define the connector's configuration ...
config = DATABASE.defaultConfig()
.with(MySqlConnectorConfig.SNAPSHOT_MODE, MySqlConnectorConfig.SnapshotMode.INITIAL)
+ .with(MySqlConnectorConfig.TABLE_WHITELIST, DATABASE.qualifiedTableName("DBZ1773"))
.build();
// Start the connector ...
start(MySqlConnector.class, config);
- // ---------------------------------------------------------------------------------------------------------------
- // Consume all of the events due to startup and initialization of the database
- // ---------------------------------------------------------------------------------------------------------------
- final int numDatabase = 2;
- final int numTables = 2;
- final int numOthers = 2;
- consumeRecords(numDatabase + numTables + numOthers);
+ consumeInitial();
assertIntChangeRecord();
@@ -89,6 +84,7 @@ public void shouldHandleTinyIntOneAsBoolean() throws SQLException, InterruptedEx
// Use the DB configuration to define the connector's configuration ...
config = DATABASE.defaultConfig()
.with(MySqlConnectorConfig.SNAPSHOT_MODE, MySqlConnectorConfig.SnapshotMode.INITIAL)
+ .with(MySqlConnectorConfig.TABLE_WHITELIST, DATABASE.qualifiedTableName("DBZ1773"))
.with(MySqlConnectorConfig.CUSTOM_CONVERTERS, "boolean")
.with("boolean.type", TinyIntOneToBooleanConverter.class.getName())
.with("boolean.selector", ".*DBZ1773.b")
@@ -97,13 +93,7 @@ public void shouldHandleTinyIntOneAsBoolean() throws SQLException, InterruptedEx
// Start the connector ...
start(MySqlConnector.class, config);
- // ---------------------------------------------------------------------------------------------------------------
- // Consume all of the events due to startup and initialization of the database
- // ---------------------------------------------------------------------------------------------------------------
- final int numDatabase = 2;
- final int numTables = 2;
- final int numOthers = 2;
- consumeRecords(numDatabase + numTables + numOthers);
+ consumeInitial();
assertBooleanChangeRecord();
@@ -115,6 +105,43 @@ public void shouldHandleTinyIntOneAsBoolean() throws SQLException, InterruptedEx
stopConnector();
}
+ @Test
+ @FixFor("DBZ-2085")
+ public void shouldDefaultValueForTinyIntOneAsBoolean() throws SQLException, InterruptedException {
+ // Use the DB configuration to define the connector's configuration ...
+ config = DATABASE.defaultConfig()
+ .with(MySqlConnectorConfig.SNAPSHOT_MODE, MySqlConnectorConfig.SnapshotMode.INITIAL)
+ .with(MySqlConnectorConfig.TABLE_WHITELIST, DATABASE.qualifiedTableName("DBZ2085"))
+ .with(MySqlConnectorConfig.CUSTOM_CONVERTERS, "boolean")
+ .with("boolean.type", TinyIntOneToBooleanConverter.class.getName())
+ .with("boolean.selector", ".*DBZ2085.b")
+ .build();
+
+ // Start the connector ...
+ start(MySqlConnector.class, config);
+
+ consumeInitial();
+
+ assertDefaultValueBooleanChangeRecord();
+
+ try (final Connection conn = MySQLConnection.forTestDatabase(DATABASE.getDatabaseName()).connection()) {
+ conn.createStatement().execute("INSERT INTO DBZ2085 VALUES (DEFAULT, true)");
+ }
+ assertDefaultValueBooleanChangeRecord();
+
+ stopConnector();
+ }
+
+ private void consumeInitial() throws InterruptedException {
+ // ---------------------------------------------------------------------------------------------------------------
+ // Consume all of the events due to startup and initialization of the database
+ // ---------------------------------------------------------------------------------------------------------------
+ final int numDatabase = 2;
+ final int numTables = 4;
+ final int numOthers = 2;
+ consumeRecords(numDatabase + numTables + numOthers);
+ }
+
private void assertIntChangeRecord() throws InterruptedException {
final SourceRecord record = consumeRecord();
Assertions.assertThat(record).isNotNull();
@@ -136,4 +163,13 @@ private void assertBooleanChangeRecord() throws InterruptedException {
Assertions.assertThat(change.getInt16("ti2")).isEqualTo((short) 50);
Assertions.assertThat(change.getBoolean("b")).isEqualTo(true);
}
+
+ private void assertDefaultValueBooleanChangeRecord() throws InterruptedException {
+ final SourceRecord record = consumeRecord();
+ Assertions.assertThat(record).isNotNull();
+ final Struct change = ((Struct) record.value()).getStruct("after");
+
+ Assertions.assertThat(change.getBoolean("b")).isEqualTo(true);
+ Assertions.assertThat(change.schema().field("b").schema().defaultValue()).isEqualTo(false);
+ }
}
diff --git a/debezium-connector-mysql/src/test/java/io/debezium/connector/mysql/MysqlDefaultValueIT.java b/debezium-connector-mysql/src/test/java/io/debezium/connector/mysql/MysqlDefaultValueIT.java
index 32191e13e00..0430b086ba6 100644
--- a/debezium-connector-mysql/src/test/java/io/debezium/connector/mysql/MysqlDefaultValueIT.java
+++ b/debezium-connector-mysql/src/test/java/io/debezium/connector/mysql/MysqlDefaultValueIT.java
@@ -37,7 +37,10 @@
import io.debezium.jdbc.JdbcConnection;
import io.debezium.jdbc.JdbcValueConverters;
import io.debezium.jdbc.TemporalPrecisionMode;
+import io.debezium.junit.EqualityCheck;
import io.debezium.junit.SkipWhenDatabaseVersion;
+import io.debezium.junit.SkipWhenKafkaVersion;
+import io.debezium.junit.SkipWhenKafkaVersion.KafkaVersion;
import io.debezium.relational.history.DatabaseHistory;
import io.debezium.time.MicroTimestamp;
import io.debezium.time.Timestamp;
@@ -78,6 +81,7 @@ public void afterEach() {
}
@Test
+ @SkipWhenKafkaVersion(check = EqualityCheck.EQUAL, value = KafkaVersion.KAFKA_1XX, description = "Not compatible with Kafka 1.x")
public void unsignedTinyIntTest() throws InterruptedException {
config = DATABASE.defaultConfig()
.with(MySqlConnectorConfig.SNAPSHOT_MODE, MySqlConnectorConfig.SnapshotMode.INITIAL)
@@ -111,6 +115,7 @@ public void unsignedTinyIntTest() throws InterruptedException {
}
@Test
+ @SkipWhenKafkaVersion(check = EqualityCheck.EQUAL, value = KafkaVersion.KAFKA_1XX, description = "Not compatible with Kafka 1.x")
public void unsignedSmallIntTest() throws InterruptedException {
config = DATABASE.defaultConfig()
.with(MySqlConnectorConfig.SNAPSHOT_MODE, MySqlConnectorConfig.SnapshotMode.INITIAL)
@@ -150,6 +155,7 @@ private void assertEmptyFieldValue(SourceRecord record, String fieldName) {
}
@Test
+ @SkipWhenKafkaVersion(check = EqualityCheck.EQUAL, value = KafkaVersion.KAFKA_1XX, description = "Not compatible with Kafka 1.x")
public void unsignedMediumIntTest() throws InterruptedException {
config = DATABASE.defaultConfig()
.with(MySqlConnectorConfig.SNAPSHOT_MODE, MySqlConnectorConfig.SnapshotMode.INITIAL)
@@ -183,6 +189,7 @@ public void unsignedMediumIntTest() throws InterruptedException {
}
@Test
+ @SkipWhenKafkaVersion(check = EqualityCheck.EQUAL, value = KafkaVersion.KAFKA_1XX, description = "Not compatible with Kafka 1.x")
public void unsignedIntTest() throws InterruptedException {
config = DATABASE.defaultConfig()
.with(MySqlConnectorConfig.SNAPSHOT_MODE, MySqlConnectorConfig.SnapshotMode.INITIAL)
@@ -216,6 +223,7 @@ public void unsignedIntTest() throws InterruptedException {
}
@Test
+ @SkipWhenKafkaVersion(check = EqualityCheck.EQUAL, value = KafkaVersion.KAFKA_1XX, description = "Not compatible with Kafka 1.x")
public void unsignedBigIntToLongTest() throws InterruptedException {
config = DATABASE.defaultConfig()
.with(MySqlConnectorConfig.SNAPSHOT_MODE, MySqlConnectorConfig.SnapshotMode.INITIAL)
@@ -249,6 +257,7 @@ public void unsignedBigIntToLongTest() throws InterruptedException {
}
@Test
+ @SkipWhenKafkaVersion(check = EqualityCheck.EQUAL, value = KafkaVersion.KAFKA_1XX, description = "Not compatible with Kafka 1.x")
public void unsignedBigIntToBigDecimalTest() throws InterruptedException {
config = DATABASE.defaultConfig()
.with(MySqlConnectorConfig.SNAPSHOT_MODE, MySqlConnectorConfig.SnapshotMode.INITIAL)
@@ -284,6 +293,7 @@ public void unsignedBigIntToBigDecimalTest() throws InterruptedException {
}
@Test
+ @SkipWhenKafkaVersion(check = EqualityCheck.EQUAL, value = KafkaVersion.KAFKA_1XX, description = "Not compatible with Kafka 1.x")
public void stringTest() throws InterruptedException {
config = DATABASE.defaultConfig()
.with(MySqlConnectorConfig.SNAPSHOT_MODE, MySqlConnectorConfig.SnapshotMode.INITIAL)
@@ -316,6 +326,7 @@ public void stringTest() throws InterruptedException {
}
@Test
+ @SkipWhenKafkaVersion(check = EqualityCheck.EQUAL, value = KafkaVersion.KAFKA_1XX, description = "Not compatible with Kafka 1.x")
public void unsignedBitTest() throws InterruptedException {
config = DATABASE.defaultConfig()
.with(MySqlConnectorConfig.SNAPSHOT_MODE, MySqlConnectorConfig.SnapshotMode.INITIAL)
@@ -352,6 +363,7 @@ public void unsignedBitTest() throws InterruptedException {
}
@Test
+ @SkipWhenKafkaVersion(check = EqualityCheck.EQUAL, value = KafkaVersion.KAFKA_1XX, description = "Not compatible with Kafka 1.x")
public void booleanTest() throws InterruptedException {
config = DATABASE.defaultConfig()
.with(MySqlConnectorConfig.SNAPSHOT_MODE, MySqlConnectorConfig.SnapshotMode.INITIAL)
@@ -377,6 +389,7 @@ public void booleanTest() throws InterruptedException {
}
@Test
+ @SkipWhenKafkaVersion(check = EqualityCheck.EQUAL, value = KafkaVersion.KAFKA_1XX, description = "Not compatible with Kafka 1.x")
public void numberTest() throws InterruptedException {
config = DATABASE.defaultConfig()
.with(MySqlConnectorConfig.SNAPSHOT_MODE, MySqlConnectorConfig.SnapshotMode.INITIAL)
@@ -408,6 +421,7 @@ public void numberTest() throws InterruptedException {
}
@Test
+ @SkipWhenKafkaVersion(check = EqualityCheck.EQUAL, value = KafkaVersion.KAFKA_1XX, description = "Not compatible with Kafka 1.x")
public void tinyIntBooleanTest() throws Exception {
config = DATABASE.defaultConfig()
.with(MySqlConnectorConfig.SNAPSHOT_MODE, MySqlConnectorConfig.SnapshotMode.INITIAL)
@@ -437,6 +451,7 @@ public void tinyIntBooleanTest() throws Exception {
@Test
@FixFor("DBZ-1689")
+ @SkipWhenKafkaVersion(check = EqualityCheck.EQUAL, value = KafkaVersion.KAFKA_1XX, description = "Not compatible with Kafka 1.x")
public void intBooleanTest() throws Exception {
config = DATABASE.defaultConfig()
.with(MySqlConnectorConfig.SNAPSHOT_MODE, MySqlConnectorConfig.SnapshotMode.INITIAL)
@@ -465,6 +480,7 @@ public void intBooleanTest() throws Exception {
}
@Test
+ @SkipWhenKafkaVersion(check = EqualityCheck.EQUAL, value = KafkaVersion.KAFKA_1XX, description = "Not compatible with Kafka 1.x")
public void floatAndDoubleTest() throws InterruptedException {
config = DATABASE.defaultConfig()
.with(MySqlConnectorConfig.SNAPSHOT_MODE, MySqlConnectorConfig.SnapshotMode.INITIAL)
@@ -485,6 +501,7 @@ public void floatAndDoubleTest() throws InterruptedException {
}
@Test
+ @SkipWhenKafkaVersion(check = EqualityCheck.EQUAL, value = KafkaVersion.KAFKA_1XX, description = "Not compatible with Kafka 1.x")
public void realTest() throws InterruptedException {
config = DATABASE.defaultConfig()
.with(MySqlConnectorConfig.SNAPSHOT_MODE, MySqlConnectorConfig.SnapshotMode.INITIAL)
@@ -528,6 +545,7 @@ public void numericAndDecimalToDoubleTest() throws InterruptedException {
}
@Test
+ @SkipWhenKafkaVersion(check = EqualityCheck.EQUAL, value = KafkaVersion.KAFKA_1XX, description = "Not compatible with Kafka 1.x")
public void numericAndDecimalToDecimalTest() throws InterruptedException {
config = DATABASE.defaultConfig()
.with(MySqlConnectorConfig.SNAPSHOT_MODE, MySqlConnectorConfig.SnapshotMode.INITIAL)
@@ -617,6 +635,7 @@ public void dateAndTimeTest() throws InterruptedException {
}
@Test
+ @SkipWhenKafkaVersion(check = EqualityCheck.EQUAL, value = KafkaVersion.KAFKA_1XX, description = "Not compatible with Kafka 1.x")
public void timeTypeWithConnectMode() throws Exception {
config = DATABASE.defaultConfig()
.with(MySqlConnectorConfig.SNAPSHOT_MODE, MySqlConnectorConfig.SnapshotMode.INITIAL)
@@ -678,6 +697,7 @@ public void timeTypeWithConnectMode() throws Exception {
@Test
@FixFor("DBZ-771")
+ @SkipWhenKafkaVersion(check = EqualityCheck.EQUAL, value = KafkaVersion.KAFKA_1XX, description = "Not compatible with Kafka 1.x")
public void columnTypeAndDefaultValueChange() throws Exception {
config = DATABASE.defaultConfig()
.with(MySqlConnectorConfig.SNAPSHOT_MODE, MySqlConnectorConfig.SnapshotMode.INITIAL)
@@ -717,6 +737,7 @@ record = records.recordsForTopic(DATABASE.topicForTable("DBZ_771_CUSTOMERS")).ge
@Test
@FixFor({ "DBZ-771", "DBZ-1321" })
+ @SkipWhenKafkaVersion(check = EqualityCheck.EQUAL, value = KafkaVersion.KAFKA_1XX, description = "Not compatible with Kafka 1.x")
public void columnTypeChangeResetsDefaultValue() throws Exception {
config = DATABASE.defaultConfig()
.with(MySqlConnectorConfig.SNAPSHOT_MODE, MySqlConnectorConfig.SnapshotMode.INITIAL)
diff --git a/debezium-connector-mysql/src/test/resources/ddl/tinyint_test.sql b/debezium-connector-mysql/src/test/resources/ddl/tinyint_test.sql
index 220e54d19b9..a2bc65d7987 100644
--- a/debezium-connector-mysql/src/test/resources/ddl/tinyint_test.sql
+++ b/debezium-connector-mysql/src/test/resources/ddl/tinyint_test.sql
@@ -6,4 +6,11 @@ CREATE TABLE `DBZ1773` (
b BOOLEAN
) ENGINE=InnoDB AUTO_INCREMENT=10851 DEFAULT CHARSET=utf8;
-INSERT INTO DBZ1773 VALUES (DEFAULT, 100, 5, 50, TRUE);
\ No newline at end of file
+CREATE TABLE `DBZ2085` (
+ id int not null primary key auto_increment,
+ b BOOLEAN NOT NULL DEFAULT 0
+) ENGINE=InnoDB AUTO_INCREMENT=10851 DEFAULT CHARSET=utf8;
+
+INSERT INTO DBZ1773 VALUES (DEFAULT, 100, 5, 50, TRUE);
+INSERT INTO DBZ2085 VALUES (DEFAULT, 1);
+
diff --git a/debezium-connector-postgres/pom.xml b/debezium-connector-postgres/pom.xml
index fe59fd0065d..26c259ab233 100644
--- a/debezium-connector-postgres/pom.xml
+++ b/debezium-connector-postgres/pom.xml
@@ -3,7 +3,7 @@
     <parent>
         <groupId>io.debezium</groupId>
         <artifactId>debezium-parent</artifactId>
-        <version>1.2.0-SNAPSHOT</version>
+        <version>1.2.0-SNAPSHOT-cs</version>
         <relativePath>../</relativePath>
     </parent>
     <modelVersion>4.0.0</modelVersion>
@@ -287,6 +287,7 @@
                             <descriptorRef>${assembly.descriptor}</descriptorRef>
+                            <tarLongFileMode>posix</tarLongFileMode>
diff --git a/debezium-connector-postgres/src/main/java/io/debezium/connector/postgresql/PostgresConnectorTask.java b/debezium-connector-postgres/src/main/java/io/debezium/connector/postgresql/PostgresConnectorTask.java
index 3faf915ec6b..0e56f8553c4 100644
--- a/debezium-connector-postgres/src/main/java/io/debezium/connector/postgresql/PostgresConnectorTask.java
+++ b/debezium-connector-postgres/src/main/java/io/debezium/connector/postgresql/PostgresConnectorTask.java
@@ -77,7 +77,6 @@ public ChangeEventSourceCoordinator start(Configuration config) {
final PostgresOffsetContext previousOffset = (PostgresOffsetContext) getPreviousOffset(new PostgresOffsetContext.Loader(connectorConfig));
final Clock clock = Clock.system();
- final SourceInfo sourceInfo = new SourceInfo(connectorConfig);
LoggingContext.PreviousContext previousContext = taskContext.configureLoggingContext(CONTEXT_NAME);
try {
// Print out the server information
@@ -98,7 +97,7 @@ public ChangeEventSourceCoordinator start(Configuration config) {
snapshotter.init(connectorConfig, null, slotInfo);
}
else {
- LOGGER.info("Found previous offset {}", sourceInfo);
+ LOGGER.info("Found previous offset {}", previousOffset);
snapshotter.init(connectorConfig, previousOffset.asOffsetState(), slotInfo);
}
diff --git a/debezium-connector-postgres/src/test/java/io/debezium/connector/postgresql/DebeziumEngineIT.java b/debezium-connector-postgres/src/test/java/io/debezium/connector/postgresql/DebeziumEngineIT.java
index cb4ac33a437..cff8ca1a492 100644
--- a/debezium-connector-postgres/src/test/java/io/debezium/connector/postgresql/DebeziumEngineIT.java
+++ b/debezium-connector-postgres/src/test/java/io/debezium/connector/postgresql/DebeziumEngineIT.java
@@ -21,6 +21,7 @@
import org.fest.assertions.Assertions;
import org.junit.Assert;
import org.junit.Before;
+import org.junit.Rule;
import org.junit.Test;
import io.debezium.doc.FixFor;
@@ -32,6 +33,10 @@
import io.debezium.engine.format.Avro;
import io.debezium.engine.format.CloudEvents;
import io.debezium.engine.format.Json;
+import io.debezium.junit.EqualityCheck;
+import io.debezium.junit.SkipTestRule;
+import io.debezium.junit.SkipWhenKafkaVersion;
+import io.debezium.junit.SkipWhenKafkaVersion.KafkaVersion;
import io.debezium.util.LoggingContext;
import io.debezium.util.Testing;
@@ -44,6 +49,9 @@ public class DebeziumEngineIT {
protected static final Path OFFSET_STORE_PATH = Testing.Files.createTestingPath("connector-offsets.txt").toAbsolutePath();
+ @Rule
+ public SkipTestRule skipTest = new SkipTestRule();
+
@Before
public void before() throws SQLException {
OFFSET_STORE_PATH.getParent().toFile().mkdirs();
@@ -103,6 +111,7 @@ public void shouldSerializeToJson() throws Exception {
@Test
@FixFor("DBZ-1807")
+ @SkipWhenKafkaVersion(check = EqualityCheck.EQUAL, value = KafkaVersion.KAFKA_1XX, description = "Not compatible with Kafka 1.x")
public void shouldSerializeToAvro() throws Exception {
final Properties props = new Properties();
props.putAll(TestHelper.defaultConfig().build().asMap());
diff --git a/debezium-connector-postgres/src/test/java/io/debezium/connector/postgresql/PostgresConnectorIT.java b/debezium-connector-postgres/src/test/java/io/debezium/connector/postgresql/PostgresConnectorIT.java
index 5eccde1b313..814a90bc54a 100644
--- a/debezium-connector-postgres/src/test/java/io/debezium/connector/postgresql/PostgresConnectorIT.java
+++ b/debezium-connector-postgres/src/test/java/io/debezium/connector/postgresql/PostgresConnectorIT.java
@@ -66,7 +66,10 @@
import io.debezium.embedded.EmbeddedEngine;
import io.debezium.heartbeat.Heartbeat;
import io.debezium.jdbc.TemporalPrecisionMode;
+import io.debezium.junit.EqualityCheck;
import io.debezium.junit.SkipWhenDatabaseVersion;
+import io.debezium.junit.SkipWhenKafkaVersion;
+import io.debezium.junit.SkipWhenKafkaVersion.KafkaVersion;
import io.debezium.junit.logging.LogInterceptor;
import io.debezium.relational.RelationalDatabaseConnectorConfig;
import io.debezium.util.Strings;
@@ -1575,6 +1578,7 @@ record = recordsForTopicS2.remove(0);
@Test
@FixFor("DBZ-1292")
+ @SkipWhenKafkaVersion(check = EqualityCheck.EQUAL, value = KafkaVersion.KAFKA_1XX, description = "Not compatible with Kafka 1.x")
public void shouldOutputRecordsInCloudEventsFormat() throws Exception {
TestHelper.execute(SETUP_TABLES_STMT);
Configuration.Builder configBuilder = TestHelper.defaultConfig()
diff --git a/debezium-connector-postgres/src/test/java/io/debezium/connector/postgresql/TransactionMetadataIT.java b/debezium-connector-postgres/src/test/java/io/debezium/connector/postgresql/TransactionMetadataIT.java
index f4136d14dac..c94226886f2 100644
--- a/debezium-connector-postgres/src/test/java/io/debezium/connector/postgresql/TransactionMetadataIT.java
+++ b/debezium-connector-postgres/src/test/java/io/debezium/connector/postgresql/TransactionMetadataIT.java
@@ -26,8 +26,12 @@
import io.debezium.connector.postgresql.PostgresConnectorConfig.SnapshotMode;
import io.debezium.connector.postgresql.junit.SkipTestDependingOnDecoderPluginNameRule;
import io.debezium.embedded.AbstractConnectorTest;
+import io.debezium.junit.EqualityCheck;
+import io.debezium.junit.SkipWhenKafkaVersion;
+import io.debezium.junit.SkipWhenKafkaVersion.KafkaVersion;
import io.debezium.util.Collect;
+@SkipWhenKafkaVersion(check = EqualityCheck.EQUAL, value = KafkaVersion.KAFKA_1XX, description = "Not compatible with Kafka 1.x")
public class TransactionMetadataIT extends AbstractConnectorTest {
private static final String INSERT_STMT = "INSERT INTO s1.a (aa) VALUES (1);" +
diff --git a/debezium-connector-sqlserver/pom.xml b/debezium-connector-sqlserver/pom.xml
index e632f48ce49..bf45b821c16 100644
--- a/debezium-connector-sqlserver/pom.xml
+++ b/debezium-connector-sqlserver/pom.xml
@@ -3,7 +3,7 @@
     <parent>
         <groupId>io.debezium</groupId>
         <artifactId>debezium-parent</artifactId>
-        <version>1.2.0-SNAPSHOT</version>
+        <version>1.2.0-SNAPSHOT-cs</version>
         <relativePath>../</relativePath>
     </parent>
     <modelVersion>4.0.0</modelVersion>
@@ -246,6 +246,7 @@
                             <descriptorRef>${assembly.descriptor}</descriptorRef>
+                            <tarLongFileMode>posix</tarLongFileMode>
diff --git a/debezium-connector-sqlserver/src/main/java/io/debezium/connector/sqlserver/SourceTimestampMode.java b/debezium-connector-sqlserver/src/main/java/io/debezium/connector/sqlserver/SourceTimestampMode.java
new file mode 100644
index 00000000000..75c1abf7e05
--- /dev/null
+++ b/debezium-connector-sqlserver/src/main/java/io/debezium/connector/sqlserver/SourceTimestampMode.java
@@ -0,0 +1,48 @@
+/*
+ * Copyright Debezium Authors.
+ *
+ * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
+ */
+package io.debezium.connector.sqlserver;
+
+import java.util.Arrays;
+
+import io.debezium.config.EnumeratedValue;
+
+/**
+ * Strategy for populating the source.ts_ms field in change events.
+ */
+public enum SourceTimestampMode implements EnumeratedValue {
+
+ /**
+ * This mode (the default) sets the source timestamp field (ts_ms) to the point in time when the record was committed in the database.
+ */
+ COMMIT("commit"),
+
+ /**
+ * This mode sets the source timestamp field (ts_ms) to the point in time when the record was processed by Debezium.
+ */
+ PROCESSING("processing");
+
+ private final String value;
+
+ SourceTimestampMode(String value) {
+ this.value = value;
+ }
+
+ @Override
+ public String getValue() {
+ return value;
+ }
+
+ public static SourceTimestampMode getDefaultMode() {
+ return COMMIT;
+ }
+
+ static SourceTimestampMode fromMode(String mode) {
+ return Arrays.stream(SourceTimestampMode.values())
+ .filter(s -> s.name().equalsIgnoreCase(mode))
+ .findFirst()
+ .orElseGet(SourceTimestampMode::getDefaultMode);
+ }
+}
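
Editorial aside, not part of the patch: a usage sketch for the new enum. Note that `fromMode(..)` is package-private, so this assumes code living in `io.debezium.connector.sqlserver` (the unit test added below exercises exactly these cases):

```java
package io.debezium.connector.sqlserver;

public class SourceTimestampModeSketch {
    public static void main(String[] args) {
        // Matching is case-insensitive ...
        System.out.println(SourceTimestampMode.fromMode("PROCESSING")); // PROCESSING
        // ... and null or unknown values fall back to the default (COMMIT)
        System.out.println(SourceTimestampMode.fromMode(null));         // COMMIT
        System.out.println(SourceTimestampMode.fromMode("bogus"));      // COMMIT
    }
}
```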
diff --git a/debezium-connector-sqlserver/src/main/java/io/debezium/connector/sqlserver/SqlServerConnection.java b/debezium-connector-sqlserver/src/main/java/io/debezium/connector/sqlserver/SqlServerConnection.java
index 183022b82bf..e3590cbd104 100644
--- a/debezium-connector-sqlserver/src/main/java/io/debezium/connector/sqlserver/SqlServerConnection.java
+++ b/debezium-connector-sqlserver/src/main/java/io/debezium/connector/sqlserver/SqlServerConnection.java
@@ -33,6 +33,7 @@
import io.debezium.relational.Table;
import io.debezium.relational.TableId;
import io.debezium.util.BoundedConcurrentHashMap;
+import io.debezium.util.Clock;
/**
* {@link JdbcConnection} extension to be used with Microsoft SQL Server
@@ -72,6 +73,8 @@ public class SqlServerConnection extends JdbcConnection {
*/
private final String realDatabaseName;
private final ZoneId transactionTimezone;
+ private final SourceTimestampMode sourceTimestampMode;
+ private final Clock clock;
public static interface ResultSetExtractor<T> {
T apply(ResultSet rs) throws SQLException;
@@ -82,16 +85,18 @@ public static interface ResultSetExtractor {
/**
* Creates a new connection using the supplied configuration.
*
- * @param config
- * {@link Configuration} instance, may not be null.
+ * @param config {@link Configuration} instance, may not be null.
+ * @param clock system clock, used when the processing source timestamp mode is configured.
+ * @param sourceTimestampMode strategy for populating {@code source.ts_ms}.
*/
- public SqlServerConnection(Configuration config) {
+ public SqlServerConnection(Configuration config, Clock clock, SourceTimestampMode sourceTimestampMode) {
super(config, FACTORY);
lsnToInstantCache = new BoundedConcurrentHashMap<>(100);
realDatabaseName = retrieveRealDatabaseName();
boolean supportsAtTimeZone = supportsAtTimeZone();
transactionTimezone = retrieveTransactionTimezone(supportsAtTimeZone);
lsnToTimestamp = getLsnToTimestamp(supportsAtTimeZone);
+ this.clock = clock;
+ this.sourceTimestampMode = sourceTimestampMode;
}
/**
@@ -194,10 +199,16 @@ public Lsn incrementLsn(Lsn lsn) throws SQLException {
* Map a commit LSN to a point in time when the commit happened.
*
* @param lsn - LSN of the commit
- * @return time when the commit was recorded into the database log
+ * @return time when the commit was recorded into the database log or the
+ * current time, depending on the setting for the "source timestamp
+ * mode" option
* @throws SQLException
*/
public Instant timestampOfLsn(Lsn lsn) throws SQLException {
+ if (SourceTimestampMode.PROCESSING.equals(sourceTimestampMode)) {
+ return clock.currentTime();
+ }
+
if (lsn.getBinary() == null) {
return null;
}
diff --git a/debezium-connector-sqlserver/src/main/java/io/debezium/connector/sqlserver/SqlServerConnectorConfig.java b/debezium-connector-sqlserver/src/main/java/io/debezium/connector/sqlserver/SqlServerConnectorConfig.java
index 88a55e52917..378db13b3cd 100644
--- a/debezium-connector-sqlserver/src/main/java/io/debezium/connector/sqlserver/SqlServerConnectorConfig.java
+++ b/debezium-connector-sqlserver/src/main/java/io/debezium/connector/sqlserver/SqlServerConnectorConfig.java
@@ -43,6 +43,7 @@ public class SqlServerConnectorConfig extends HistorizedRelationalDatabaseConnec
private static final Logger LOGGER = LoggerFactory.getLogger(SqlServerConnectorConfig.class);
+ public static final String SOURCE_TIMESTAMP_MODE_CONFIG_NAME = "source.timestamp.mode";
protected static final int DEFAULT_PORT = 1433;
private static final String READ_ONLY_INTENT = "ReadOnly";
private static final String APPLICATION_INTENT_KEY = "database.applicationIntent";
@@ -272,6 +273,18 @@ public static SnapshotIsolationMode parse(String value, String defaultValue) {
.withDescription("The timezone of the server used to correctly shift the commit transaction timestamp on the client side"
+ "Options include: Any valid Java ZoneId");
+ public static final Field SOURCE_TIMESTAMP_MODE = Field.create(SOURCE_TIMESTAMP_MODE_CONFIG_NAME)
+ .withDisplayName("Source timestamp mode")
+ .withDefault(SourceTimestampMode.COMMIT.getValue())
+ .withType(Type.STRING)
+ .withWidth(Width.SHORT)
+ .withImportance(Importance.LOW)
.withDescription("Configures the criteria of the attached timestamp within the source record (ts_ms). " +
"Options include: " +
"'" + SourceTimestampMode.COMMIT.getValue() + "' (default), the source timestamp is set to the instant when the record was committed in the database; " +
"'" + SourceTimestampMode.PROCESSING.getValue() + "', the source timestamp is set to the instant when the record was processed by Debezium.");
+
public static final Field SNAPSHOT_MODE = Field.create("snapshot.mode")
.withDisplayName("Snapshot mode")
.withEnum(SnapshotMode.class, SnapshotMode.INITIAL)
@@ -311,7 +324,8 @@ public static SnapshotIsolationMode parse(String value, String defaultValue) {
SERVER_TIMEZONE)
.connector(
SNAPSHOT_MODE,
- SNAPSHOT_ISOLATION_MODE)
+ SNAPSHOT_ISOLATION_MODE,
+ SOURCE_TIMESTAMP_MODE)
.excluding(
SCHEMA_WHITELIST,
SCHEMA_BLACKLIST)
@@ -329,6 +343,7 @@ public static ConfigDef configDef() {
private final String databaseName;
private final SnapshotMode snapshotMode;
private final SnapshotIsolationMode snapshotIsolationMode;
+ private final SourceTimestampMode sourceTimestampMode;
private final ColumnNameFilter columnFilter;
private final boolean readOnlyDatabaseConnection;
@@ -347,6 +362,8 @@ public SqlServerConnectorConfig(Configuration config) {
else {
this.snapshotIsolationMode = SnapshotIsolationMode.parse(config.getString(SNAPSHOT_ISOLATION_MODE), SNAPSHOT_ISOLATION_MODE.defaultValueAsString());
}
+
+ this.sourceTimestampMode = SourceTimestampMode.fromMode(config.getString(SOURCE_TIMESTAMP_MODE_CONFIG_NAME));
}
private static ColumnNameFilter getColumnNameFilter(String excludedColumnPatterns) {
@@ -374,6 +391,10 @@ public SnapshotMode getSnapshotMode() {
return snapshotMode;
}
+ public SourceTimestampMode getSourceTimestampMode() {
+ return sourceTimestampMode;
+ }
+
public ColumnNameFilter getColumnFilter() {
return columnFilter;
}
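
Editorial aside, not part of the patch: a sketch of how the option travels from the raw configuration into the connector config, which the task then hands to both `SqlServerConnection` instances (see `SqlServerConnectorTask` below). Mandatory connection settings are elided:

```java
import io.debezium.config.Configuration;
import io.debezium.connector.sqlserver.SqlServerConnectorConfig;

public class TimestampModeConfigSketch {
    public static void main(String[] args) {
        Configuration config = Configuration.create()
                .with(SqlServerConnectorConfig.SOURCE_TIMESTAMP_MODE, "processing")
                .build();

        // The raw value the connector config parses via SourceTimestampMode.fromMode(..)
        System.out.println(config.getString(SqlServerConnectorConfig.SOURCE_TIMESTAMP_MODE_CONFIG_NAME));
    }
}
```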
diff --git a/debezium-connector-sqlserver/src/main/java/io/debezium/connector/sqlserver/SqlServerConnectorTask.java b/debezium-connector-sqlserver/src/main/java/io/debezium/connector/sqlserver/SqlServerConnectorTask.java
index 35a1a36c355..b8383fa4412 100644
--- a/debezium-connector-sqlserver/src/main/java/io/debezium/connector/sqlserver/SqlServerConnectorTask.java
+++ b/debezium-connector-sqlserver/src/main/java/io/debezium/connector/sqlserver/SqlServerConnectorTask.java
@@ -57,6 +57,7 @@ public String version() {
@Override
public ChangeEventSourceCoordinator start(Configuration config) {
+ final Clock clock = Clock.system();
final SqlServerConnectorConfig connectorConfig = new SqlServerConnectorConfig(config);
final TopicSelector<TableId> topicSelector = SqlServerTopicSelector.defaultSelector(connectorConfig);
final SchemaNameAdjuster schemaNameAdjuster = SchemaNameAdjuster.create(LOGGER);
@@ -70,8 +71,8 @@ public ChangeEventSourceCoordinator start(Configuration config) {
final Configuration jdbcConfig = config.filter(
x -> !(x.startsWith(DatabaseHistory.CONFIGURATION_FIELD_PREFIX_STRING) || x.equals(HistorizedRelationalDatabaseConnectorConfig.DATABASE_HISTORY.name())))
.subset("database.", true);
- dataConnection = new SqlServerConnection(jdbcConfig);
- metadataConnection = new SqlServerConnection(jdbcConfig);
+ dataConnection = new SqlServerConnection(jdbcConfig, clock, connectorConfig.getSourceTimestampMode());
+ metadataConnection = new SqlServerConnection(jdbcConfig, clock, connectorConfig.getSourceTimestampMode());
try {
dataConnection.setAutoCommit(false);
}
@@ -88,8 +89,6 @@ public ChangeEventSourceCoordinator start(Configuration config) {
taskContext = new SqlServerTaskContext(connectorConfig, schema);
- final Clock clock = Clock.system();
-
// Set up the task record queue ...
this.queue = new ChangeEventQueue.Builder<DataChangeEvent>()
.pollInterval(connectorConfig.getPollInterval())
diff --git a/debezium-connector-sqlserver/src/main/java/io/debezium/connector/sqlserver/SqlServerStreamingChangeEventSource.java b/debezium-connector-sqlserver/src/main/java/io/debezium/connector/sqlserver/SqlServerStreamingChangeEventSource.java
index 145723f5548..b45d5e3342d 100644
--- a/debezium-connector-sqlserver/src/main/java/io/debezium/connector/sqlserver/SqlServerStreamingChangeEventSource.java
+++ b/debezium-connector-sqlserver/src/main/java/io/debezium/connector/sqlserver/SqlServerStreamingChangeEventSource.java
@@ -239,7 +239,8 @@ public void execute(ChangeEventSourceContext context) throws InterruptedExceptio
final Object[] dataNext = (operation == SqlServerChangeRecordEmitter.OP_UPDATE_BEFORE) ? tableWithSmallestLsn.getData() : null;
offsetContext.setChangePosition(tableWithSmallestLsn.getChangePosition(), eventCount);
- offsetContext.event(tableWithSmallestLsn.getChangeTable().getSourceTableId(),
+ offsetContext.event(
+ tableWithSmallestLsn.getChangeTable().getSourceTableId(),
metadataConnection.timestampOfLsn(tableWithSmallestLsn.getChangePosition().getCommitLsn()));
dispatcher
diff --git a/debezium-connector-sqlserver/src/test/java/io/debezium/connector/sqlserver/EventProcessingFailureHandlingIT.java b/debezium-connector-sqlserver/src/test/java/io/debezium/connector/sqlserver/EventProcessingFailureHandlingIT.java
index 51532d2e101..f0fc9d1b430 100644
--- a/debezium-connector-sqlserver/src/test/java/io/debezium/connector/sqlserver/EventProcessingFailureHandlingIT.java
+++ b/debezium-connector-sqlserver/src/test/java/io/debezium/connector/sqlserver/EventProcessingFailureHandlingIT.java
@@ -5,8 +5,6 @@
*/
package io.debezium.connector.sqlserver;
-import static org.fest.assertions.Assertions.assertThat;
-
import java.sql.SQLException;
import java.util.concurrent.TimeUnit;
@@ -83,7 +81,12 @@ public void warn() throws Exception {
SourceRecords records = consumeRecordsByTopic(RECORDS_PER_TABLE);
Assertions.assertThat(records.recordsForTopic("server1.dbo.tablea")).hasSize(RECORDS_PER_TABLE);
Assertions.assertThat(records.recordsForTopic("server1.dbo.tableb")).isNull();
- assertThat(logInterceptor.containsWarnMessage("Error while processing event at offset {")).isTrue();
+
+ Awaitility.await()
+ .alias("Found warning message in logs")
+ .atMost(TestHelper.waitTimeForRecords(), TimeUnit.SECONDS).until(() -> {
+ return logInterceptor.containsWarnMessage("Error while processing event at offset {");
+ });
}
@Test
@@ -135,9 +138,11 @@ public void fail() throws Exception {
SourceRecords records = consumeRecordsByTopic(1);
Assertions.assertThat(records.recordsForTopic("server1.dbo.tablea")).hasSize(1);
- assertThat(logInterceptor.containsStacktraceElement("Error while processing event at offset {")).isTrue();
- Awaitility.await().atMost(TestHelper.waitTimeForRecords(), TimeUnit.SECONDS).until(() -> {
- return !engine.isRunning();
- });
+ Awaitility.await()
+ .alias("Found error message in logs")
+ .atMost(TestHelper.waitTimeForRecords(), TimeUnit.SECONDS).until(() -> {
+ boolean foundErrorMessageInLogs = logInterceptor.containsStacktraceElement("Error while processing event at offset {");
+ return foundErrorMessageInLogs && !engine.isRunning();
+ });
}
}
diff --git a/debezium-connector-sqlserver/src/test/java/io/debezium/connector/sqlserver/SourceTimestampModeTest.java b/debezium-connector-sqlserver/src/test/java/io/debezium/connector/sqlserver/SourceTimestampModeTest.java
new file mode 100644
index 00000000000..9e70470da2a
--- /dev/null
+++ b/debezium-connector-sqlserver/src/test/java/io/debezium/connector/sqlserver/SourceTimestampModeTest.java
@@ -0,0 +1,33 @@
+/*
+ * Copyright Debezium Authors.
+ *
+ * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
+ */
+package io.debezium.connector.sqlserver;
+
+import static org.fest.assertions.Assertions.assertThat;
+
+import org.junit.Test;
+
+import io.debezium.doc.FixFor;
+
+public class SourceTimestampModeTest {
+
+ @Test
+ @FixFor("DBZ-1988")
+ public void shouldConfigureDefaultMode() {
+ assertThat(SourceTimestampMode.getDefaultMode()).isEqualTo(SourceTimestampMode.COMMIT);
+ }
+
+ @Test
+ @FixFor("DBZ-1988")
+ public void shouldReturnOptionFromValidMode() {
+ assertThat(SourceTimestampMode.fromMode("processing")).isEqualTo(SourceTimestampMode.PROCESSING);
+ }
+
+ @Test
+ @FixFor("DBZ-1988")
+ public void shouldReturnDefaultIfGivenModeIsNull() {
+ assertThat(SourceTimestampMode.fromMode(null)).isEqualTo(SourceTimestampMode.getDefaultMode());
+ }
+}
diff --git a/debezium-connector-sqlserver/src/test/java/io/debezium/connector/sqlserver/SqlServerConnectorIT.java b/debezium-connector-sqlserver/src/test/java/io/debezium/connector/sqlserver/SqlServerConnectorIT.java
index daa235260f0..c98133c6d9f 100644
--- a/debezium-connector-sqlserver/src/test/java/io/debezium/connector/sqlserver/SqlServerConnectorIT.java
+++ b/debezium-connector-sqlserver/src/test/java/io/debezium/connector/sqlserver/SqlServerConnectorIT.java
@@ -1370,6 +1370,36 @@ public void shouldDetectPurgedHistory() throws Exception {
.isTrue();
}
+ @Test
+ @FixFor("DBZ-1988")
+ public void shouldHonorSourceTimestampMode() throws InterruptedException, SQLException {
+ connection.execute("CREATE TABLE source_timestamp_mode (id int, name varchar(30) primary key(id))");
+ TestHelper.enableTableCdc(connection, "source_timestamp_mode");
+
+ final Configuration config = TestHelper.defaultConfig()
+ .with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.SCHEMA_ONLY)
+ .with(SqlServerConnectorConfig.TABLE_WHITELIST, "dbo.source_timestamp_mode")
+ .with(SqlServerConnectorConfig.SOURCE_TIMESTAMP_MODE, "processing")
+ .build();
+
+ start(SqlServerConnector.class, config);
+ assertConnectorIsRunning();
+ waitForSnapshotToBeCompleted("sql_server", "server1");
+ connection.execute("INSERT INTO source_timestamp_mode VALUES(1, 'abc')");
+
+ SourceRecords records = consumeRecordsByTopic(1);
+ List<SourceRecord> recordsForTopic = records.recordsForTopic("server1.dbo.source_timestamp_mode");
+ SourceRecord record = recordsForTopic.get(0);
+
+ long eventTs = (long) ((Struct) record.value()).get("ts_ms");
+ long sourceTs = (long) ((Struct) ((Struct) record.value()).get("source")).get("ts_ms");
+
+ // it's not exactly the same as ts_ms, but close enough
+ assertThat(eventTs - sourceTs).isLessThan(100);
+
+ stopConnector();
+ }
+
private void assertRecord(Struct record, List<SchemaAndValueField> expected) {
expected.forEach(schemaAndValueField -> schemaAndValueField.assertFor(record));
}
diff --git a/debezium-connector-sqlserver/src/test/java/io/debezium/connector/sqlserver/TransactionMetadataIT.java b/debezium-connector-sqlserver/src/test/java/io/debezium/connector/sqlserver/TransactionMetadataIT.java
index ec76ce4e9f0..31f0620809f 100644
--- a/debezium-connector-sqlserver/src/test/java/io/debezium/connector/sqlserver/TransactionMetadataIT.java
+++ b/debezium-connector-sqlserver/src/test/java/io/debezium/connector/sqlserver/TransactionMetadataIT.java
@@ -18,6 +18,7 @@
import org.fest.assertions.Assertions;
import org.junit.After;
import org.junit.Before;
+import org.junit.Rule;
import org.junit.Test;
import io.debezium.config.Configuration;
@@ -26,6 +27,10 @@
import io.debezium.data.Envelope;
import io.debezium.data.SchemaAndValueField;
import io.debezium.embedded.AbstractConnectorTest;
+import io.debezium.junit.EqualityCheck;
+import io.debezium.junit.SkipTestRule;
+import io.debezium.junit.SkipWhenKafkaVersion;
+import io.debezium.junit.SkipWhenKafkaVersion.KafkaVersion;
import io.debezium.util.Collect;
import io.debezium.util.Testing;
@@ -34,10 +39,14 @@
*
* @author Jiri Pechanec
*/
+@SkipWhenKafkaVersion(check = EqualityCheck.EQUAL, value = KafkaVersion.KAFKA_1XX, description = "Not compatible with Kafka 1.x")
public class TransactionMetadataIT extends AbstractConnectorTest {
private SqlServerConnection connection;
+ @Rule
+ public SkipTestRule skipRule = new SkipTestRule();
+
@Before
public void before() throws SQLException {
TestHelper.createTestDatabase();
diff --git a/debezium-connector-sqlserver/src/test/java/io/debezium/connector/sqlserver/util/TestHelper.java b/debezium-connector-sqlserver/src/test/java/io/debezium/connector/sqlserver/util/TestHelper.java
index fe322c94208..5cab785cf65 100644
--- a/debezium-connector-sqlserver/src/test/java/io/debezium/connector/sqlserver/util/TestHelper.java
+++ b/debezium-connector-sqlserver/src/test/java/io/debezium/connector/sqlserver/util/TestHelper.java
@@ -24,6 +24,7 @@
import org.slf4j.LoggerFactory;
import io.debezium.config.Configuration;
+import io.debezium.connector.sqlserver.SourceTimestampMode;
import io.debezium.connector.sqlserver.SqlServerConnection;
import io.debezium.connector.sqlserver.SqlServerConnectorConfig;
import io.debezium.jdbc.JdbcConfiguration;
@@ -185,11 +186,11 @@ private static void dropTestDatabase(SqlServerConnection connection) throws SQLE
}
public static SqlServerConnection adminConnection() {
- return new SqlServerConnection(TestHelper.adminJdbcConfig());
+ return new SqlServerConnection(TestHelper.adminJdbcConfig(), Clock.system(), SourceTimestampMode.getDefaultMode());
}
public static SqlServerConnection testConnection() {
- return new SqlServerConnection(TestHelper.defaultJdbcConfig());
+ return new SqlServerConnection(TestHelper.defaultJdbcConfig(), Clock.system(), SourceTimestampMode.getDefaultMode());
}
/**
diff --git a/debezium-core/pom.xml b/debezium-core/pom.xml
index e4fe89ef495..d254e064c70 100644
--- a/debezium-core/pom.xml
+++ b/debezium-core/pom.xml
@@ -3,7 +3,7 @@
     <parent>
         <groupId>io.debezium</groupId>
         <artifactId>debezium-parent</artifactId>
-        <version>1.2.0-SNAPSHOT</version>
+        <version>1.2.0-SNAPSHOT-cs</version>
         <relativePath>../pom.xml</relativePath>
     </parent>
     <modelVersion>4.0.0</modelVersion>
diff --git a/debezium-core/src/main/java/io/debezium/relational/NoOpTableEditorImpl.java b/debezium-core/src/main/java/io/debezium/relational/NoOpTableEditorImpl.java
index f4cb62656c5..f572799f439 100644
--- a/debezium-core/src/main/java/io/debezium/relational/NoOpTableEditorImpl.java
+++ b/debezium-core/src/main/java/io/debezium/relational/NoOpTableEditorImpl.java
@@ -105,6 +105,11 @@ public TableEditor removeColumn(String columnName) {
return this;
}
+ @Override
+ public TableEditor updateColumn(Column column) {
+ return this;
+ }
+
@Override
public TableEditor reorderColumn(String columnName, String afterColumnName) {
return this;
diff --git a/debezium-core/src/main/java/io/debezium/relational/TableEditor.java b/debezium-core/src/main/java/io/debezium/relational/TableEditor.java
index 124cd70626e..87644225265 100644
--- a/debezium-core/src/main/java/io/debezium/relational/TableEditor.java
+++ b/debezium-core/src/main/java/io/debezium/relational/TableEditor.java
@@ -148,6 +148,15 @@ default TableEditor addColumn(Column column) {
*/
TableEditor removeColumn(String columnName);
+ /**
+ * Update the column with the given name. The existing column definition with the same name as the provided column
+ * is replaced with the new one.
+ *
+ * @param column the new column definition
+ * @return this editor so callers can chain methods together
+ */
+ TableEditor updateColumn(Column column);
+
/**
* Reorder the column with the given name to be positioned after the designated column. If {@code afterColumnName} is null,
* the column will be moved to the first column.
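
A quick illustration of the new editor method may help; this is a hedged sketch (column names and types are illustrative, not from the patch), built on the existing Column/Table editor API:

    import java.sql.Types;

    import io.debezium.relational.Column;
    import io.debezium.relational.Table;

    // Sketch: replace an existing column definition in place. updateColumn()
    // swaps in the new definition for the column with the same name while
    // keeping its original position, unlike removeColumn() + addColumn().
    public class UpdateColumnExample {
        public static Table widenNameColumn(Table table) {
            Column widened = Column.editor()
                    .name("name")             // must match the existing column's name
                    .type("VARCHAR")
                    .jdbcType(Types.VARCHAR)
                    .length(255)
                    .create();
            return table.edit()
                    .updateColumn(widened)
                    .create();
        }
    }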
diff --git a/debezium-core/src/main/java/io/debezium/relational/TableEditorImpl.java b/debezium-core/src/main/java/io/debezium/relational/TableEditorImpl.java
index 2dac21cd84a..3d893594396 100644
--- a/debezium-core/src/main/java/io/debezium/relational/TableEditorImpl.java
+++ b/debezium-core/src/main/java/io/debezium/relational/TableEditorImpl.java
@@ -11,6 +11,7 @@
import java.util.LinkedHashMap;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
+import java.util.stream.Collectors;
final class TableEditorImpl implements TableEditor {
@@ -172,6 +173,14 @@ public TableEditor removeColumn(String columnName) {
return this;
}
+ @Override
+ public TableEditor updateColumn(Column newColumn) {
+ setColumns(columns().stream()
+ .map(c -> c.name().equals(newColumn.name()) ? newColumn : c)
+ .collect(Collectors.toList()));
+ return this;
+ }
+
@Override
public TableEditor reorderColumn(String columnName, String afterColumnName) {
Column columnToMove = columnWithName(columnName);
diff --git a/debezium-core/src/main/java/io/debezium/relational/TableSchemaBuilder.java b/debezium-core/src/main/java/io/debezium/relational/TableSchemaBuilder.java
index ba4a3b873a2..8866766415c 100644
--- a/debezium-core/src/main/java/io/debezium/relational/TableSchemaBuilder.java
+++ b/debezium-core/src/main/java/io/debezium/relational/TableSchemaBuilder.java
@@ -363,7 +363,7 @@ private ValueConverter wrapInMappingConverterIfNeeded(ColumnMappers mappers, Tab
* @param mapper the mapping function for the column; may be null if the columns is not to be mapped to different values
*/
protected void addField(SchemaBuilder builder, Table table, Column column, ColumnMapper mapper) {
- SchemaBuilder fieldBuilder = customConverterRegistry.registerConverterFor(table.id(), column)
+ final SchemaBuilder fieldBuilder = customConverterRegistry.registerConverterFor(table.id(), column)
.orElse(valueConverterProvider.schemaBuilder(column));
if (fieldBuilder != null) {
@@ -377,7 +377,8 @@ protected void addField(SchemaBuilder builder, Table table, Column column, Colum
// if the default value is provided
if (column.hasDefaultValue()) {
- fieldBuilder.defaultValue(column.defaultValue());
+ fieldBuilder
+ .defaultValue(customConverterRegistry.getValueConverter(table.id(), column).orElse(ValueConverter.passthrough()).convert(column.defaultValue()));
}
builder.field(fieldNamer.fieldNameFor(column), fieldBuilder.build());
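
The change above matters when a custom converter alters a column's schema type: the raw default value must pass through the same value converter before being attached, or Connect rejects it. A minimal sketch of the failure mode this prevents, assuming a converter that maps a numeric column to STRING:

    import org.apache.kafka.connect.data.Schema;
    import org.apache.kafka.connect.data.SchemaBuilder;

    public class DefaultValueExample {
        public static Schema withConvertedDefault(Object rawDefault) {
            // Attaching a raw Integer default to a STRING field would fail
            // Connect's schema validation; converting it first is safe.
            return SchemaBuilder.string()
                    .defaultValue(String.valueOf(rawDefault))
                    .build();
        }
    }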
diff --git a/debezium-core/src/main/java/io/debezium/transforms/ExtractNewRecordState.java b/debezium-core/src/main/java/io/debezium/transforms/ExtractNewRecordState.java
index 92e0848ef7d..e59ccfe3d02 100644
--- a/debezium-core/src/main/java/io/debezium/transforms/ExtractNewRecordState.java
+++ b/debezium-core/src/main/java/io/debezium/transforms/ExtractNewRecordState.java
@@ -17,7 +17,6 @@
import java.util.stream.Collectors;
import org.apache.kafka.common.config.ConfigDef;
-import org.apache.kafka.common.config.ConfigException;
import org.apache.kafka.connect.connector.ConnectRecord;
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaBuilder;
@@ -71,8 +70,6 @@ public class ExtractNewRecordState<R extends ConnectRecord<R>> implements Transf
private boolean dropTombstones;
private DeleteHandling handleDeletes;
- private boolean addOperationHeader;
- private List<String> addSourceFields;
private List<FieldReference> additionalHeaders;
private List<FieldReference> additionalFields;
private String routeByField;
@@ -96,10 +93,6 @@ public void configure(final Map<String, ?> configs) {
dropTombstones = config.getBoolean(ExtractNewRecordStateConfigDefinition.DROP_TOMBSTONES);
handleDeletes = DeleteHandling.parse(config.getString(ExtractNewRecordStateConfigDefinition.HANDLE_DELETES));
- addOperationHeader = config.getBoolean(ExtractNewRecordStateConfigDefinition.OPERATION_HEADER);
-
- addSourceFields = determineAdditionalSourceField(config.getString(ExtractNewRecordStateConfigDefinition.ADD_SOURCE_FIELDS));
-
additionalFields = FieldReference.fromConfiguration(config.getString(ExtractNewRecordStateConfigDefinition.ADD_FIELDS));
additionalHeaders = FieldReference.fromConfiguration(config.getString(ExtractNewRecordStateConfigDefinition.ADD_HEADERS));
@@ -127,20 +120,8 @@ public void configure(final Map<String, ?> configs) {
schemaUpdateCache = new BoundedConcurrentHashMap<>(SCHEMA_CACHE_SIZE);
}
- private static List<String> determineAdditionalSourceField(String addSourceFieldsConfig) {
- if (Strings.isNullOrEmpty(addSourceFieldsConfig)) {
- return Collections.emptyList();
- }
- else {
- return Arrays.stream(addSourceFieldsConfig.split(","))
- .map(String::trim)
- .collect(Collectors.toList());
- }
- }
-
@Override
public R apply(final R record) {
- Envelope.Operation operation;
if (record.value() == null) {
if (dropTombstones) {
LOGGER.trace("Tombstone {} arrived and requested to be dropped", record.key());
@@ -150,11 +131,6 @@ public R apply(final R record) {
Headers headersToAdd = makeHeaders(additionalHeaders, (Struct) record.value());
headersToAdd.forEach(h -> record.headers().add(h));
}
- else if (addOperationHeader) {
- operation = Envelope.Operation.DELETE;
- LOGGER.warn("operation.header has been deprecated and is scheduled for removal. Use add.headers instead.");
- record.headers().addString(ExtractNewRecordStateConfigDefinition.DEBEZIUM_OPERATION_HEADER_KEY, operation.toString());
- }
return record;
}
@@ -166,18 +142,6 @@ else if (addOperationHeader) {
Headers headersToAdd = makeHeaders(additionalHeaders, (Struct) record.value());
headersToAdd.forEach(h -> record.headers().add(h));
}
- else if (addOperationHeader) {
- LOGGER.warn("operation.header has been deprecated and is scheduled for removal. Use add.headers instead.");
- String operationString = ((Struct) record.value()).getString("op");
- operation = Envelope.Operation.forCode(operationString);
-
- if (operationString.isEmpty() || operation == null) {
- LOGGER.warn("Unknown operation thus unable to add the operation header into the message");
- }
- else {
- record.headers().addString(ExtractNewRecordStateConfigDefinition.DEBEZIUM_OPERATION_HEADER_KEY, operation.code());
- }
- }
R newRecord = afterDelegate.apply(record);
if (newRecord.value() == null) {
@@ -195,7 +159,7 @@ else if (addOperationHeader) {
case REWRITE:
LOGGER.trace("Delete message {} requested to be rewritten", record.key());
R oldRecord = beforeDelegate.apply(record);
- oldRecord = !additionalFields.isEmpty() ? addFields(additionalFields, record, oldRecord) : addSourceFields(addSourceFields, record, oldRecord);
+ oldRecord = addFields(additionalFields, record, oldRecord);
return removedDelegate.apply(oldRecord);
default:
@@ -210,7 +174,7 @@ else if (addOperationHeader) {
newRecord = setTopic(newTopicName, newRecord);
}
- newRecord = !additionalFields.isEmpty() ? addFields(additionalFields, record, newRecord) : addSourceFields(addSourceFields, record, newRecord);
+ newRecord = addFields(additionalFields, record, newRecord);
// Handling insert and update records
switch (handleDeletes) {
@@ -308,62 +272,13 @@ private Struct updateValue(FieldReference fieldReference, Struct updatedValue, S
return updatedValue.put(fieldReference.getNewFieldName(), fieldReference.getValue(struct));
}
- private R addSourceFields(List<String> addSourceFields, R originalRecord, R unwrappedRecord) {
- // Return if no source fields to add
- if (addSourceFields.isEmpty()) {
- return unwrappedRecord;
- }
-
- LOGGER.warn("add.source.fields has been deprecated and is scheduled for removal. Use add.fields instead.");
-
- final Struct value = requireStruct(unwrappedRecord.value(), PURPOSE);
- Struct source = ((Struct) originalRecord.value()).getStruct("source");
-
- // Get (or compute) the updated schema from the cache
- Schema updatedSchema = schemaUpdateCache.computeIfAbsent(value.schema(), s -> makeUpdatedSchema(s, source.schema(), addSourceFields));
-
- // Create the updated struct
- final Struct updatedValue = new Struct(updatedSchema);
- for (org.apache.kafka.connect.data.Field field : value.schema().fields()) {
- updatedValue.put(field.name(), value.get(field));
- }
- for (String sourceField : addSourceFields) {
- updatedValue.put(ExtractNewRecordStateConfigDefinition.METADATA_FIELD_PREFIX + sourceField, source.get(sourceField));
- }
-
- return unwrappedRecord.newRecord(
- unwrappedRecord.topic(),
- unwrappedRecord.kafkaPartition(),
- unwrappedRecord.keySchema(),
- unwrappedRecord.key(),
- updatedSchema,
- updatedValue,
- unwrappedRecord.timestamp());
- }
-
- private Schema makeUpdatedSchema(Schema schema, Schema sourceSchema, List<String> addSourceFields) {
- final SchemaBuilder builder = SchemaUtil.copySchemaBasics(schema, SchemaBuilder.struct());
- // Get fields from original schema
- for (org.apache.kafka.connect.data.Field field : schema.fields()) {
- builder.field(field.name(), field.schema());
- }
- // Add the requested source fields, throw exception if a specified source field is not part of the source schema
- for (String sourceField : addSourceFields) {
- if (sourceSchema.field(sourceField) == null) {
- throw new ConfigException("Source field specified in 'add.source.fields' does not exist: " + sourceField);
- }
- builder.field(
- ExtractNewRecordStateConfigDefinition.METADATA_FIELD_PREFIX + sourceField,
- sourceSchema.field(sourceField).schema());
- }
- return builder.build();
- }
-
@Override
public ConfigDef config() {
final ConfigDef config = new ConfigDef();
- Field.group(config, null, ExtractNewRecordStateConfigDefinition.DROP_TOMBSTONES, ExtractNewRecordStateConfigDefinition.HANDLE_DELETES,
- ExtractNewRecordStateConfigDefinition.OPERATION_HEADER);
+ Field.group(config, null, ExtractNewRecordStateConfigDefinition.DROP_TOMBSTONES,
+ ExtractNewRecordStateConfigDefinition.HANDLE_DELETES, ExtractNewRecordStateConfigDefinition.ADD_FIELDS,
+ ExtractNewRecordStateConfigDefinition.ADD_HEADERS,
+ ExtractNewRecordStateConfigDefinition.ROUTE_BY_FIELD);
return config;
}
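
With operation.header and add.source.fields removed, the generic options cover both use cases. A hedged configuration sketch in the style of the tests further below (property values are illustrative):

    import java.util.HashMap;
    import java.util.Map;

    import org.apache.kafka.connect.source.SourceRecord;

    import io.debezium.transforms.ExtractNewRecordState;

    public class UnwrapConfigExample {
        public static ExtractNewRecordState<SourceRecord> configure() {
            final ExtractNewRecordState<SourceRecord> transform = new ExtractNewRecordState<>();
            final Map<String, String> props = new HashMap<>();
            // operation.header=true becomes add.headers=op (header key "__op")
            props.put("add.headers", "op");
            // add.source.fields=lsn,version becomes add.fields with a struct prefix
            props.put("add.fields", "source.lsn,source.version");
            transform.configure(props);
            return transform;
        }
    }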
diff --git a/debezium-core/src/main/java/io/debezium/transforms/ExtractNewRecordStateConfigDefinition.java b/debezium-core/src/main/java/io/debezium/transforms/ExtractNewRecordStateConfigDefinition.java
index c04543ad5a1..3988b67d393 100644
--- a/debezium-core/src/main/java/io/debezium/transforms/ExtractNewRecordStateConfigDefinition.java
+++ b/debezium-core/src/main/java/io/debezium/transforms/ExtractNewRecordStateConfigDefinition.java
@@ -12,7 +12,7 @@
public class ExtractNewRecordStateConfigDefinition {
- public static final String DEBEZIUM_OPERATION_HEADER_KEY = "__debezium-operation";
+ public static final String DEBEZIUM_OPERATION_HEADER_KEY = "__op";
public static final String DELETED_FIELD = "__deleted";
public static final String METADATA_FIELD_PREFIX = "__";
@@ -87,16 +87,6 @@ public static DeleteHandling parse(String value, String defaultValue) {
+ "drop - records are removed (the default),"
+ "rewrite - __deleted field is added to records.");
- public static final Field OPERATION_HEADER = Field.create("operation.header")
- .withDisplayName("Adds a message header representing the applied operation")
- .withType(ConfigDef.Type.BOOLEAN)
- .withWidth(ConfigDef.Width.SHORT)
- .withImportance(ConfigDef.Importance.LOW)
- .withDefault(false)
- .withDescription("DEPRECATED. Please use the 'add.fields' option instead. "
- + "Adds the operation type of the change event as a header."
- + "Its key is '" + ExtractNewRecordStateConfigDefinition.DEBEZIUM_OPERATION_HEADER_KEY + "'");
-
public static final Field ROUTE_BY_FIELD = Field.create("route.by.field")
.withDisplayName("The column which determines how the events will be routed, the value will replace the topic name.")
.withType(ConfigDef.Type.STRING)
@@ -104,16 +94,6 @@ public static DeleteHandling parse(String value, String defaultValue) {
.withImportance(ConfigDef.Importance.LOW)
.withDefault("");
- public static final Field ADD_SOURCE_FIELDS = Field.create("add.source.fields")
- .withDisplayName("Adds the specified fields from the 'source' field from the payload if they exist.")
- .withType(ConfigDef.Type.LIST)
- .withWidth(ConfigDef.Width.LONG)
- .withImportance(ConfigDef.Importance.LOW)
- .withDefault("")
- .withDescription("DEPRECATED. Please use the 'add.fields' option instead. "
- + "Adds each field listed from the 'source' element of the payload, prefixed with__ "
- + "Example: 'version,connector' would add __version and __connector fields");
-
public static final Field ADD_FIELDS = Field.create("add.fields")
.withDisplayName("Adds the specified field(s) to the message if they exist.")
.withType(ConfigDef.Type.LIST)
diff --git a/debezium-core/src/main/java/io/debezium/transforms/ScriptingTransformation.java b/debezium-core/src/main/java/io/debezium/transforms/ScriptingTransformation.java
index edbab794a62..a6852f8ce92 100644
--- a/debezium-core/src/main/java/io/debezium/transforms/ScriptingTransformation.java
+++ b/debezium-core/src/main/java/io/debezium/transforms/ScriptingTransformation.java
@@ -6,6 +6,7 @@
package io.debezium.transforms;
import java.util.Map;
+import java.util.regex.Pattern;
import org.apache.kafka.common.config.ConfigDef;
import org.apache.kafka.connect.connector.ConnectRecord;
@@ -21,6 +22,7 @@
import io.debezium.transforms.scripting.Engine;
import io.debezium.transforms.scripting.GraalJsEngine;
import io.debezium.transforms.scripting.Jsr223Engine;
+import io.debezium.util.Strings;
/**
* This is a base class for any SMT using scripting languages.
@@ -93,13 +95,21 @@ public static NullHandling parse(String value, String defaultValue) {
}
}
+ private static final Field TOPIC_REGEX = Field.create("topic.regex")
+ .withDisplayName("Topic regex")
+ .withType(ConfigDef.Type.STRING)
+ .withWidth(ConfigDef.Width.LONG)
+ .withImportance(ConfigDef.Importance.LOW)
+ .withValidation(Field::isRegex)
+ .withDescription("A regex used for selecting the topic(s) to which this transformation should be applied.");
+
public static final Field LANGUAGE = Field.create("language")
.withDisplayName("Expression language")
.withType(ConfigDef.Type.STRING)
.withWidth(ConfigDef.Width.MEDIUM)
.withImportance(ConfigDef.Importance.HIGH)
.withValidation(Field::isRequired)
- .withDescription("An expression language used to evaluate the expression. 'groovy' and 'graal.js' are supported.");
+ .withDescription("An expression language used to evaluate the expression. Must begin with 'jsr223.', e.g. 'jsr223.groovy' or 'jsr223.graal.js'.");
public static final Field NULL_HANDLING = Field.create("null.handling.mode")
.withDisplayName("Handle null records")
@@ -113,12 +123,13 @@ public static NullHandling parse(String value, String defaultValue) {
protected Engine engine;
private NullHandling nullHandling;
+ private Pattern topicPattern;
@Override
public void configure(Map<String, ?> configs) {
final Configuration config = Configuration.from(configs);
- final Field.Set configFields = Field.setOf(LANGUAGE, expressionField(), NULL_HANDLING);
+ final Field.Set configFields = Field.setOf(TOPIC_REGEX, LANGUAGE, expressionField(), NULL_HANDLING);
if (!config.validateAndRecord(configFields, LOGGER::error)) {
throw new DebeziumException("The provided configuration isn't valid; check the error log for details.");
}
@@ -128,8 +139,6 @@ public void configure(Map configs) {
LOGGER.info("Using language '{}' to evaluate expression '{}'", language, expression);
- nullHandling = NullHandling.parse(config.getString(NULL_HANDLING));
-
// currently only bootstrapping via JSR 223 is supported, but we could add
// support for other means of bootstrapping later on, e.g. for "native"
// bootstrap of GraalJS
@@ -154,10 +163,21 @@ public void configure(Map configs) {
catch (Exception e) {
throw new DebeziumException("Failed to parse expression '" + expression + "'", e);
}
+
+ nullHandling = NullHandling.parse(config.getString(NULL_HANDLING));
+
+ String topicRegex = config.getString(TOPIC_REGEX);
+ if (!Strings.isNullOrEmpty(topicRegex)) {
+ this.topicPattern = Pattern.compile(topicRegex);
+ }
}
@Override
public R apply(R record) {
+ if (topicPattern != null && !topicPattern.matcher(record.topic()).matches()) {
+ return record;
+ }
+
if (record.value() == null) {
if (nullHandling == NullHandling.KEEP) {
return record;
@@ -176,7 +196,7 @@ else if (nullHandling == NullHandling.DROP) {
@Override
public ConfigDef config() {
final ConfigDef config = new ConfigDef();
- Field.group(config, null, LANGUAGE, expressionField(), NULL_HANDLING);
+ Field.group(config, null, TOPIC_REGEX, LANGUAGE, expressionField(), NULL_HANDLING);
return config;
}
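
Configured this way, apply() returns records from non-matching topics untouched before the expression is ever evaluated. A hedged sketch of scoping the Filter SMT to one topic namespace (the regex and condition are illustrative):

    import java.util.HashMap;
    import java.util.Map;

    import org.apache.kafka.connect.source.SourceRecord;

    import io.debezium.transforms.Filter;

    public class TopicScopedFilterExample {
        public static Filter<SourceRecord> configure() {
            final Filter<SourceRecord> transform = new Filter<>();
            final Map<String, String> props = new HashMap<>();
            props.put("topic.regex", "customers\\..*"); // only evaluate these topics
            props.put("language", "jsr223.groovy");
            props.put("condition", "value.op != 'd'"); // drop delete events
            transform.configure(props);
            return transform;
        }
    }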
diff --git a/debezium-core/src/main/java/io/debezium/transforms/scripting/GraalJsEngine.java b/debezium-core/src/main/java/io/debezium/transforms/scripting/GraalJsEngine.java
index 2bb49bcbd0a..b0a87e612f7 100644
--- a/debezium-core/src/main/java/io/debezium/transforms/scripting/GraalJsEngine.java
+++ b/debezium-core/src/main/java/io/debezium/transforms/scripting/GraalJsEngine.java
@@ -7,12 +7,15 @@
import java.util.ArrayList;
import java.util.List;
+import java.util.Map;
import javax.script.Bindings;
+import javax.script.ScriptContext;
import org.apache.kafka.connect.connector.ConnectRecord;
import org.apache.kafka.connect.data.Field;
import org.apache.kafka.connect.data.Struct;
+import org.apache.kafka.connect.header.Header;
import org.graalvm.polyglot.Value;
import org.graalvm.polyglot.proxy.ProxyObject;
@@ -29,18 +32,31 @@ public class GraalJsEngine extends Jsr223Engine {
@Override
protected void configureEngine() {
+ final Bindings bindings = engine.getBindings(ScriptContext.ENGINE_SCOPE);
+ bindings.put("polyglot.js.allowHostAccess", true);
}
@Override
- protected Bindings getBindings(ConnectRecord<?> record) {
- Bindings bindings = engine.createBindings();
+ protected Object key(ConnectRecord<?> record) {
+ return asProxyObject((Struct) record.key());
+ }
+
+ @Override
+ protected Object value(ConnectRecord<?> record) {
+ return asProxyObject((Struct) record.value());
+ }
- bindings.put("key", asProxyObject((Struct) record.key()));
- bindings.put("value", asProxyObject((Struct) record.value()));
- bindings.put("keySchema", record.keySchema());
- bindings.put("valueSchema", record.valueSchema());
+ @Override
+ protected Object headers(ConnectRecord<?> record) {
+ return asProxyObject(doHeaders(record));
+ }
- return bindings;
+ @Override
+ protected RecordHeader header(Header header) {
+ if (header.value() instanceof Struct) {
+ return new RecordHeader(header.schema(), asProxyObject((Struct) header.value()));
+ }
+ return super.header(header);
}
/**
@@ -83,4 +99,33 @@ public Object getMember(String key) {
}
};
}
+
+ /**
+ * Exposes the given Map as a {@link ProxyObject}, allowing for simplified
+ * property reference.
+ */
+ private ProxyObject asProxyObject(Map<String, RecordHeader> map) {
+ return new ProxyObject() {
+
+ @Override
+ public void putMember(String key, Value value) {
+ throw new UnsupportedOperationException("Record attributes must not be modified from within this transformation");
+ }
+
+ @Override
+ public boolean hasMember(String key) {
+ return map.containsKey(key);
+ }
+
+ @Override
+ public Object getMemberKeys() {
+ return map.keySet();
+ }
+
+ @Override
+ public Object getMember(String key) {
+ return map.get(key);
+ }
+ };
+ }
}
diff --git a/debezium-core/src/main/java/io/debezium/transforms/scripting/Jsr223Engine.java b/debezium-core/src/main/java/io/debezium/transforms/scripting/Jsr223Engine.java
index 4eee2e8c780..a5a0b5e90ed 100644
--- a/debezium-core/src/main/java/io/debezium/transforms/scripting/Jsr223Engine.java
+++ b/debezium-core/src/main/java/io/debezium/transforms/scripting/Jsr223Engine.java
@@ -5,6 +5,9 @@
*/
package io.debezium.transforms.scripting;
+import java.util.HashMap;
+import java.util.Map;
+
import javax.script.Bindings;
import javax.script.Compilable;
import javax.script.CompiledScript;
@@ -13,6 +16,7 @@
import javax.script.ScriptException;
import org.apache.kafka.connect.connector.ConnectRecord;
+import org.apache.kafka.connect.header.Header;
import io.debezium.DebeziumException;
@@ -60,14 +64,40 @@ protected void configureEngine() {
protected Bindings getBindings(ConnectRecord<?> record) {
final Bindings bindings = engine.createBindings();
- bindings.put("key", record.key());
- bindings.put("value", record.value());
+ bindings.put("key", key(record));
+ bindings.put("value", value(record));
bindings.put("keySchema", record.keySchema());
bindings.put("valueSchema", record.valueSchema());
+ bindings.put("topic", record.topic());
+ bindings.put("header", headers(record));
return bindings;
}
+ protected Object key(ConnectRecord<?> record) {
+ return record.key();
+ }
+
+ protected Object value(ConnectRecord<?> record) {
+ return record.value();
+ }
+
+ protected RecordHeader header(Header header) {
+ return new RecordHeader(header.schema(), header.value());
+ }
+
+ protected Object headers(ConnectRecord<?> record) {
+ return doHeaders(record);
+ }
+
+ protected Map<String, RecordHeader> doHeaders(ConnectRecord<?> record) {
+ final Map<String, RecordHeader> headers = new HashMap<>();
+ for (Header header : record.headers()) {
+ headers.put(header.key(), header(header));
+ }
+ return headers;
+ }
+
@SuppressWarnings("unchecked")
@Override
public <T> T eval(ConnectRecord<?> record, Class<T> type) {
diff --git a/debezium-core/src/main/java/io/debezium/transforms/scripting/RecordHeader.java b/debezium-core/src/main/java/io/debezium/transforms/scripting/RecordHeader.java
new file mode 100644
index 00000000000..bb2542c1844
--- /dev/null
+++ b/debezium-core/src/main/java/io/debezium/transforms/scripting/RecordHeader.java
@@ -0,0 +1,32 @@
+/*
+ * Copyright Debezium Authors.
+ *
+ * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
+ */
+package io.debezium.transforms.scripting;
+
+import org.apache.kafka.connect.data.Schema;
+
+public class RecordHeader {
+
+ /**
+ * Value of the header
+ */
+ public final Object value;
+
+ /**
+ * Schema of the header
+ */
+ public final Schema schema;
+
+ public RecordHeader(Schema schema, Object value) {
+ super();
+ this.value = value;
+ this.schema = schema;
+ }
+
+ @Override
+ public String toString() {
+ return "RecordHeader [value=" + value + ", schema=" + schema + "]";
+ }
+}
diff --git a/debezium-core/src/test/java/io/debezium/junit/SkipWhenKafkaVersion.java b/debezium-core/src/test/java/io/debezium/junit/SkipWhenKafkaVersion.java
index f205830e33c..9d5d4e7e46b 100644
--- a/debezium-core/src/test/java/io/debezium/junit/SkipWhenKafkaVersion.java
+++ b/debezium-core/src/test/java/io/debezium/junit/SkipWhenKafkaVersion.java
@@ -27,6 +27,33 @@
String description() default "";
public enum KafkaVersion {
+ KAFKA_1XX {
+ @Override
+ boolean isLessThan(int major, int minor, int patch) {
+ return major < 1;
+ }
+
+ @Override
+ boolean isLessThanOrEqualTo(int major, int minor, int patch) {
+ return isLessThan(major, minor, patch) || isEqualTo(major, minor, patch);
+ }
+
+ @Override
+ boolean isEqualTo(int major, int minor, int patch) {
+ return major == 1;
+ }
+
+ @Override
+ boolean isGreaterThanOrEqualTo(int major, int minor, int patch) {
+ return major > 1 || isEqualTo(major, minor, patch);
+ }
+
+ @Override
+ boolean isGreaterThan(int major, int minor, int patch) {
+ return major > 1;
+ }
+ },
+
KAFKA_241 {
@Override
boolean isLessThan(int major, int minor, int patch) {
diff --git a/debezium-core/src/test/java/io/debezium/transforms/ExtractNewRecordStateTest.java b/debezium-core/src/test/java/io/debezium/transforms/ExtractNewRecordStateTest.java
index 449dfce0361..2f0130e1507 100644
--- a/debezium-core/src/test/java/io/debezium/transforms/ExtractNewRecordStateTest.java
+++ b/debezium-core/src/test/java/io/debezium/transforms/ExtractNewRecordStateTest.java
@@ -12,7 +12,6 @@
import java.util.Iterator;
import java.util.Map;
-import org.apache.kafka.common.config.ConfigException;
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaBuilder;
import org.apache.kafka.connect.data.Struct;
@@ -31,8 +30,6 @@ public class ExtractNewRecordStateTest {
private static final String DROP_TOMBSTONES = "drop.tombstones";
private static final String HANDLE_DELETES = "delete.handling.mode";
- private static final String OPERATION_HEADER = "operation.header";
- private static final String ADD_SOURCE_FIELDS = "add.source.fields";
private static final String ROUTE_BY_FIELD = "route.by.field";
private static final String ADD_FIELDS = "add.fields";
private static final String ADD_HEADERS = "add.headers";
@@ -248,7 +245,7 @@ public void testHandleCreateRewrite() {
try (final ExtractNewRecordState<SourceRecord> transform = new ExtractNewRecordState<>()) {
final Map<String, String> props = new HashMap<>();
props.put(HANDLE_DELETES, "rewrite");
- props.put(OPERATION_HEADER, "true");
+ props.put(ADD_HEADERS, "op");
transform.configure(props);
final SourceRecord createRecord = createCreateRecord();
@@ -306,19 +303,6 @@ public void testUnwrapPropagatesRecordHeaders() {
}
}
- @Test
- public void testAddSourceField() {
- try (final ExtractNewRecordState<SourceRecord> transform = new ExtractNewRecordState<>()) {
- final Map<String, String> props = new HashMap<>();
- props.put(ADD_SOURCE_FIELDS, "lsn");
- transform.configure(props);
-
- final SourceRecord createRecord = createComplexCreateRecord();
- final SourceRecord unwrapped = transform.apply(createRecord);
- assertThat(((Struct) unwrapped.value()).get("__lsn")).isEqualTo(1234);
- }
- }
-
@Test
@FixFor("DBZ-1452")
public void testAddField() {
@@ -524,25 +508,11 @@ public void testAddHeadersHandleDeleteRewriteAndTombstone() {
}
}
- @Test
- public void testAddSourceFields() {
+ @Test(expected = IllegalArgumentException.class)
+ public void testAddFieldNonExistantField() {
try (final ExtractNewRecordState<SourceRecord> transform = new ExtractNewRecordState<>()) {
final Map<String, String> props = new HashMap<>();
- props.put(ADD_SOURCE_FIELDS, "lsn , version");
- transform.configure(props);
-
- final SourceRecord createRecord = createComplexCreateRecord();
- final SourceRecord unwrapped = transform.apply(createRecord);
- assertThat(((Struct) unwrapped.value()).get("__lsn")).isEqualTo(1234);
- assertThat(((Struct) unwrapped.value()).getString("__version")).isEqualTo("version!");
- }
- }
-
- @Test(expected = ConfigException.class)
- public void testAddSourceNonExistantField() {
- try (final ExtractNewRecordState<SourceRecord> transform = new ExtractNewRecordState<>()) {
- final Map<String, String> props = new HashMap<>();
- props.put(ADD_SOURCE_FIELDS, "nope");
+ props.put(ADD_FIELDS, "nope");
transform.configure(props);
final SourceRecord createRecord = createComplexCreateRecord();
@@ -552,39 +522,6 @@ public void testAddSourceNonExistantField() {
}
}
- @Test
- @FixFor("DBZ-1448")
- public void testAddSourceFieldHandleDeleteRewrite() {
- try (final ExtractNewRecordState<SourceRecord> transform = new ExtractNewRecordState<>()) {
- final Map<String, String> props = new HashMap<>();
- props.put(HANDLE_DELETES, "rewrite");
- props.put(ADD_SOURCE_FIELDS, "lsn");
- transform.configure(props);
-
- final SourceRecord deleteRecord = createDeleteRecord();
- final SourceRecord unwrapped = transform.apply(deleteRecord);
- assertThat(((Struct) unwrapped.value()).getString("__deleted")).isEqualTo("true");
- assertThat(((Struct) unwrapped.value()).get("__lsn")).isEqualTo(1234);
- }
- }
-
- @Test
- @FixFor("DBZ-1448")
- public void testAddSourceFieldsHandleDeleteRewrite() {
- try (final ExtractNewRecordState<SourceRecord> transform = new ExtractNewRecordState<>()) {
- final Map<String, String> props = new HashMap<>();
- props.put(HANDLE_DELETES, "rewrite");
- props.put(ADD_SOURCE_FIELDS, "lsn,version");
- transform.configure(props);
-
- final SourceRecord deleteRecord = createDeleteRecord();
- final SourceRecord unwrapped = transform.apply(deleteRecord);
- assertThat(((Struct) unwrapped.value()).getString("__deleted")).isEqualTo("true");
- assertThat(((Struct) unwrapped.value()).get("__lsn")).isEqualTo(1234);
- assertThat(((Struct) unwrapped.value()).getString("__version")).isEqualTo("version!");
- }
- }
-
@Test
@FixFor("DBZ-1452")
public void testAddFieldHandleDeleteRewrite() {
@@ -661,7 +598,7 @@ public void testAddFieldsSpecifyStructHandleDeleteRewrite() {
public void testSchemaChangeEventWithOperationHeader() {
try (final ExtractNewRecordState<SourceRecord> transform = new ExtractNewRecordState<>()) {
final Map<String, String> props = new HashMap<>();
- props.put(OPERATION_HEADER, "true");
+ props.put(ADD_HEADERS, "op");
transform.configure(props);
final SourceRecord unknownRecord = createUnknownRecord();
diff --git a/debezium-core/src/test/java/io/debezium/transforms/FilterTest.java b/debezium-core/src/test/java/io/debezium/transforms/FilterTest.java
index a29600c0641..46dd34b5fd6 100644
--- a/debezium-core/src/test/java/io/debezium/transforms/FilterTest.java
+++ b/debezium-core/src/test/java/io/debezium/transforms/FilterTest.java
@@ -14,17 +14,20 @@
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaBuilder;
import org.apache.kafka.connect.data.Struct;
+import org.apache.kafka.connect.header.ConnectHeaders;
import org.apache.kafka.connect.source.SourceRecord;
import org.junit.Test;
import io.debezium.DebeziumException;
import io.debezium.data.Envelope;
+import io.debezium.doc.FixFor;
/**
* @author Jiri Pechanec
*/
public class FilterTest {
+ private static final String TOPIC_REGEX = "topic.regex";
private static final String LANGUAGE = "language";
private static final String EXPRESSION = "condition";
private static final String NULL_HANDLING = "null.handling.mode";
@@ -95,6 +98,49 @@ public void shouldProcessCondition() {
}
}
+ @Test
+ @FixFor("DBZ-2074")
+ public void shouldProcessTopic() {
+ try (final Filter<SourceRecord> transform = new Filter<>()) {
+ final Map<String, String> props = new HashMap<>();
+ props.put(EXPRESSION, "topic == 'dummy1'");
+ props.put(LANGUAGE, "jsr223.groovy");
+ transform.configure(props);
+ final SourceRecord record = createDeleteRecord(1);
+ assertThat(transform.apply(createDeleteRecord(2))).isNull();
+ assertThat(transform.apply(record)).isSameAs(record);
+ }
+ }
+
+ @Test
+ @FixFor("DBZ-2074")
+ public void shouldProcessHeader() {
+ try (final Filter<SourceRecord> transform = new Filter<>()) {
+ final Map<String, String> props = new HashMap<>();
+ props.put(EXPRESSION, "header.idh.value == 1");
+ props.put(LANGUAGE, "jsr223.groovy");
+ transform.configure(props);
+ final SourceRecord record = createDeleteRecord(1);
+ assertThat(transform.apply(createDeleteRecord(2))).isNull();
+ assertThat(transform.apply(record)).isSameAs(record);
+ }
+ }
+
+ @Test
+ @FixFor("DBZ-2024")
+ public void shouldApplyTopicRegex() {
+ try (final Filter<SourceRecord> transform = new Filter<>()) {
+ final Map<String, String> props = new HashMap<>();
+ props.put(TOPIC_REGEX, "dum.*");
+ props.put(EXPRESSION, "value.op != 'd' || value.before.id != 2");
+ props.put(LANGUAGE, "jsr223.groovy");
+ transform.configure(props);
+ final SourceRecord record = createDeleteCustomerRecord(2);
+ assertThat(transform.apply(record)).isSameAs(record);
+ assertThat(transform.apply(createDeleteRecord(2))).isNull();
+ }
+ }
+
@Test
public void shouldKeepNulls() {
try (final Filter<SourceRecord> transform = new Filter<>()) {
@@ -153,7 +199,36 @@ private SourceRecord createDeleteRecord(int id) {
source.put("lsn", 1234);
source.put("version", "version!");
final Struct payload = deleteEnvelope.delete(before, source, Instant.now());
- return new SourceRecord(new HashMap<>(), new HashMap<>(), "dummy", envelope.schema(), payload);
+ final ConnectHeaders headers = new ConnectHeaders();
+ headers.addInt("idh", id);
+ return new SourceRecord(new HashMap<>(), new HashMap<>(), "dummy" + id, 0,
+ null, null,
+ envelope.schema(), payload,
+ (long) id,
+ headers);
+ }
+
+ private SourceRecord createDeleteCustomerRecord(int id) {
+ final Schema deleteSourceSchema = SchemaBuilder.struct()
+ .field("lsn", SchemaBuilder.int32())
+ .field("version", SchemaBuilder.string())
+ .build();
+
+ Envelope deleteEnvelope = Envelope.defineSchema()
+ .withName("customer.Envelope")
+ .withRecord(recordSchema)
+ .withSource(deleteSourceSchema)
+ .build();
+
+ final Struct before = new Struct(recordSchema);
+ final Struct source = new Struct(deleteSourceSchema);
+
+ before.put("id", (byte) id);
+ before.put("name", "myRecord");
+ source.put("lsn", 1234);
+ source.put("version", "version!");
+ final Struct payload = deleteEnvelope.delete(before, source, Instant.now());
+ return new SourceRecord(new HashMap<>(), new HashMap<>(), "customer", envelope.schema(), payload);
}
private SourceRecord createNullRecord() {
@@ -172,4 +247,18 @@ public void shouldRunJavaScript() {
assertThat(transform.apply(record)).isSameAs(record);
}
}
+
+ @Test
+ @FixFor("DBZ-2074")
+ public void shouldRunJavaScriptWithHeaderAndTopic() {
+ try (final Filter<SourceRecord> transform = new Filter<>()) {
+ final Map<String, String> props = new HashMap<>();
+ props.put(EXPRESSION, "header.idh.value == 1 && topic.startsWith('dummy')");
+ props.put(LANGUAGE, "jsr223.graal.js");
+ transform.configure(props);
+ final SourceRecord record = createDeleteRecord(1);
+ assertThat(transform.apply(createDeleteRecord(2))).isNull();
+ assertThat(transform.apply(record)).isSameAs(record);
+ }
+ }
}
diff --git a/debezium-core/src/test/java/io/debezium/transforms/RouterTest.java b/debezium-core/src/test/java/io/debezium/transforms/RouterTest.java
index a22c8b56c52..87c79bb25de 100644
--- a/debezium-core/src/test/java/io/debezium/transforms/RouterTest.java
+++ b/debezium-core/src/test/java/io/debezium/transforms/RouterTest.java
@@ -19,12 +19,14 @@
import io.debezium.DebeziumException;
import io.debezium.data.Envelope;
+import io.debezium.doc.FixFor;
/**
* @author Jiri Pechanec
*/
public class RouterTest {
+ private static final String TOPIC_REGEX = "topic.regex";
private static final String LANGUAGE = "language";
private static final String EXPRESSION = "topic.expression";
private static final String NULL_HANDLING = "null.handling.mode";
@@ -76,6 +78,20 @@ public void shouldRoute() {
}
}
+ @Test
+ @FixFor("DBZ-2024")
+ public void shouldApplyTopicRegex() {
+ try (final ContentBasedRouter<SourceRecord> transform = new ContentBasedRouter<>()) {
+ final Map<String, String> props = new HashMap<>();
+ props.put(TOPIC_REGEX, "orig.*");
+ props.put(EXPRESSION, "value == null ? 'nulls' : (value.before.id == 1 ? 'ones' : null)");
+ props.put(LANGUAGE, "jsr223.groovy");
+ transform.configure(props);
+ assertThat(transform.apply(createDeleteRecord(1)).topic()).describedAs("Matching topic").isEqualTo("ones");
+ assertThat(transform.apply(createDeleteCustomerRecord(1)).topic()).describedAs("Non-matching topic").isEqualTo("customer");
+ }
+ }
+
@Test
public void shouldKeepNulls() {
try (final ContentBasedRouter<SourceRecord> transform = new ContentBasedRouter<>()) {
@@ -137,6 +153,29 @@ private SourceRecord createDeleteRecord(int id) {
return new SourceRecord(new HashMap<>(), new HashMap<>(), "original", envelope.schema(), payload);
}
+ private SourceRecord createDeleteCustomerRecord(int id) {
+ final Schema deleteSourceSchema = SchemaBuilder.struct()
+ .field("lsn", SchemaBuilder.int32())
+ .field("version", SchemaBuilder.string())
+ .build();
+
+ Envelope deleteEnvelope = Envelope.defineSchema()
+ .withName("dummy.Envelope")
+ .withRecord(recordSchema)
+ .withSource(deleteSourceSchema)
+ .build();
+
+ final Struct before = new Struct(recordSchema);
+ final Struct source = new Struct(deleteSourceSchema);
+
+ before.put("id", (byte) id);
+ before.put("name", "myRecord");
+ source.put("lsn", 1234);
+ source.put("version", "version!");
+ final Struct payload = deleteEnvelope.delete(before, source, Instant.now());
+ return new SourceRecord(new HashMap<>(), new HashMap<>(), "customer", envelope.schema(), payload);
+ }
+
private SourceRecord createNullRecord() {
return new SourceRecord(new HashMap<>(), new HashMap<>(), "original", null, null, null, null);
}
diff --git a/debezium-ddl-parser/pom.xml b/debezium-ddl-parser/pom.xml
index 61c71b93db3..aee166b257a 100644
--- a/debezium-ddl-parser/pom.xml
+++ b/debezium-ddl-parser/pom.xml
@@ -3,7 +3,7 @@
     <parent>
         <groupId>io.debezium</groupId>
         <artifactId>debezium-parent</artifactId>
-        <version>1.2.0-SNAPSHOT</version>
+        <version>1.2.0-SNAPSHOT-cs</version>
         <relativePath>../pom.xml</relativePath>
     </parent>
     <modelVersion>4.0.0</modelVersion>
diff --git a/debezium-ddl-parser/src/main/antlr4/io/debezium/ddl/parser/mysql/generated/MySqlLexer.g4 b/debezium-ddl-parser/src/main/antlr4/io/debezium/ddl/parser/mysql/generated/MySqlLexer.g4
index 0b8bac2683e..d4549c0bada 100644
--- a/debezium-ddl-parser/src/main/antlr4/io/debezium/ddl/parser/mysql/generated/MySqlLexer.g4
+++ b/debezium-ddl-parser/src/main/antlr4/io/debezium/ddl/parser/mysql/generated/MySqlLexer.g4
@@ -363,7 +363,7 @@ COMMIT: 'COMMIT';
COMPACT: 'COMPACT';
COMPLETION: 'COMPLETION';
COMPRESSED: 'COMPRESSED';
-COMPRESSION: 'COMPRESSION';
+COMPRESSION: QUOTE_SYMB? 'COMPRESSION' QUOTE_SYMB?;
CONCURRENT: 'CONCURRENT';
CONNECTION: 'CONNECTION';
CONSISTENT: 'CONSISTENT';
@@ -439,6 +439,7 @@ INPLACE: 'INPLACE';
INSERT_METHOD: 'INSERT_METHOD';
INSTALL: 'INSTALL';
INSTANCE: 'INSTANCE';
+INSTANT: 'INSTANT';
INVISIBLE: 'INVISIBLE';
INVOKER: 'INVOKER';
IO: 'IO';
diff --git a/debezium-ddl-parser/src/main/antlr4/io/debezium/ddl/parser/mysql/generated/MySqlParser.g4 b/debezium-ddl-parser/src/main/antlr4/io/debezium/ddl/parser/mysql/generated/MySqlParser.g4
index dcc8b2aaf70..12c7de804b4 100644
--- a/debezium-ddl-parser/src/main/antlr4/io/debezium/ddl/parser/mysql/generated/MySqlParser.g4
+++ b/debezium-ddl-parser/src/main/antlr4/io/debezium/ddl/parser/mysql/generated/MySqlParser.g4
@@ -598,7 +598,7 @@ alterSpecification
')' #alterByAddColumns
| ADD indexFormat=(INDEX | KEY) uid? indexType?
indexColumnNames indexOption* #alterByAddIndex
- | ADD (CONSTRAINT name=uid?)? PRIMARY KEY
+ | ADD (CONSTRAINT name=uid?)? PRIMARY KEY index=uid?
indexType? indexColumnNames indexOption* #alterByAddPrimaryKey
| ADD (CONSTRAINT name=uid?)? UNIQUE
indexFormat=(INDEX | KEY)? indexName=uid?
@@ -609,7 +609,7 @@ alterSpecification
| ADD (CONSTRAINT name=uid?)? FOREIGN KEY
indexName=uid? indexColumnNames referenceDefinition #alterByAddForeignKey
| ADD (CONSTRAINT name=uid?)? CHECK '(' expression ')' #alterByAddCheckTableConstraint
- | ALGORITHM '='? algType=(DEFAULT | INPLACE | COPY) #alterBySetAlgorithm
+ | ALGORITHM '='? algType=(DEFAULT | INSTANT | INPLACE | COPY) #alterBySetAlgorithm
| ALTER COLUMN? uid
(SET DEFAULT defaultValue | DROP DEFAULT) #alterByChangeDefault
| CHANGE COLUMN? oldColumn=uid
diff --git a/debezium-ddl-parser/src/test/resources/mysql/examples/ddl_alter.sql b/debezium-ddl-parser/src/test/resources/mysql/examples/ddl_alter.sql
index 01cb34b4363..793f2cb2a5d 100644
--- a/debezium-ddl-parser/src/test/resources/mysql/examples/ddl_alter.sql
+++ b/debezium-ddl-parser/src/test/resources/mysql/examples/ddl_alter.sql
@@ -1,6 +1,6 @@
#begin
-- Alter Table
-alter table ship_class add column ship_spec varchar(150) first, add somecol int after start_build;
+alter table ship_class add column ship_spec varchar(150) first, add somecol int after start_build, algorithm=instant;
alter table t3 add column (c2 decimal(10, 2) comment 'comment`' null, c3 enum('abc', 'cba', 'aaa')), add index t3_i1 using btree (c2) comment 'some index';
alter table t2 add constraint t2_pk_constraint primary key (1c), alter column `_` set default 1;
alter table ship_class change column somecol col_for_del tinyint first;
@@ -11,6 +11,10 @@ alter table t2 drop primary key;
alter table t3 rename to table3column;
alter table childtable add constraint `fk1` foreign key (idParent) references parenttable(id) on delete restrict on update cascade;
alter table table3column default character set = cp1251;
+alter table table1 add primary key (id);
+alter table table1 add primary key table_pk (id);
+alter table table1 add primary key `table_pk` (id);
+alter table table1 add primary key `table_pk` (`id`);
#end
#begin
-- Alter database
diff --git a/debezium-embedded/pom.xml b/debezium-embedded/pom.xml
index 523f6a85df0..77240e45737 100644
--- a/debezium-embedded/pom.xml
+++ b/debezium-embedded/pom.xml
@@ -3,7 +3,7 @@
     <parent>
         <groupId>io.debezium</groupId>
         <artifactId>debezium-parent</artifactId>
-        <version>1.2.0-SNAPSHOT</version>
+        <version>1.2.0-SNAPSHOT-cs</version>
         <relativePath>../pom.xml</relativePath>
     </parent>
     <modelVersion>4.0.0</modelVersion>
diff --git a/debezium-embedded/src/main/java/io/debezium/embedded/ConvertingEngineBuilder.java b/debezium-embedded/src/main/java/io/debezium/embedded/ConvertingEngineBuilder.java
index 3f098e36ba2..f32468bd2bc 100644
--- a/debezium-embedded/src/main/java/io/debezium/embedded/ConvertingEngineBuilder.java
+++ b/debezium-embedded/src/main/java/io/debezium/embedded/ConvertingEngineBuilder.java
@@ -78,7 +78,9 @@ private boolean isFormat(Class<? extends SerializationFormat<?>> format1, Class<
@Override
public Builder<R> notifying(ChangeConsumer<R> handler) {
delegate.notifying(
- (records, committer) -> handler.handleBatch(records.stream().map(x -> toFormat.apply(x)).collect(Collectors.toList()),
+ (records, committer) -> handler.handleBatch(records.stream()
+ .map(x -> toFormat.apply(x))
+ .collect(Collectors.toList()),
new RecordCommitter<R>() {
@Override
diff --git a/debezium-embedded/src/main/java/io/debezium/embedded/EmbeddedEngineChangeEvent.java b/debezium-embedded/src/main/java/io/debezium/embedded/EmbeddedEngineChangeEvent.java
index a71b0a129ee..2e3f056c0d3 100644
--- a/debezium-embedded/src/main/java/io/debezium/embedded/EmbeddedEngineChangeEvent.java
+++ b/debezium-embedded/src/main/java/io/debezium/embedded/EmbeddedEngineChangeEvent.java
@@ -37,6 +37,11 @@ public V record() {
return value;
}
+ @Override
+ public String destination() {
+ return sourceRecord.topic();
+ }
+
public SourceRecord sourceRecord() {
return sourceRecord;
}
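
The new destination() accessor exposes the source record's topic name to embedded-engine consumers. A hedged sketch of a consumer making use of it (String key/value formats assumed for brevity):

    import java.util.List;

    import io.debezium.engine.ChangeEvent;
    import io.debezium.engine.DebeziumEngine;

    public class DestinationAwareConsumer implements DebeziumEngine.ChangeConsumer<ChangeEvent<String, String>> {
        @Override
        public void handleBatch(List<ChangeEvent<String, String>> records,
                                DebeziumEngine.RecordCommitter<ChangeEvent<String, String>> committer)
                throws InterruptedException {
            for (ChangeEvent<String, String> record : records) {
                // Route or log by the originating topic instead of parsing the value
                System.out.printf("%s -> %s%n", record.destination(), record.value());
                committer.markProcessed(record);
            }
            committer.markBatchFinished();
        }
    }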
diff --git a/debezium-microbenchmark/pom.xml b/debezium-microbenchmark/pom.xml
index 95b9dd5046d..00a46cb5401 100644
--- a/debezium-microbenchmark/pom.xml
+++ b/debezium-microbenchmark/pom.xml
@@ -3,7 +3,7 @@
     <parent>
         <groupId>io.debezium</groupId>
         <artifactId>debezium-parent</artifactId>
-        <version>1.2.0-SNAPSHOT</version>
+        <version>1.2.0-SNAPSHOT-cs</version>
         <relativePath>../pom.xml</relativePath>
     </parent>
     <modelVersion>4.0.0</modelVersion>
diff --git a/debezium-quarkus-outbox/deployment/pom.xml b/debezium-quarkus-outbox/deployment/pom.xml
index 27e56b84c4c..0b7bcc81507 100644
--- a/debezium-quarkus-outbox/deployment/pom.xml
+++ b/debezium-quarkus-outbox/deployment/pom.xml
@@ -6,7 +6,7 @@
     <parent>
         <groupId>io.debezium</groupId>
         <artifactId>debezium-quarkus-outbox-parent</artifactId>
-        <version>1.2.0-SNAPSHOT</version>
+        <version>1.2.0-SNAPSHOT-cs</version>
         <relativePath>../pom.xml</relativePath>
     </parent>
diff --git a/debezium-quarkus-outbox/integration-tests/pom.xml b/debezium-quarkus-outbox/integration-tests/pom.xml
index b333c326ea4..cbcd0218f7a 100644
--- a/debezium-quarkus-outbox/integration-tests/pom.xml
+++ b/debezium-quarkus-outbox/integration-tests/pom.xml
@@ -6,7 +6,7 @@
     <parent>
         <groupId>io.debezium</groupId>
         <artifactId>debezium-quarkus-outbox-parent</artifactId>
-        <version>1.2.0-SNAPSHOT</version>
+        <version>1.2.0-SNAPSHOT-cs</version>
         <relativePath>../pom.xml</relativePath>
     </parent>
@@ -155,4 +155,4 @@
-</project>
\ No newline at end of file
+</project>
diff --git a/debezium-quarkus-outbox/pom.xml b/debezium-quarkus-outbox/pom.xml
index e8648445ce5..42e5ad50359 100644
--- a/debezium-quarkus-outbox/pom.xml
+++ b/debezium-quarkus-outbox/pom.xml
@@ -6,7 +6,7 @@
     <parent>
         <groupId>io.debezium</groupId>
         <artifactId>debezium-parent</artifactId>
-        <version>1.2.0-SNAPSHOT</version>
+        <version>1.2.0-SNAPSHOT-cs</version>
         <relativePath>../pom.xml</relativePath>
     </parent>
diff --git a/debezium-quarkus-outbox/runtime/pom.xml b/debezium-quarkus-outbox/runtime/pom.xml
index 28eb7c3b37e..9a3e41a305b 100644
--- a/debezium-quarkus-outbox/runtime/pom.xml
+++ b/debezium-quarkus-outbox/runtime/pom.xml
@@ -6,7 +6,7 @@
     <parent>
         <groupId>io.debezium</groupId>
         <artifactId>debezium-quarkus-outbox-parent</artifactId>
-        <version>1.2.0-SNAPSHOT</version>
+        <version>1.2.0-SNAPSHOT-cs</version>
         <relativePath>../pom.xml</relativePath>
     </parent>
@@ -33,7 +33,6 @@
         <dependency>
             <groupId>io.quarkus</groupId>
             <artifactId>quarkus-junit5-internal</artifactId>
-            <version>${quarkus.version}</version>
             <scope>test</scope>
         </dependency>
diff --git a/debezium-server/pom.xml b/debezium-server/pom.xml
new file mode 100644
index 00000000000..6051d1b6503
--- /dev/null
+++ b/debezium-server/pom.xml
@@ -0,0 +1,229 @@
+<?xml version="1.0"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+
+    <parent>
+        <groupId>io.debezium</groupId>
+        <artifactId>debezium-parent</artifactId>
+        <version>1.2.0-SNAPSHOT-cs</version>
+        <relativePath>../pom.xml</relativePath>
+    </parent>
+
+    <modelVersion>4.0.0</modelVersion>
+    <artifactId>debezium-server</artifactId>
+    <name>Debezium Standalone Quarkus Server</name>
+    <packaging>jar</packaging>
+
+    <properties>
+        <version.kinesis>2.13.13</version.kinesis>
+        <assembly.descriptor>server-distribution</assembly.descriptor>
+    </properties>
+
+    <dependencyManagement>
+        <dependencies>
+            <dependency>
+                <groupId>io.quarkus</groupId>
+                <artifactId>quarkus-bom-deployment</artifactId>
+                <version>${quarkus.version}</version>
+                <type>pom</type>
+                <scope>import</scope>
+            </dependency>
+            <dependency>
+                <groupId>software.amazon.awssdk</groupId>
+                <artifactId>kinesis</artifactId>
+                <version>${version.kinesis}</version>
+            </dependency>
+        </dependencies>
+    </dependencyManagement>
+
+    <dependencies>
+        <dependency>
+            <groupId>io.quarkus</groupId>
+            <artifactId>quarkus-core</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>io.quarkus</groupId>
+            <artifactId>quarkus-smallrye-health</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>io.quarkus</groupId>
+            <artifactId>quarkus-resteasy</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>io.debezium</groupId>
+            <artifactId>debezium-api</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>io.debezium</groupId>
+            <artifactId>debezium-embedded</artifactId>
+            <exclusions>
+                <exclusion>
+                    <groupId>org.slf4j</groupId>
+                    <artifactId>slf4j-log4j12</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+
+        <!-- Target systems -->
+        <dependency>
+            <groupId>software.amazon.awssdk</groupId>
+            <artifactId>kinesis</artifactId>
+            <optional>true</optional>
+        </dependency>
+
+        <!-- Testing -->
+        <dependency>
+            <groupId>io.quarkus</groupId>
+            <artifactId>quarkus-junit5</artifactId>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.easytesting</groupId>
+            <artifactId>fest-assert</artifactId>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>io.debezium</groupId>
+            <artifactId>debezium-core</artifactId>
+            <type>test-jar</type>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.awaitility</groupId>
+            <artifactId>awaitility</artifactId>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>io.confluent</groupId>
+            <artifactId>kafka-connect-avro-converter</artifactId>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.testcontainers</groupId>
+            <artifactId>testcontainers</artifactId>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>io.debezium</groupId>
+            <artifactId>debezium-connector-postgres</artifactId>
+            <scope>test</scope>
+        </dependency>
+    </dependencies>
+
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>io.quarkus</groupId>
+                <artifactId>quarkus-maven-plugin</artifactId>
+                <version>${quarkus.version}</version>
+                <executions>
+                    <execution>
+                        <goals>
+                            <goal>build</goal>
+                        </goals>
+                    </execution>
+                </executions>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-failsafe-plugin</artifactId>
+                <executions>
+                    <execution>
+                        <id>integration-test</id>
+                        <goals>
+                            <goal>integration-test</goal>
+                        </goals>
+                    </execution>
+                    <execution>
+                        <id>verify</id>
+                        <goals>
+                            <goal>verify</goal>
+                        </goals>
+                    </execution>
+                </executions>
+                <configuration>
+                    <skipTests>${skipITs}</skipTests>
+                    <enableAssertions>true</enableAssertions>
+                    <systemPropertyVariables>
+                        <test.type>IT</test.type>
+                    </systemPropertyVariables>
+                </configuration>
+            </plugin>
+        </plugins>
+        <resources>
+            <resource>
+                <filtering>true</filtering>
+                <directory>src/main/resources</directory>
+                <includes>
+                    <include>**/build.properties</include>
+                </includes>
+            </resource>
+        </resources>
+    </build>
+
+    <profiles>
+        <profile>
+            <id>assembly</id>
+            <activation>
+                <activeByDefault>false</activeByDefault>
+            </activation>
+            <dependencies>
+                <dependency>
+                    <groupId>io.debezium</groupId>
+                    <artifactId>debezium-connector-mysql</artifactId>
+                </dependency>
+                <dependency>
+                    <groupId>io.debezium</groupId>
+                    <artifactId>debezium-connector-postgres</artifactId>
+                    <scope>runtime</scope>
+                </dependency>
+                <dependency>
+                    <groupId>io.debezium</groupId>
+                    <artifactId>debezium-connector-mongodb</artifactId>
+                </dependency>
+                <dependency>
+                    <groupId>io.debezium</groupId>
+                    <artifactId>debezium-connector-sqlserver</artifactId>
+                </dependency>
+            </dependencies>
+            <build>
+                <plugins>
+                    <plugin>
+                        <groupId>org.apache.maven.plugins</groupId>
+                        <artifactId>maven-assembly-plugin</artifactId>
+                        <version>${version.assembly.plugin}</version>
+                        <dependencies>
+                            <dependency>
+                                <groupId>io.debezium</groupId>
+                                <artifactId>debezium-assembly-descriptors</artifactId>
+                                <version>${project.version}</version>
+                            </dependency>
+                        </dependencies>
+                        <executions>
+                            <execution>
+                                <id>default</id>
+                                <phase>package</phase>
+                                <goals>
+                                    <goal>single</goal>
+                                </goals>
+                                <configuration>
+                                    <finalName>${project.artifactId}-${project.version}</finalName>
+                                    <attach>true</attach>
+                                    <descriptorRefs>
+                                        <descriptorRef>${assembly.descriptor}</descriptorRef>
+                                    </descriptorRefs>
+                                    <tarLongFileMode>posix</tarLongFileMode>
+                                </configuration>
+                            </execution>
+                        </executions>
+                    </plugin>
+                </plugins>
+            </build>
+        </profile>
+    </profiles>
+</project>
diff --git a/debezium-server/src/main/distro/conf/application.properties b/debezium-server/src/main/distro/conf/application.properties
new file mode 100644
index 00000000000..70744913bef
--- /dev/null
+++ b/debezium-server/src/main/distro/conf/application.properties
@@ -0,0 +1,12 @@
+debezium.sink.type=kinesis
+debezium.sink.kinesis.region=eu-central-1
+debezium.source.connector.class=io.debezium.connector.postgresql.PostgresConnector
+debezium.source.offset.storage.file.filename=data/offsets.dat
+debezium.source.offset.flush.interval.ms=0
+debezium.source.database.hostname=localhost
+debezium.source.database.port=5432
+debezium.source.database.user=postgres
+debezium.source.database.password=postgres
+debezium.source.database.dbname=postgres
+debezium.source.database.server.name=tutorial
+debezium.source.schema.whitelist=inventory
diff --git a/debezium-server/src/main/distro/run.sh b/debezium-server/src/main/distro/run.sh
new file mode 100755
index 00000000000..6b557edb85f
--- /dev/null
+++ b/debezium-server/src/main/distro/run.sh
@@ -0,0 +1,16 @@
+#!/bin/bash
+#
+# Copyright Debezium Authors.
+#
+# Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
+#
+
+if [ -z "$JAVA_HOME" ]; then
+ JAVA_BINARY="java"
+else
+ JAVA_BINARY="$JAVA_HOME/bin/java"
+fi
+
+RUNNER=$(ls debezium-server-*runner.jar)
+
+exec $JAVA_BINARY $DEBEZIUM_OPTS $JAVA_OPTS -cp "$RUNNER:conf:lib/*" io.debezium.server.Main
diff --git a/debezium-server/src/main/java/io/debezium/server/ConnectorLifecycle.java b/debezium-server/src/main/java/io/debezium/server/ConnectorLifecycle.java
new file mode 100644
index 00000000000..0f5c13a1594
--- /dev/null
+++ b/debezium-server/src/main/java/io/debezium/server/ConnectorLifecycle.java
@@ -0,0 +1,93 @@
+/*
+ * Copyright Debezium Authors.
+ *
+ * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
+ */
+package io.debezium.server;
+
+import javax.enterprise.context.ApplicationScoped;
+import javax.enterprise.event.Event;
+import javax.inject.Inject;
+
+import org.eclipse.microprofile.health.HealthCheck;
+import org.eclipse.microprofile.health.HealthCheckResponse;
+import org.eclipse.microprofile.health.Liveness;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import io.debezium.engine.DebeziumEngine;
+import io.debezium.server.events.ConnectorCompletedEvent;
+import io.debezium.server.events.ConnectorStartedEvent;
+import io.debezium.server.events.ConnectorStoppedEvent;
+import io.debezium.server.events.TaskStartedEvent;
+import io.debezium.server.events.TaskStoppedEvent;
+
+/**
+ * The server lifecycle listener that publishes CDI events based on the lifecycle changes and also provides
+ * MicroProfile Health information.
+ *
+ * @author Jiri Pechanec
+ *
+ */
+@Liveness
+@ApplicationScoped
+public class ConnectorLifecycle implements HealthCheck, DebeziumEngine.ConnectorCallback, DebeziumEngine.CompletionCallback {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(ConnectorLifecycle.class);
+
+ private volatile boolean live = false;
+
+ @Inject
+ Event<ConnectorStartedEvent> connectorStartedEvent;
+
+ @Inject
+ Event<ConnectorStoppedEvent> connectorStoppedEvent;
+
+ @Inject
+ Event<TaskStartedEvent> taskStartedEvent;
+
+ @Inject
+ Event<TaskStoppedEvent> taskStoppedEvent;
+
+ @Inject
+ Event<ConnectorCompletedEvent> connectorCompletedEvent;
+
+ @Override
+ public void connectorStarted() {
+ LOGGER.debug("Connector started");
+ connectorStartedEvent.fire(new ConnectorStartedEvent());
+ }
+
+ @Override
+ public void connectorStopped() {
+ LOGGER.debug("Connector stopped");
+ connectorStoppedEvent.fire(new ConnectorStoppedEvent());
+ }
+
+ @Override
+ public void taskStarted() {
+ LOGGER.debug("Task started");
+ taskStartedEvent.fire(new TaskStartedEvent());
+ live = true;
+ }
+
+ @Override
+ public void taskStopped() {
+ LOGGER.debug("Task stopped");
+ taskStoppedEvent.fire(new TaskStoppedEvent());
+ }
+
+ @Override
+ public void handle(boolean success, String message, Throwable error) {
+ LOGGER.info("Connector completed: success = '{}', message = '{}', error = '{}'", success, message, error);
+ connectorCompletedEvent.fire(new ConnectorCompletedEvent(success, message, error));
+ live = false;
+ }
+
+ @Override
+ public HealthCheckResponse call() {
+ LOGGER.trace("Healthcheck called - live = '{}'", live);
+ return HealthCheckResponse.named("debezium").state(live).build();
+ }
+
+}
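
Because these lifecycle notifications are plain CDI events, any bean in the server application can observe them. A hedged sketch (assuming ConnectorCompletedEvent exposes its success flag and error roughly as shown):

    import javax.enterprise.context.ApplicationScoped;
    import javax.enterprise.event.Observes;

    import io.debezium.server.events.ConnectorCompletedEvent;

    @ApplicationScoped
    public class CompletionListener {

        void onConnectorCompleted(@Observes ConnectorCompletedEvent event) {
            // React to the connector finishing, e.g. alerting on failure;
            // isSuccess()/getError() are assumed accessors on the event.
            if (!event.isSuccess()) {
                event.getError().ifPresent(Throwable::printStackTrace);
            }
        }
    }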
diff --git a/debezium-server/src/main/java/io/debezium/server/CustomConsumerBuilder.java b/debezium-server/src/main/java/io/debezium/server/CustomConsumerBuilder.java
new file mode 100644
index 00000000000..e185214ed73
--- /dev/null
+++ b/debezium-server/src/main/java/io/debezium/server/CustomConsumerBuilder.java
@@ -0,0 +1,25 @@
+/*
+ * Copyright Debezium Authors.
+ *
+ * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
+ */
+package io.debezium.server;
+
+import static java.lang.annotation.ElementType.FIELD;
+import static java.lang.annotation.ElementType.METHOD;
+import static java.lang.annotation.ElementType.PARAMETER;
+import static java.lang.annotation.ElementType.TYPE;
+import static java.lang.annotation.RetentionPolicy.RUNTIME;
+
+import java.lang.annotation.Documented;
+import java.lang.annotation.Retention;
+import java.lang.annotation.Target;
+
+import javax.inject.Qualifier;
+
+@Qualifier
+@Target({ TYPE, METHOD, PARAMETER, FIELD })
+@Retention(RUNTIME)
+@Documented
+public @interface CustomConsumerBuilder {
+}
diff --git a/debezium-server/src/main/java/io/debezium/server/DebeziumServer.java b/debezium-server/src/main/java/io/debezium/server/DebeziumServer.java
new file mode 100644
index 00000000000..7e732655f1f
--- /dev/null
+++ b/debezium-server/src/main/java/io/debezium/server/DebeziumServer.java
@@ -0,0 +1,206 @@
+/*
+ * Copyright Debezium Authors.
+ *
+ * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
+ */
+package io.debezium.server;
+
+import java.util.Optional;
+import java.util.Properties;
+import java.util.Set;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.TimeUnit;
+import java.util.stream.Collectors;
+
+import javax.annotation.PostConstruct;
+import javax.enterprise.context.ApplicationScoped;
+import javax.enterprise.context.spi.CreationalContext;
+import javax.enterprise.event.Observes;
+import javax.enterprise.inject.spi.Bean;
+import javax.enterprise.inject.spi.BeanManager;
+import javax.inject.Inject;
+
+import org.eclipse.microprofile.config.Config;
+import org.eclipse.microprofile.config.ConfigProvider;
+import org.eclipse.microprofile.health.Liveness;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import io.debezium.DebeziumException;
+import io.debezium.engine.ChangeEvent;
+import io.debezium.engine.DebeziumEngine;
+import io.debezium.engine.DebeziumEngine.ChangeConsumer;
+import io.debezium.engine.format.Avro;
+import io.debezium.engine.format.Json;
+import io.debezium.engine.format.SerializationFormat;
+import io.quarkus.runtime.ShutdownEvent;
+import io.quarkus.runtime.Startup;
+
+/**
+ * The entry point of the Quarkus-based standalone server. The server is configured via Quarkus/MicroProfile
+ * Configuration sources and provides a few out-of-the-box target implementations.
+ *
+ * The implementation uses CDI to find all classes that implement the {@link DebeziumEngine.ChangeConsumer} interface.
+ * Candidate classes should be annotated with the {@code @Named} annotation and should be {@code @Dependent} scoped.
+ *
+ * The configuration option {@code debezium.sink.type} provides the name of the consumer that should be used; the value
+ * must match exactly one of the implementation classes.
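+ *
+ * For illustration, a minimal configuration could look like this (connector and host values are hypothetical):
+ *
+ * <pre>
+ * debezium.sink.type=kinesis
+ * debezium.sink.kinesis.region=eu-central-1
+ * debezium.source.connector.class=io.debezium.connector.postgresql.PostgresConnector
+ * debezium.source.database.hostname=localhost
+ * debezium.format.value=json
+ * </pre>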
+ *
+ * @author Jiri Pechanec
+ *
+ */
+@ApplicationScoped
+@Startup
+public class DebeziumServer {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(DebeziumServer.class);
+
+ private static final String PROP_PREFIX = "debezium.";
+ private static final String PROP_SOURCE_PREFIX = PROP_PREFIX + "source.";
+ private static final String PROP_SINK_PREFIX = PROP_PREFIX + "sink.";
+ private static final String PROP_FORMAT_PREFIX = PROP_PREFIX + "format.";
+ private static final String PROP_TRANSFORMS_PREFIX = PROP_PREFIX + "transforms.";
+ private static final String PROP_KEY_FORMAT_PREFIX = PROP_FORMAT_PREFIX + "key.";
+ private static final String PROP_VALUE_FORMAT_PREFIX = PROP_FORMAT_PREFIX + "value.";
+
+ private static final String PROP_TRANSFORMS = PROP_PREFIX + "transforms";
+ private static final String PROP_SINK_TYPE = PROP_SINK_PREFIX + "type";
+ private static final String PROP_KEY_FORMAT = PROP_FORMAT_PREFIX + "key";
+ private static final String PROP_VALUE_FORMAT = PROP_FORMAT_PREFIX + "value";
+ private static final String PROP_TERMINATION_WAIT = PROP_PREFIX + "termination.wait";
+
+ private static final String FORMAT_JSON = Json.class.getSimpleName().toLowerCase();
+ private static final String FORMAT_AVRO = Avro.class.getSimpleName().toLowerCase();
+
+ private ExecutorService executor = Executors.newSingleThreadExecutor();
+
+ @Inject
+ BeanManager beanManager;
+
+ @Inject
+ @Liveness
+ ConnectorLifecycle health;
+
+    private Bean<DebeziumEngine.ChangeConsumer<ChangeEvent<Object, Object>>> consumerBean;
+    private CreationalContext<DebeziumEngine.ChangeConsumer<ChangeEvent<Object, Object>>> consumerBeanCreationalContext;
+    private DebeziumEngine.ChangeConsumer<ChangeEvent<Object, Object>> consumer;
+    private DebeziumEngine<?> engine;
+
+ @SuppressWarnings("unchecked")
+ @PostConstruct
+ public void start() {
+ final Config config = ConfigProvider.getConfig();
+ final String name = config.getValue(PROP_SINK_TYPE, String.class);
+
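+        // Resolve the consumer by its CDI @Named value and keep only DebeziumEngine.ChangeConsumer
+        // implementations; startup fails unless exactly one candidate matches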
+        final Set<Bean<?>> beans = beanManager.getBeans(name).stream()
+ .filter(x -> DebeziumEngine.ChangeConsumer.class.isAssignableFrom(x.getBeanClass()))
+ .collect(Collectors.toSet());
+ LOGGER.debug("Found {} candidate consumer(s)", beans.size());
+
+ if (beans.size() == 0) {
+ throw new DebeziumException("No Debezium consumer named '" + name + "' is available");
+ }
+ else if (beans.size() > 1) {
+ throw new DebeziumException("Multiple Debezium consumers named '" + name + "' were found");
+ }
+
+        consumerBean = (Bean<DebeziumEngine.ChangeConsumer<ChangeEvent<Object, Object>>>) beans.iterator().next();
+ consumerBeanCreationalContext = beanManager.createCreationalContext(consumerBean);
+ consumer = consumerBean.create(consumerBeanCreationalContext);
+ LOGGER.info("Consumer '{}' instantiated", consumer.getClass().getName());
+
+        final Class<? extends SerializationFormat<?>> keyFormat = getFormat(config, PROP_KEY_FORMAT);
+        final Class<? extends SerializationFormat<?>> valueFormat = getFormat(config, PROP_VALUE_FORMAT);
+ final Properties props = new Properties();
+ configToProperties(config, props, PROP_SOURCE_PREFIX, "");
+ configToProperties(config, props, PROP_FORMAT_PREFIX, "key.converter.");
+ configToProperties(config, props, PROP_FORMAT_PREFIX, "value.converter.");
+ configToProperties(config, props, PROP_KEY_FORMAT_PREFIX, "key.converter.");
+ configToProperties(config, props, PROP_VALUE_FORMAT_PREFIX, "value.converter.");
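+        // The optional SMT chain, e.g. debezium.transforms=router together with debezium.transforms.router.*
+        // options (the name 'router' is hypothetical), is remapped to the engine's transforms.* properties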
+        final Optional<String> transforms = config.getOptionalValue(PROP_TRANSFORMS, String.class);
+ if (transforms.isPresent()) {
+ props.setProperty("transforms", transforms.get());
+ configToProperties(config, props, PROP_TRANSFORMS_PREFIX, "transforms.");
+ }
+ props.setProperty("name", name);
+ LOGGER.debug("Configuration for DebeziumEngine: {}", props);
+
+        DebeziumEngine.Builder<?> builder = null;
+        // TODO - apply variance and covariance rules on Debezium API to
+        // support direct assignment to DebeziumEngine.Builder<?>
+ if (keyFormat == Json.class && valueFormat == Json.class) {
+ builder = createJsonJson(consumer);
+ }
+ else if (keyFormat == Json.class && valueFormat == Avro.class) {
+ builder = createJsonAvro(consumer);
+ }
+ else if (keyFormat == Avro.class && valueFormat == Avro.class) {
+ builder = createAvroAvro(consumer);
+        }
+        else {
+            // Guard against key/value format combinations not handled above, which would otherwise
+            // fail below with a NullPointerException
+            throw new DebeziumException("Unsupported format combination: key = '"
+                    + keyFormat.getSimpleName() + "', value = '" + valueFormat.getSimpleName() + "'");
+        }
+ engine = builder
+ .using(props)
+ .using((DebeziumEngine.ConnectorCallback) health)
+ .using((DebeziumEngine.CompletionCallback) health)
+ .build();
+
+ executor.execute(() -> engine.run());
+ LOGGER.info("Engine executor started");
+ }
+
+    @SuppressWarnings("unchecked")
+    private DebeziumEngine.Builder<?> createJsonJson(DebeziumEngine.ChangeConsumer<?> consumer) {
+        return DebeziumEngine.create(Json.class, Json.class)
+                .notifying((DebeziumEngine.ChangeConsumer<ChangeEvent<String, String>>) consumer);
+    }
+
+    @SuppressWarnings("unchecked")
+    private DebeziumEngine.Builder<?> createAvroAvro(DebeziumEngine.ChangeConsumer<?> consumer) {
+        return DebeziumEngine.create(Avro.class, Avro.class)
+                .notifying((DebeziumEngine.ChangeConsumer<ChangeEvent<byte[], byte[]>>) consumer);
+    }
+
+    @SuppressWarnings("unchecked")
+    private DebeziumEngine.Builder<?> createJsonAvro(DebeziumEngine.ChangeConsumer<?> consumer) {
+        return DebeziumEngine.create(Json.class, Avro.class)
+                .notifying((DebeziumEngine.ChangeConsumer<ChangeEvent<String, byte[]>>) consumer);
+    }
+
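+    /**
+     * Copies every configuration property that starts with {@code oldPrefix} into {@code props}, replacing
+     * the prefix with {@code newPrefix}; e.g. (a hypothetical key) {@code debezium.source.database.hostname}
+     * is passed to the engine as {@code database.hostname}.
+     */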
+ private void configToProperties(Config config, Properties props, String oldPrefix, String newPrefix) {
+ for (String name : config.getPropertyNames()) {
+ if (name.startsWith(oldPrefix)) {
+ props.setProperty(newPrefix + name.substring(oldPrefix.length()), config.getValue(name, String.class));
+ }
+ }
+ }
+
+    private Class<? extends SerializationFormat<?>> getFormat(Config config, String property) {
+ final String formatName = config.getOptionalValue(property, String.class).orElse(FORMAT_JSON);
+ if (FORMAT_JSON.equals(formatName)) {
+ return Json.class;
+ }
+ else if (FORMAT_AVRO.equals(formatName)) {
+ return Avro.class;
+ }
+        throw new DebeziumException("Unknown format '" + formatName + "' for option '" + property + "'");
+ }
+
+ public void stop(@Observes ShutdownEvent event) {
+ try {
+ LOGGER.info("Received request to stop the engine");
+ final Config config = ConfigProvider.getConfig();
+ engine.close();
+ executor.shutdown();
+ executor.awaitTermination(config.getOptionalValue(PROP_TERMINATION_WAIT, Integer.class).orElse(10), TimeUnit.SECONDS);
+ }
+ catch (Exception e) {
+            LOGGER.error("Exception while shutting down Debezium", e);
+ }
+ consumerBean.destroy(consumer, consumerBeanCreationalContext);
+ }
+
+ /**
+ * For test purposes only
+ */
+    DebeziumEngine.ChangeConsumer<ChangeEvent<Object, Object>> getConsumer() {
+ return consumer;
+ }
+}
diff --git a/debezium-server/src/main/java/io/debezium/server/Main.java b/debezium-server/src/main/java/io/debezium/server/Main.java
new file mode 100644
index 00000000000..ea50993aad6
--- /dev/null
+++ b/debezium-server/src/main/java/io/debezium/server/Main.java
@@ -0,0 +1,18 @@
+/*
+ * Copyright Debezium Authors.
+ *
+ * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
+ */
+package io.debezium.server;
+
+import io.quarkus.runtime.Quarkus;
+import io.quarkus.runtime.annotations.QuarkusMain;
+
+@QuarkusMain
+public class Main {
+
+ public static void main(String... args) {
+ Quarkus.run(args);
+ }
+
+}
diff --git a/debezium-server/src/main/java/io/debezium/server/events/ConnectorCompletedEvent.java b/debezium-server/src/main/java/io/debezium/server/events/ConnectorCompletedEvent.java
new file mode 100644
index 00000000000..6a60af3f07b
--- /dev/null
+++ b/debezium-server/src/main/java/io/debezium/server/events/ConnectorCompletedEvent.java
@@ -0,0 +1,57 @@
+/*
+ * Copyright Debezium Authors.
+ *
+ * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
+ */
+package io.debezium.server.events;
+
+import java.util.Optional;
+
+/**
+ * Fired when the connector completes. Provides information about the completion state, a message,
+ * and an optional stack trace in case of an error.
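+ *
+ * A sketch of a CDI observer method for this event (the observer method and logger are hypothetical):
+ *
+ * <pre>
+ * void onConnectorCompleted(&#64;Observes ConnectorCompletedEvent event) {
+ *     if (!event.isSuccess()) {
+ *         LOGGER.error("Connector failed: " + event.getMessage(), event.getError().orElse(null));
+ *     }
+ * }
+ * </pre>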
+ *
+ * @author Jiri Pechanec
+ *
+ */
+public class ConnectorCompletedEvent {
+
+ private final boolean success;
+ private final String message;
+ private final Throwable error;
+
+ public ConnectorCompletedEvent(boolean success, String message, Throwable error) {
+ this.success = success;
+ this.message = message;
+ this.error = error;
+ }
+
+ /**
+ *
+ * @return true if the connector was completed successfully
+ */
+ public boolean isSuccess() {
+ return success;
+ }
+
+ /**
+ *
+     * @return message associated with the connector completion
+ */
+ public String getMessage() {
+ return message;
+ }
+
+ /**
+ *
+ * @return optional error in case the connector has not started successfully or was terminated with an error
+ */
+    public Optional<Throwable> getError() {
+ return Optional.ofNullable(error);
+ }
+
+ @Override
+ public String toString() {
+ return "ConnectorCompletedEvent [success=" + success + ", message=" + message + ", error=" + error + "]";
+ }
+}
diff --git a/debezium-server/src/main/java/io/debezium/server/events/ConnectorStartedEvent.java b/debezium-server/src/main/java/io/debezium/server/events/ConnectorStartedEvent.java
new file mode 100644
index 00000000000..58cdd7766c1
--- /dev/null
+++ b/debezium-server/src/main/java/io/debezium/server/events/ConnectorStartedEvent.java
@@ -0,0 +1,16 @@
+/*
+ * Copyright Debezium Authors.
+ *
+ * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
+ */
+package io.debezium.server.events;
+
+/**
+ * Fired when the connector is started. The initialization is completed but the execution task
+ * is not started yet.
+ *
+ * @author Jiri Pechanec
+ *
+ */
+public class ConnectorStartedEvent {
+}
diff --git a/debezium-server/src/main/java/io/debezium/server/events/ConnectorStoppedEvent.java b/debezium-server/src/main/java/io/debezium/server/events/ConnectorStoppedEvent.java
new file mode 100644
index 00000000000..f0d0adc0228
--- /dev/null
+++ b/debezium-server/src/main/java/io/debezium/server/events/ConnectorStoppedEvent.java
@@ -0,0 +1,15 @@
+/*
+ * Copyright Debezium Authors.
+ *
+ * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
+ */
+package io.debezium.server.events;
+
+/**
+ * Fired when the connector is stopped but the final execution completion state is not yet determined.
+ *
+ * @author Jiri Pechanec
+ *
+ */
+public class ConnectorStoppedEvent {
+}
diff --git a/debezium-server/src/main/java/io/debezium/server/events/TaskStartedEvent.java b/debezium-server/src/main/java/io/debezium/server/events/TaskStartedEvent.java
new file mode 100644
index 00000000000..7b007b57a1e
--- /dev/null
+++ b/debezium-server/src/main/java/io/debezium/server/events/TaskStartedEvent.java
@@ -0,0 +1,15 @@
+/*
+ * Copyright Debezium Authors.
+ *
+ * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
+ */
+package io.debezium.server.events;
+
+/**
+ * Fired right after the connector execution code is started.
+ *
+ * @author Jiri Pechanec
+ *
+ */
+public class TaskStartedEvent {
+}
diff --git a/debezium-server/src/main/java/io/debezium/server/events/TaskStoppedEvent.java b/debezium-server/src/main/java/io/debezium/server/events/TaskStoppedEvent.java
new file mode 100644
index 00000000000..feab430b5bc
--- /dev/null
+++ b/debezium-server/src/main/java/io/debezium/server/events/TaskStoppedEvent.java
@@ -0,0 +1,15 @@
+/*
+ * Copyright Debezium Authors.
+ *
+ * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
+ */
+package io.debezium.server.events;
+
+/**
+ * Fired right after the connector execution code is stopped.
+ *
+ * @author Jiri Pechanec
+ *
+ */
+public class TaskStoppedEvent {
+}
diff --git a/debezium-server/src/main/java/io/debezium/server/kinesis/KinesisChangeConsumer.java b/debezium-server/src/main/java/io/debezium/server/kinesis/KinesisChangeConsumer.java
new file mode 100644
index 00000000000..be322d311ce
--- /dev/null
+++ b/debezium-server/src/main/java/io/debezium/server/kinesis/KinesisChangeConsumer.java
@@ -0,0 +1,136 @@
+/*
+ * Copyright Debezium Authors.
+ *
+ * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
+ */
+package io.debezium.server.kinesis;
+
+import java.nio.charset.StandardCharsets;
+import java.util.List;
+
+import javax.annotation.PostConstruct;
+import javax.annotation.PreDestroy;
+import javax.enterprise.context.Dependent;
+import javax.enterprise.inject.Instance;
+import javax.inject.Inject;
+import javax.inject.Named;
+
+import org.eclipse.microprofile.config.Config;
+import org.eclipse.microprofile.config.ConfigProvider;
+import org.eclipse.microprofile.config.inject.ConfigProperty;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import io.debezium.DebeziumException;
+import io.debezium.engine.ChangeEvent;
+import io.debezium.engine.DebeziumEngine;
+import io.debezium.engine.DebeziumEngine.RecordCommitter;
+import io.debezium.server.CustomConsumerBuilder;
+import io.debezium.server.StreamNameMapper;
+
+import software.amazon.awssdk.auth.credentials.ProfileCredentialsProvider;
+import software.amazon.awssdk.core.SdkBytes;
+import software.amazon.awssdk.regions.Region;
+import software.amazon.awssdk.services.kinesis.KinesisClient;
+import software.amazon.awssdk.services.kinesis.model.PutRecordRequest;
+
+/**
+ * Implementation of the consumer that delivers the messages into Amazon Kinesis destination.
+ *
+ * @author Jiri Pechanec
+ *
+ */
+@Named("kinesis")
+@Dependent
+public class KinesisChangeConsumer implements DebeziumEngine.ChangeConsumer<ChangeEvent<Object, Object>> {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(KinesisChangeConsumer.class);
+
+ private static final String PROP_PREFIX = "debezium.sink.kinesis.";
+ private static final String PROP_REGION_NAME = PROP_PREFIX + "region";
+
+ private String region;
+
+ @ConfigProperty(name = PROP_PREFIX + "credentials.profile", defaultValue = "default")
+ String credentialsProfile;
+
+ @ConfigProperty(name = PROP_PREFIX + "null.key", defaultValue = "default")
+ String nullKey;
+
+ private KinesisClient client = null;
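+    // Maps a change event destination (topic) name to a Kinesis stream name; defaults to the identity
+    // mapping unless a custom StreamNameMapper bean is provided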
+ private StreamNameMapper streamNameMapper = (x) -> x;
+
+ @Inject
+ @CustomConsumerBuilder
+    Instance<KinesisClient> customClient;
+
+ @Inject
+    Instance<StreamNameMapper> customStreamNameMapper;
+
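+    /**
+     * Prefers a custom {@code KinesisClient} (qualified with {@code @CustomConsumerBuilder}) and a custom
+     * {@code StreamNameMapper} when the application provides them; otherwise a default client is built
+     * from the configured region and AWS credentials profile.
+     */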
+ @PostConstruct
+ void connect() {
+ if (customStreamNameMapper.isResolvable()) {
+ streamNameMapper = customStreamNameMapper.get();
+ }
+ LOGGER.info("Using '{}' stream name mapper", streamNameMapper);
+ if (customClient.isResolvable()) {
+ client = customClient.get();
+ LOGGER.info("Obtained custom configured KinesisClient '{}'", client);
+ return;
+ }
+
+ final Config config = ConfigProvider.getConfig();
+ region = config.getValue(PROP_REGION_NAME, String.class);
+ client = KinesisClient.builder()
+ .region(Region.of(region))
+ .credentialsProvider(ProfileCredentialsProvider.create(credentialsProfile))
+ .build();
+ LOGGER.info("Using default KinesisClient '{}'", client);
+ }
+
+ @PreDestroy
+ void close() {
+ try {
+ client.close();
+ }
+ catch (Exception e) {
+            LOGGER.warn("Exception while closing Kinesis client", e);
+ }
+ }
+
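+    // Event keys and values arrive as String for the json format or byte[] for the avro format;
+    // any other payload type is rejected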
+ private byte[] getByte(Object object) {
+ if (object instanceof byte[]) {
+ return (byte[]) object;
+ }
+ else if (object instanceof String) {
+            // Use an explicit charset rather than the platform default
+            return ((String) object).getBytes(StandardCharsets.UTF_8);
+ }
+ throw new DebeziumException(unsupportedTypeMessage(object));
+ }
+
+ private String getString(Object object) {
+ if (object instanceof String) {
+ return (String) object;
+ }
+ throw new DebeziumException(unsupportedTypeMessage(object));
+ }
+
+ public String unsupportedTypeMessage(Object object) {
+ final String type = (object == null) ? "null" : object.getClass().getName();
+ return "Unexpected data type '" + type + "'";
+ }
+
+ @Override
+    public void handleBatch(List<ChangeEvent<Object, Object>> records, RecordCommitter<ChangeEvent<Object, Object>> committer)
+ throws InterruptedException {
+ for (ChangeEvent