forked from scylladb/kafka-connect-scylladb
-
Notifications
You must be signed in to change notification settings - Fork 0
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Add complex types support and tests.
Adds support for all Collections (List, Set, Map), UDT and Tuples. Adds tests that attempt inserting into columns of complex type. Those tests check if implemented codecs kick in when needed. Kafka Struct fields get translated into UDTs. Connector will try to infer correct UserType from table column. If table does not exist yet or is being altered the connector will try to find UserType with the same name as struct's schema. In all cases the UserType needs to already be created in targeted keyspace. Kafka Maps get translated into Scylla maps, some can be inserted into UDTs. Kafka Arrays can be inserted into Lists, Sets and Tuples. Kafka Structs can be inserted into UDTs and Tuples. Updates README with basic information about supported types on Scylla side. All added types should work with table managed by Scylla. UDTs, Maps, Lists, Sets should work with table managed by connector. Fixes scylladb#60
- Loading branch information
Showing
9 changed files
with
669 additions
and
6 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
52 changes: 52 additions & 0 deletions
52
src/main/java/io/connect/scylladb/codec/ListTupleCodec.java
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,52 @@ | ||
package io.connect.scylladb.codec;

import com.datastax.driver.core.*;
import com.datastax.driver.extras.codecs.MappingCodec;
import com.google.common.reflect.TypeToken;
import org.apache.kafka.connect.data.Field;
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.Struct;

import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

/**
 * One-way codec that serializes a Kafka Connect {@link List} into a Scylla/Cassandra
 * tuple value. List elements are written positionally: element {@code i} is bound to
 * tuple component {@code i}, using whatever codec the registry resolves for that
 * component's {@link DataType} and the element's runtime class.
 *
 * <p>Deserialization (tuple back to {@code List}) is intentionally unsupported; this
 * codec exists only for the sink-connector write path.
 */
public class ListTupleCodec extends MappingCodec<List, TupleValue> {

  /** Registry used to resolve a per-component codec at serialization time. */
  private final CodecRegistry registry;
  /** Tuple type of the target column; fixes the component count and component types. */
  private final TupleType definition;

  /**
   * @param registry   codec registry for resolving component codecs; must not be null
   * @param definition tuple type this codec serializes into; must not be null
   */
  public ListTupleCodec(CodecRegistry registry, TupleType definition) {
    super(TypeCodec.tuple(definition), List.class);
    this.registry = registry;
    this.definition = definition;
  }

  /**
   * Converts {@code list} into a {@link TupleValue} of {@link #definition}.
   *
   * @return the populated tuple value, or null when {@code list} is null
   * @throws IllegalArgumentException if the list size differs from the tuple's
   *     component count (tuples are fixed-arity, so a partial bind is rejected)
   */
  @Override
  protected TupleValue serialize(List list) {
    if (list == null) {
      return null;
    }

    // Hoist the component-type list: it is consulted once for the size check and
    // once per element below.
    List<DataType> componentTypes = definition.getComponentTypes();
    int size = componentTypes.size();
    int listSize = list.size();
    if (listSize != size) {
      throw new IllegalArgumentException(
          String.format("Expecting %d fields, got %d", size, listSize));
    }

    TupleValue value = definition.newValue();
    // Iterate rather than index so non-random-access lists are not O(n^2).
    Iterator<?> iter = list.iterator();
    for (int i = 0; i < size; i++) {
      Object item = iter.next();
      // codecFor(type, value) picks a codec from both the CQL type and the
      // element's runtime class, so heterogeneous tuple components each get
      // an appropriate codec.
      value.set(i, item, registry.codecFor(componentTypes.get(i), item));
    }
    return value;
  }

  /** Not supported: this codec only covers the write (serialize) direction. */
  @Override
  protected List deserialize(TupleValue value) {
    throw new UnsupportedOperationException("This codec (" + this.getClass().getSimpleName()
        + ") does not support deserialization from Tuple to List");
  }
}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,58 @@ | ||
package io.connect.scylladb.codec;

import com.datastax.driver.core.CodecRegistry;
import com.datastax.driver.core.DataType;
import com.datastax.driver.core.UDTValue;
import com.datastax.driver.core.UserType;
import com.datastax.driver.extras.codecs.MappingCodec;
import com.google.common.reflect.TypeToken;

import java.util.Map;

/**
 * One-way codec that serializes a String-keyed Kafka Connect {@link Map} into a
 * Scylla/Cassandra user-defined type (UDT) value. Every field declared by the UDT
 * must be present as a key in the map, and the map must contain no extra entries.
 *
 * <p>Deserialization (UDT back to {@code Map}) is intentionally unsupported; this
 * codec exists only for the sink-connector write path.
 */
public class MapUDTCodec extends MappingCodec<Map, UDTValue> {

  /** Registry used to resolve a per-field codec at serialization time. */
  final CodecRegistry registry;
  /** UDT of the target column; fixes the field names and field types. */
  final UserType definition;

  /**
   * @param registry codec registry for resolving field codecs; must not be null
   * @param udt      user type this codec serializes into; must not be null
   */
  public MapUDTCodec(CodecRegistry registry, UserType udt) {
    super(registry.codecFor(udt), Map.class);
    this.registry = registry;
    this.definition = udt;
  }

  /**
   * Converts {@code map} into a {@link UDTValue} of {@link #definition}.
   *
   * @return the populated UDT value, or null when {@code map} is null or empty
   *     (an empty map is treated as "no value" rather than a size mismatch)
   * @throws UnsupportedOperationException if the map's keys are not Strings
   * @throws IllegalArgumentException if the map's size differs from the UDT's
   *     field count, or a declared UDT field is missing from the map
   */
  @Override
  protected UDTValue serialize(Map map) {
    if (map == null || map.isEmpty()) {
      return null;
    }
    // Fast-fail heuristic: only the first key is inspected here. Non-String keys
    // elsewhere in the map are still rejected below, because containsKey(String)
    // will not match them and the "not found" branch throws.
    if (!(map.keySet().iterator().next() instanceof String)) {
      throw new UnsupportedOperationException("This codec (" + this.getClass().getSimpleName()
          + ") handles only Maps that have String as their key type.");
    }
    int size = definition.getFieldNames().size();
    int mapSize = map.size();
    if (mapSize != size) {
      throw new IllegalArgumentException(
          String.format("Expecting %d fields, got %d", size, mapSize));
    }

    final UDTValue value = definition.newValue();
    definition.getFieldNames().stream().forEach(fieldName -> {
      if (!map.containsKey(fieldName)) {
        throw new IllegalArgumentException(
            String.format(
                "Field %s in UDT %s not found in input map",
                fieldName, definition.getName()));
      }
      // Single lookup; the value participates in both codec resolution and the bind.
      Object fieldValue = map.get(fieldName);
      DataType fieldType = definition.getFieldType(fieldName);
      value.set(fieldName, fieldValue, registry.codecFor(fieldType, fieldValue));
    });

    return value;
  }

  /** Not supported: this codec only covers the write (serialize) direction. */
  @Override
  protected Map deserialize(UDTValue value) {
    throw new UnsupportedOperationException("This codec (" + this.getClass().getSimpleName()
        + ") does not support deserialization from UDT to Map");
  }
}
50 changes: 50 additions & 0 deletions
50
src/main/java/io/connect/scylladb/codec/StructTupleCodec.java
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,50 @@ | ||
package io.connect.scylladb.codec;

import com.datastax.driver.core.*;
import com.datastax.driver.extras.codecs.MappingCodec;
import org.apache.kafka.connect.data.Field;
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.Struct;

import java.util.Set;
import java.util.stream.Collectors;

/**
 * One-way codec that serializes a Kafka Connect {@link Struct} into a
 * Scylla/Cassandra tuple value. Struct fields are written positionally: the
 * struct schema's field {@code i} is bound to tuple component {@code i}; field
 * names are ignored because tuples are unnamed.
 *
 * <p>Deserialization (tuple back to {@code Struct}) is intentionally unsupported;
 * this codec exists only for the sink-connector write path.
 */
public class StructTupleCodec extends MappingCodec<Struct, TupleValue> {

  /** Registry used to resolve a per-component codec at serialization time. */
  private final CodecRegistry registry;
  /** Tuple type of the target column; fixes the component count and component types. */
  private final TupleType definition;

  /**
   * @param registry   codec registry for resolving component codecs; must not be null
   * @param definition tuple type this codec serializes into; must not be null
   */
  public StructTupleCodec(CodecRegistry registry, TupleType definition) {
    super(TypeCodec.tuple(definition), Struct.class);
    this.registry = registry;
    this.definition = definition;
  }

  /**
   * Converts {@code struct} into a {@link TupleValue} of {@link #definition}.
   *
   * @return the populated tuple value, or null when {@code struct} is null
   * @throws IllegalArgumentException if the struct's field count differs from the
   *     tuple's component count (tuples are fixed-arity, so a partial bind is rejected)
   */
  @Override
  protected TupleValue serialize(Struct struct) {
    if (struct == null) {
      return null;
    }

    // Hoist the component-type list: consulted for the size check and once per field.
    java.util.List<DataType> componentTypes = definition.getComponentTypes();
    int size = componentTypes.size();
    Schema schema = struct.schema();
    int structSize = schema.fields().size();
    if (structSize != size) {
      throw new IllegalArgumentException(
          String.format("Expecting %d fields, got %d", size, structSize));
    }

    TupleValue value = definition.newValue();
    for (int i = 0; i < size; i++) {
      Object field = struct.get(schema.fields().get(i));
      // codecFor(type, value) picks a codec from both the CQL component type and
      // the field's runtime class.
      value.set(i, field, registry.codecFor(componentTypes.get(i), field));
    }
    return value;
  }

  /** Not supported: this codec only covers the write (serialize) direction. */
  @Override
  protected Struct deserialize(TupleValue value) {
    throw new UnsupportedOperationException("This codec (" + this.getClass().getSimpleName()
        + ") does not support deserialization from Tuple to Struct");
  }
}
59 changes: 59 additions & 0 deletions
59
src/main/java/io/connect/scylladb/codec/StructUDTCodec.java
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,59 @@ | ||
package io.connect.scylladb.codec;

import com.datastax.driver.core.CodecRegistry;
import com.datastax.driver.core.DataType;
import com.datastax.driver.core.UDTValue;
import com.datastax.driver.core.UserType;
import com.datastax.driver.extras.codecs.MappingCodec;
import org.apache.kafka.connect.data.Field;
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.Struct;

import java.util.Set;
import java.util.stream.Collectors;

/**
 * One-way codec that serializes a Kafka Connect {@link Struct} into a
 * Scylla/Cassandra user-defined type (UDT) value. Fields are matched by name:
 * every field declared by the UDT must exist in the struct's schema, and the
 * struct must declare no extra fields.
 *
 * <p>Deserialization (UDT back to {@code Struct}) is intentionally unsupported;
 * this codec exists only for the sink-connector write path.
 */
public class StructUDTCodec extends MappingCodec<Struct, UDTValue> {

  /** Registry used to resolve a per-field codec at serialization time. */
  final CodecRegistry registry;
  /** UDT of the target column; fixes the field names and field types. */
  final UserType definition;

  /**
   * @param registry codec registry for resolving field codecs; must not be null
   * @param udt      user type this codec serializes into; must not be null
   */
  public StructUDTCodec(CodecRegistry registry, UserType udt) {
    super(registry.codecFor(udt), Struct.class);
    this.registry = registry;
    this.definition = udt;
  }

  /**
   * Converts {@code struct} into a {@link UDTValue} of {@link #definition}.
   *
   * @return the populated UDT value, or null when {@code struct} is null
   * @throws IllegalArgumentException if the struct's field count differs from the
   *     UDT's field count, or a declared UDT field is absent from the struct schema
   */
  @Override
  protected UDTValue serialize(Struct struct) {
    if (struct == null) {
      return null;
    }

    int size = definition.getFieldNames().size();
    Schema schema = struct.schema();
    int structSize = schema.fields().size();
    // Precompute the struct's field names for O(1) membership checks below.
    Set<String> structFieldNames =
        schema.fields().stream().map(Field::name).collect(Collectors.toSet());
    if (structSize != size) {
      throw new IllegalArgumentException(
          String.format("Expecting %d fields, got %d", size, structSize));
    }

    final UDTValue value = definition.newValue();
    definition.getFieldNames().stream().forEach(fieldName -> {
      if (!structFieldNames.contains(fieldName)) {
        throw new IllegalArgumentException(
            String.format(
                "Field %s in UDT %s not found in input struct",
                fieldName, definition.getName()));
      }
      // Single struct lookup; the value participates in both codec resolution
      // and the bind.
      Object fieldValue = struct.get(fieldName);
      DataType fieldType = definition.getFieldType(fieldName);
      value.set(fieldName, fieldValue, registry.codecFor(fieldType, fieldValue));
    });

    return value;
  }

  /** Not supported: this codec only covers the write (serialize) direction. */
  @Override
  protected Struct deserialize(UDTValue value) {
    throw new UnsupportedOperationException("This codec (" + this.getClass().getSimpleName()
        + ") does not support deserialization from UDT to Struct");
  }
}
Oops, something went wrong.