diff --git a/build.gradle b/build.gradle
index 4e573bd5c..0da6629f3 100644
--- a/build.gradle
+++ b/build.gradle
@@ -215,17 +215,117 @@ def getProjectVersion() {
     return ver
 }
 
-project('serializers') {
+project('serializers:shared') {
     dependencies {
         compile project(':common')
         compile project(':client')
+        compile group: 'io.pravega', name: 'pravega-client', version: pravegaVersion
+        compile group: 'org.xerial.snappy', name: 'snappy-java', version: snappyVersion
+        testCompile group: 'org.slf4j', name: 'log4j-over-slf4j', version: slf4jApiVersion
+        testCompile group: 'ch.qos.logback', name: 'logback-classic', version: qosLogbackVersion
+        testCompile group: 'io.pravega', name: 'pravega-test-testcommon', version: pravegaVersion
+    }
+
+    javadoc {
+        title = "Serializers shared"
+        dependsOn delombok
+        source = delombok.outputDir
+        failOnError = true
+        exclude "**/impl/**";
+        options.addBooleanOption("Xdoclint:all,-reference", true)
+    }
+}
+
+project('serializers:avro') {
+    dependencies {
+        compile project(':serializers:shared')
         compile group: 'org.apache.avro', name: 'avro', version: avroVersion
+        testCompile project(path:':serializers:shared', configuration:'testRuntime')
+        testCompile group: 'org.slf4j', name: 'log4j-over-slf4j', version: slf4jApiVersion
+        testCompile group: 'ch.qos.logback', name: 'logback-classic', version: qosLogbackVersion
+        testCompile group: 'io.pravega', name: 'pravega-test-testcommon', version: pravegaVersion
+    }
+
+    javadoc {
+        title = "Serializers avro"
+        dependsOn delombok
+        source = delombok.outputDir
+        failOnError = true
+        exclude "**/impl/**";
+        options.addBooleanOption("Xdoclint:all,-reference", true)
+    }
+
+    jar {
+        from {
+            configurations.compile.collect { it.isDirectory() ? it : zipTree(it) }
+        }
+    }
+}
+
+project('serializers:protobuf') {
+    dependencies {
+        compile project(':serializers:shared')
         compile group: 'com.google.protobuf', name: 'protobuf-java', version: protobufProtocVersion
         compile group: 'com.google.protobuf', name: 'protobuf-java-util', version: protobufUtilVersion
-        compile group: 'io.pravega', name: 'pravega-client', version: pravegaVersion
-        compile group: 'org.xerial.snappy', name: 'snappy-java', version: snappyVersion
-        compile group: 'com.fasterxml.jackson.module', name: 'jackson-module-jsonSchema', version: jacksonVersion
+        testCompile project(path:':serializers:shared', configuration:'testRuntime')
+        testCompile group: 'org.slf4j', name: 'log4j-over-slf4j', version: slf4jApiVersion
+        testCompile group: 'ch.qos.logback', name: 'logback-classic', version: qosLogbackVersion
+        testCompile group: 'io.pravega', name: 'pravega-test-testcommon', version: pravegaVersion
+    }
+
+    javadoc {
+        title = "Serializers protobuf"
+        dependsOn delombok
+        source = delombok.outputDir
+        failOnError = true
+        exclude "**/impl/**";
+        options.addBooleanOption("Xdoclint:all,-reference", true)
+    }
+
+    jar {
+        from {
+            configurations.compile.collect { it.isDirectory() ? it : zipTree(it) }
+        }
+    }
+}
+
+project('serializers:json') {
+    dependencies {
+        compile project(':serializers:shared')
         compile group: 'com.github.everit-org.json-schema', name: 'org.everit.json.schema', version: everitVersion
+        compile group: 'com.fasterxml.jackson.module', name: 'jackson-module-jsonSchema', version: jacksonVersion
+        testCompile project(path:':serializers:shared', configuration:'testRuntime')
+        testCompile group: 'org.slf4j', name: 'log4j-over-slf4j', version: slf4jApiVersion
+        testCompile group: 'ch.qos.logback', name: 'logback-classic', version: qosLogbackVersion
+        testCompile group: 'io.pravega', name: 'pravega-test-testcommon', version: pravegaVersion
+    }
+
+    javadoc {
+        title = "Serializers json"
+        dependsOn delombok
+        source = delombok.outputDir
+        failOnError = true
+        exclude "**/impl/**";
+        options.addBooleanOption("Xdoclint:all,-reference", true)
+    }
+
+    jar {
+        from {
+            configurations.compile.collect { it.isDirectory() ? it : zipTree(it) }
+        }
+    }
+}
+
+project('serializers') {
+    dependencies {
+        compile project(':serializers:avro')
+        compile project(':serializers:protobuf')
+        compile project(':serializers:json')
+        compile group: 'org.xerial.snappy', name: 'snappy-java', version: snappyVersion
+        testCompile project(path:':serializers:shared', configuration:'testRuntime')
+        testCompile files(project(':serializers:avro').sourceSets.test.output)
+        testCompile files(project(':serializers:protobuf').sourceSets.test.output)
+        testCompile files(project(':serializers:json').sourceSets.test.output)
         testCompile group: 'org.slf4j', name: 'log4j-over-slf4j', version: slf4jApiVersion
         testCompile group: 'ch.qos.logback', name: 'logback-classic', version: qosLogbackVersion
         testCompile group: 'io.pravega', name: 'pravega-test-testcommon', version: pravegaVersion
@@ -242,6 +342,8 @@ project('serializers') {
 
     jar {
         manifest {}
+        // add zip64=true so that we are able to pack more than 65k files in the jar.
+        zip64=true
         from {
             configurations.compile.collect { it.isDirectory() ? it : zipTree(it) }
@@ -317,6 +419,47 @@ project('server') {
     }
 }
 
+project('test') {
+    sourceSets {
+        test.resources.srcDirs += "$rootDir/resources"
+        test.java.srcDir project(':server').file("src/test/java")
+        test.java.srcDir project(':serializers').file("src/test/java")
+        test.java.srcDir project(':serializers:shared').file("src/test/java")
+        test.java.srcDir project(':serializers:protobuf').file("src/test/java")
+        test.java.srcDir project(':serializers:json').file("src/test/java")
+        test.java.srcDir project(':serializers:avro').file("src/test/java")
+    }
+    compileTestJava.dependsOn tasks.getByPath(':server:testClasses')
+    dependencies {
+        compile project(':common')
+        compile project(':contract')
+        compile project(':client')
+        compile project(':server')
+        compile project(':serializers')
+        compile project(':serializers:protobuf')
+        compile project(':serializers:avro')
+        compile project(':serializers:json')
+        compile project(':serializers:shared')
+        testCompile (group: 'io.pravega', name: 'pravega-standalone', version: pravegaVersion) {
+            exclude group: 'javax.ws.rs', module: 'jsr311-api'
+        }
+        testCompile group: 'io.pravega', name: 'pravega-test-testcommon', version: pravegaVersion
+        testCompile group: 'org.slf4j', name: 'log4j-over-slf4j', version: slf4jApiVersion
+        testCompile group: 'ch.qos.logback', name: 'logback-classic', version: qosLogbackVersion
+        testCompile files(project(':server').sourceSets.test.output.classesDir)
+        testCompile group: 'org.glassfish.jersey.test-framework.providers', name: 'jersey-test-framework-provider-grizzly2', version: jerseyVersion
+    }
+
+    javadoc {
+        title = "Test"
+        dependsOn delombok
+        source = delombok.outputDir
+        failOnError = true
+        exclude "**/impl/**";
+        options.addBooleanOption("Xdoclint:all,-reference", true)
+    }
+}
+
 apply plugin: 'distribution'
 distributions {
     main {
@@ -393,6 +536,10 @@ task publishAllJars() {
     dependsOn ':common:publish'
     dependsOn ':contract:publish'
     dependsOn ':server:publish'
+    dependsOn ':serializers:shared:publish'
+    dependsOn ':serializers:avro:publish'
+    dependsOn ':serializers:json:publish'
+    dependsOn ':serializers:protobuf:publish'
     dependsOn ':serializers:publish'
 }
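The build change above splits the old serializers project into serializers:shared, serializers:avro, serializers:protobuf and serializers:json, and the rest of this diff moves the classes into matching io.pravega.schemaregistry.serializer.* packages. A minimal sketch of what downstream code looks like against the new packages, based on the SerializerConfig and AvroSerializerFactory usage in the new avro tests below; MyEvent and the group id are illustrative placeholders, not part of this change:

import io.pravega.client.stream.Serializer;
import io.pravega.schemaregistry.client.SchemaRegistryClient;
import io.pravega.schemaregistry.serializer.avro.impl.AvroSerializerFactory;
import io.pravega.schemaregistry.serializer.avro.schemas.AvroSchema;
import io.pravega.schemaregistry.serializer.shared.impl.SerializerConfig;

public class AvroSerializerExample {
    // Plain POJO placeholder; AvroSchema.of(...) falls back to Avro reflection for such
    // classes, as exercised by the new testAvroSerializersReflect test in this diff.
    public static class MyEvent {
        private String name;

        public MyEvent() {
        }

        public MyEvent(String name) {
            this.name = name;
        }

        public String getName() {
            return name;
        }
    }

    public static Serializer<MyEvent> buildSerializer(SchemaRegistryClient client) {
        // SerializerConfig now comes from the serializers:shared module.
        SerializerConfig config = SerializerConfig.builder()
                .registryClient(client)
                .groupId("my-group") // placeholder group id
                .build();
        // AvroSchema and AvroSerializerFactory now come from the serializers:avro module.
        AvroSchema<MyEvent> schema = AvroSchema.of(MyEvent.class);
        return AvroSerializerFactory.serializer(config, schema);
    }
}

The pom.artifactId change in gradle/maven.gradle further down renames the published artifacts accordingly, e.g. the project path ':serializers:avro' maps to the artifact id 'serializers-avro'.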
diff --git a/checkstyle/spotbugs-exclude.xml b/checkstyle/spotbugs-exclude.xml
index 31efb43d4..a1ed58ff9 100644
--- a/checkstyle/spotbugs-exclude.xml
+++ b/checkstyle/spotbugs-exclude.xml
@@ -3,10 +3,16 @@
-
+
-
+
+
+
+
+
+
+
diff --git a/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClientConfig.java b/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClientConfig.java
index 2ba913a52..7f98e72f2 100644
--- a/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClientConfig.java
+++ b/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClientConfig.java
@@ -24,18 +24,102 @@ public class SchemaRegistryClientConfig {
      * URI for connecting with registry client.
      */
     private final URI schemaRegistryUri;
+    /**
+     * Flag to indicate if authentication is enabled.
+     */
     private final boolean authEnabled;
+    /**
+     * Authentication method.
+     */
     private final String authMethod;
+    /**
+     * Authentication token.
+     */
     private final String authToken;
+    /*
+     * Path to trust store for TLS server authentication certificate.
+     */
+    private final String trustStore;
+    /**
+     * Type of key store used as the trust store - e.g. jks, pkcs11, pkcs12, dks etc. If not specified then either
+     * certificate (if configured) or default java TLS store as specified in system properties would be used.
+     */
+    private final String trustStoreType;
+    /**
+     * Password for the trust store. Defaults to null.
+     */
+    private final String trustStorePassword;
+    /**
+     * If the trust store is a certificate file, typically DER or PEM file.
+     */
+    private final String certificate;
+    /**
+     * Flag to indicate whether client should perform host name validation in server authentication certificate.
+     */
+    private final boolean validateHostName;
 
-    private SchemaRegistryClientConfig(URI schemaRegistryUri, boolean authEnabled, String authMethod, String authToken) {
+    private SchemaRegistryClientConfig(URI schemaRegistryUri, boolean authEnabled, String authMethod, String authToken,
+                                       String trustStore, String trustStoreType, String trustStorePassword,
+                                       String certificate, boolean validateHostName) {
         this.schemaRegistryUri = schemaRegistryUri;
         this.authEnabled = authEnabled;
         this.authMethod = authMethod;
         this.authToken = authToken;
+        this.trustStore = trustStore;
+        this.certificate = certificate;
+        this.trustStoreType = trustStoreType;
+        this.trustStorePassword = trustStorePassword;
+        this.validateHostName = validateHostName;
     }
 
     public static final class SchemaRegistryClientConfigBuilder {
         private boolean authEnabled = false;
+        private boolean validateHostName = false;
+        private String trustStore = null;
+        private String trustStoreType = null;
+        private String trustStorePassword = null;
+        private String certificate = null;
+
+        public SchemaRegistryClientConfigBuilder certificate(String certificate) {
+            this.certificate = certificate;
+            return this;
+        }
+
+        public SchemaRegistryClientConfigBuilder trustStore(String trustStore, String trustStoreType, String trustStorePassword) {
+            this.trustStore = trustStore;
+            return this.trustStoreType(trustStoreType)
+                       .trustStorePassword(trustStorePassword);
+        }
+
+        private SchemaRegistryClientConfigBuilder trustStoreType(String trustStoreType) {
+            this.trustStoreType = trustStoreType;
+            return this;
+        }
+
+        private SchemaRegistryClientConfigBuilder trustStorePassword(String trustStorePassword) {
+            this.trustStorePassword = trustStorePassword;
+            return this;
+        }
+
+        public SchemaRegistryClientConfigBuilder authentication(String authMethod, String authToken) {
+            return this.authEnabled()
+                       .authMethod(authMethod)
+                       .authToken(authToken);
+        }
+
+        private SchemaRegistryClientConfigBuilder authEnabled() {
+            this.authEnabled = true;
+            return this;
+        }
+
+        private SchemaRegistryClientConfigBuilder authMethod(String authMethod) {
+            this.authMethod = authMethod;
+            return this;
+        }
+
+        private SchemaRegistryClientConfigBuilder authToken(String authToken) {
+            this.authToken = authToken;
+            return this;
+        }
     }
 }
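The builder methods added above are exercised by SchemaRegistryClientConfigTest later in this diff. A minimal sketch of configuring a client for an https registry endpoint; the schemaRegistryUri(...) builder method is assumed from the pre-existing builder (it is not part of this change), and the URI, paths, password, auth method and token are placeholders:

import io.pravega.schemaregistry.client.SchemaRegistryClientConfig;

import java.net.URI;

public class TlsConfigExample {
    public static SchemaRegistryClientConfig tlsConfig() {
        return SchemaRegistryClientConfig.builder()
                // assumed pre-existing builder method for the registry URI
                .schemaRegistryUri(URI.create("https://registry.example.com:9092"))
                // point the client at a trust store (type and password as configured)...
                .trustStore("/etc/pravega/client.truststore.jks", "JKS", "changeit")
                // ...or, alternatively, at a PEM/DER certificate:
                // .certificate("/etc/pravega/registry-cert.pem")
                .authentication("Basic", "base64-encoded-credentials")
                .build();
    }
}

With validateHostName left at its default of false, the client installs an accept-all hostname verifier, as shown in the SchemaRegistryClientImpl change that follows.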
diff --git a/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClientImpl.java b/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClientImpl.java
index fb4aecb9b..e21eaa9e0 100644
--- a/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClientImpl.java
+++ b/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClientImpl.java
@@ -10,8 +10,10 @@ package io.pravega.schemaregistry.client;
 
 import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Preconditions;
 import io.pravega.common.Exceptions;
 import io.pravega.common.util.Retry;
+import io.pravega.common.util.CertificateUtils;
 import io.pravega.schemaregistry.common.AuthHelper;
 import io.pravega.schemaregistry.common.ContinuationTokenIterator;
 import io.pravega.schemaregistry.contract.data.CodecType;
@@ -34,15 +36,25 @@ import io.pravega.schemaregistry.contract.generated.rest.model.ValidateRequest;
 import io.pravega.schemaregistry.contract.transform.ModelHelper;
 import io.pravega.schemaregistry.contract.v1.ApiV1;
+import lombok.SneakyThrows;
 import org.glassfish.jersey.client.ClientConfig;
 import org.glassfish.jersey.client.proxy.WebResourceFactory;
 
 import javax.annotation.Nullable;
+import javax.net.ssl.SSLContext;
+import javax.net.ssl.TrustManagerFactory;
 import javax.ws.rs.client.Client;
 import javax.ws.rs.client.ClientBuilder;
 import javax.ws.rs.client.ClientRequestFilter;
 import javax.ws.rs.core.HttpHeaders;
 import javax.ws.rs.core.Response;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.security.KeyManagementException;
+import java.security.KeyStore;
+import java.security.KeyStoreException;
+import java.security.NoSuchAlgorithmException;
+import java.security.cert.CertificateException;
 import java.util.AbstractMap;
 import java.util.Collection;
 import java.util.Comparator;
@@ -70,6 +82,8 @@ public class SchemaRegistryClientImpl implements SchemaRegistryClient {
             .withExpBackoff(100, 2, 10, 1000)
             .retryWhen(x -> Exceptions.unwrap(x) instanceof ConnectionException);
     private static final int GROUP_LIMIT = 100;
+    private static final String HTTPS = "https";
+    private static final String TLS = "TLS";
 
     private final ApiV1.GroupsApi groupProxy;
     private final ApiV1.SchemasApi schemaProxy;
@@ -77,7 +91,17 @@ public class SchemaRegistryClientImpl implements SchemaRegistryClient {
     private final Client client;
 
     SchemaRegistryClientImpl(SchemaRegistryClientConfig config, String namespace) {
-        client = ClientBuilder.newClient(new ClientConfig());
+        Preconditions.checkNotNull(config);
+        Preconditions.checkNotNull(config.getSchemaRegistryUri());
+        ClientBuilder clientBuilder = ClientBuilder.newBuilder().withConfig(new ClientConfig());
+        if (HTTPS.equalsIgnoreCase(config.getSchemaRegistryUri().getScheme())) {
+            clientBuilder = clientBuilder.sslContext(getSSLContext(config));
+            if (!config.isValidateHostName()) {
+                // host name verification is done by default. To disable it we will add an always true verifier
+                clientBuilder.hostnameVerifier((a, b) -> true);
+            }
+        }
+        client = clientBuilder.build();
         if (config.isAuthEnabled()) {
             client.register((ClientRequestFilter) context -> {
                 context.getHeaders().add(HttpHeaders.AUTHORIZATION,
@@ -486,4 +510,44 @@ public void close() throws Exception {
             client.close();
         }
     }
+
+    @SneakyThrows(IOException.class)
+    private SSLContext getSSLContext(SchemaRegistryClientConfig config) {
+        try {
+            // If trust store is specified, use it.
+            // Else check if certificate is provided.
+            // Else use default SSL context.
+            KeyStore trustStore;
+            if (config.getTrustStore() != null) {
+                trustStore = getTrustStore(config);
+            } else if (config.getCertificate() != null) {
+                trustStore = CertificateUtils.createTrustStore(config.getCertificate());
+            } else {
+                return SSLContext.getDefault();
+            }
+            TrustManagerFactory factory = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm());
+            factory.init(trustStore);
+            SSLContext tlsContext = SSLContext.getInstance(TLS);
+            tlsContext.init(null, factory.getTrustManagers(), null);
+            return tlsContext;
+        } catch (KeyManagementException | KeyStoreException | NoSuchAlgorithmException |
+                CertificateException e) {
+            throw new IllegalStateException("Failure initializing trust store", e);
+        }
+    }
+
+    private KeyStore getTrustStore(SchemaRegistryClientConfig config) throws KeyStoreException,
+            IOException, NoSuchAlgorithmException, CertificateException {
+        KeyStore trustStore;
+        trustStore = KeyStore.getInstance(config.getTrustStoreType());
+        try (FileInputStream fin = new FileInputStream(config.getTrustStore())) {
+            String trustStorePassword = config.getTrustStorePassword();
+            if (trustStorePassword != null) {
+                trustStore.load(fin, trustStorePassword.toCharArray());
+            } else {
+                trustStore.load(fin, null);
+            }
+        }
+        return trustStore;
+    }
 }
diff --git a/client/src/test/java/io/pravega/schemaregistry/client/SchemaRegistryClientConfigTest.java b/client/src/test/java/io/pravega/schemaregistry/client/SchemaRegistryClientConfigTest.java
new file mode 100644
index 000000000..c056193f4
--- /dev/null
+++ b/client/src/test/java/io/pravega/schemaregistry/client/SchemaRegistryClientConfigTest.java
@@ -0,0 +1,54 @@
+/**
+ * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.client; + +import org.junit.Test; + +import static org.junit.Assert.*; + +public class SchemaRegistryClientConfigTest { + @Test + public void testSSLConfig() { + SchemaRegistryClientConfig config = SchemaRegistryClientConfig.builder().build(); + assertNull(config.getCertificate()); + assertNull(config.getTrustStore()); + assertNull(config.getTrustStoreType()); + assertNull(config.getTrustStorePassword()); + + config = SchemaRegistryClientConfig.builder().certificate("certPath").build(); + assertEquals(config.getCertificate(), "certPath"); + assertNull(config.getTrustStorePassword()); + assertNull(config.getTrustStoreType()); + assertNull(config.getTrustStorePassword()); + + config = SchemaRegistryClientConfig.builder().trustStore("trustStorePath", "JKS", null).build(); + assertNull(config.getCertificate()); + assertEquals(config.getTrustStore(), "trustStorePath"); + assertEquals(config.getTrustStoreType(), "JKS"); + assertNull(config.getTrustStorePassword()); + + config = SchemaRegistryClientConfig.builder().trustStore("trustStorePath", "JKS", "password").build(); + assertNull(config.getCertificate()); + assertEquals(config.getTrustStore(), "trustStorePath"); + assertEquals(config.getTrustStoreType(), "JKS"); + assertEquals(config.getTrustStorePassword(), "password"); + } + + @Test + public void testAuthConfig() { + SchemaRegistryClientConfig config = SchemaRegistryClientConfig.builder().build(); + assertFalse(config.isAuthEnabled()); + + config = SchemaRegistryClientConfig.builder().authentication("method", "token").build(); + assertTrue(config.isAuthEnabled()); + assertEquals(config.getAuthMethod(), "method"); + assertEquals(config.getAuthToken(), "token"); + } +} diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/transform/ModelHelper.java b/contract/src/main/java/io/pravega/schemaregistry/contract/transform/ModelHelper.java index 3d6892068..4597a1b1c 100644 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/transform/ModelHelper.java +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/transform/ModelHelper.java @@ -147,7 +147,7 @@ public static io.pravega.schemaregistry.contract.data.BackwardAndForward.Backwar return new io.pravega.schemaregistry.contract.data.BackwardAndForward.Backward(); } else if (obj instanceof BackwardTill) { return new io.pravega.schemaregistry.contract.data.BackwardAndForward.BackwardTill( - decode(((io.pravega.schemaregistry.contract.generated.rest.model.BackwardTill) backward.getBackwardPolicy()).getVersionInfo())); + decode(((io.pravega.schemaregistry.contract.generated.rest.model.BackwardTill) obj).getVersionInfo())); } else if (obj instanceof BackwardTransitive) { return new io.pravega.schemaregistry.contract.data.BackwardAndForward.BackwardTransitive(); } else { @@ -176,7 +176,7 @@ public static io.pravega.schemaregistry.contract.data.BackwardAndForward.Forward return new io.pravega.schemaregistry.contract.data.BackwardAndForward.Forward(); } else if (obj instanceof ForwardTill) { return new io.pravega.schemaregistry.contract.data.BackwardAndForward.ForwardTill( - decode(((io.pravega.schemaregistry.contract.generated.rest.model.ForwardTill) forward.getForwardPolicy()).getVersionInfo())); + decode(((io.pravega.schemaregistry.contract.generated.rest.model.ForwardTill) obj).getVersionInfo())); } else if (obj instanceof ForwardTransitive) { return new 
io.pravega.schemaregistry.contract.data.BackwardAndForward.ForwardTransitive(); } else { diff --git a/contract/src/test/java/io/pravega/schemaregistry/contract/transform/ModelHelperTest.java b/contract/src/test/java/io/pravega/schemaregistry/contract/transform/ModelHelperTest.java index ab9f92366..9df6ce455 100644 --- a/contract/src/test/java/io/pravega/schemaregistry/contract/transform/ModelHelperTest.java +++ b/contract/src/test/java/io/pravega/schemaregistry/contract/transform/ModelHelperTest.java @@ -9,6 +9,7 @@ */ package io.pravega.schemaregistry.contract.transform; +import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableMap; import io.pravega.schemaregistry.contract.generated.rest.model.Backward; import io.pravega.schemaregistry.contract.generated.rest.model.BackwardAndForward; @@ -28,6 +29,7 @@ import io.pravega.schemaregistry.contract.generated.rest.model.VersionInfo; import org.junit.Test; +import java.io.IOException; import java.nio.ByteBuffer; import java.util.Arrays; import java.util.Collections; @@ -35,6 +37,8 @@ import static org.junit.Assert.*; public class ModelHelperTest { + private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); + @Test public void testDecode() { SerializationFormat type = new SerializationFormat().serializationFormat(SerializationFormat.SerializationFormatEnum.CUSTOM).fullTypeName("a"); @@ -159,73 +163,77 @@ public void testEncode() { } @Test - public void testEncodeAndDecodeCompatibility() { + public void testEncodeAndDecodeCompatibility() throws IOException { io.pravega.schemaregistry.contract.data.Compatibility compatibility = io.pravega.schemaregistry.contract.data.Compatibility.allowAny(); - Compatibility encoded = ModelHelper.encode(compatibility); + Compatibility encoded = convert(ModelHelper.encode(compatibility), Compatibility.class); io.pravega.schemaregistry.contract.data.Compatibility decoded = ModelHelper.decode(encoded); assertEquals(compatibility, decoded); compatibility = io.pravega.schemaregistry.contract.data.Compatibility.denyAll(); - encoded = ModelHelper.encode(compatibility); + encoded = convert(ModelHelper.encode(compatibility), Compatibility.class); decoded = ModelHelper.decode(encoded); assertEquals(compatibility, decoded); compatibility = io.pravega.schemaregistry.contract.data.Compatibility.backward(); - encoded = ModelHelper.encode(compatibility); + encoded = convert(ModelHelper.encode(compatibility), Compatibility.class); decoded = ModelHelper.decode(encoded); assertEquals(compatibility, decoded); compatibility = io.pravega.schemaregistry.contract.data.Compatibility.forward(); - encoded = ModelHelper.encode(compatibility); + encoded = convert(ModelHelper.encode(compatibility), Compatibility.class); decoded = ModelHelper.decode(encoded); assertEquals(compatibility, decoded); compatibility = io.pravega.schemaregistry.contract.data.Compatibility.backwardTransitive(); - encoded = ModelHelper.encode(compatibility); + encoded = convert(ModelHelper.encode(compatibility), Compatibility.class); decoded = ModelHelper.decode(encoded); assertEquals(compatibility, decoded); compatibility = io.pravega.schemaregistry.contract.data.Compatibility.forwardTransitive(); - encoded = ModelHelper.encode(compatibility); + encoded = convert(ModelHelper.encode(compatibility), Compatibility.class); decoded = ModelHelper.decode(encoded); assertEquals(compatibility, decoded); compatibility = io.pravega.schemaregistry.contract.data.Compatibility.full(); - encoded = ModelHelper.encode(compatibility); + 
encoded = convert(ModelHelper.encode(compatibility), Compatibility.class); decoded = ModelHelper.decode(encoded); assertEquals(compatibility, decoded); compatibility = io.pravega.schemaregistry.contract.data.Compatibility.fullTransitive(); - encoded = ModelHelper.encode(compatibility); + encoded = convert(ModelHelper.encode(compatibility), Compatibility.class); decoded = ModelHelper.decode(encoded); assertEquals(compatibility, decoded); io.pravega.schemaregistry.contract.data.VersionInfo versionInfo = new io.pravega.schemaregistry.contract.data.VersionInfo("a", 1, 1); compatibility = io.pravega.schemaregistry.contract.data.Compatibility.backwardTill(versionInfo); - encoded = ModelHelper.encode(compatibility); + encoded = convert(ModelHelper.encode(compatibility), Compatibility.class); decoded = ModelHelper.decode(encoded); assertEquals(compatibility, decoded); compatibility = io.pravega.schemaregistry.contract.data.Compatibility.forwardTill(versionInfo); - encoded = ModelHelper.encode(compatibility); + encoded = convert(ModelHelper.encode(compatibility), Compatibility.class); decoded = ModelHelper.decode(encoded); assertEquals(compatibility, decoded); compatibility = io.pravega.schemaregistry.contract.data.Compatibility.backwardTillAndForwardTill(versionInfo, versionInfo); - encoded = ModelHelper.encode(compatibility); + encoded = convert(ModelHelper.encode(compatibility), Compatibility.class); decoded = ModelHelper.decode(encoded); assertEquals(compatibility, decoded); compatibility = io.pravega.schemaregistry.contract.data.Compatibility.backwardOneAndForwardTill(versionInfo); - encoded = ModelHelper.encode(compatibility); + encoded = convert(ModelHelper.encode(compatibility), Compatibility.class); decoded = ModelHelper.decode(encoded); assertEquals(compatibility, decoded); compatibility = io.pravega.schemaregistry.contract.data.Compatibility.backwardTillAndForwardOne(versionInfo); - encoded = ModelHelper.encode(compatibility); + encoded = convert(ModelHelper.encode(compatibility), Compatibility.class); decoded = ModelHelper.decode(encoded); assertEquals(compatibility, decoded); - + } + + private T convert(T t, Class tClass) throws IOException { + String str = OBJECT_MAPPER.writeValueAsString(t); + return OBJECT_MAPPER.readValue(str, tClass); } } diff --git a/gradle.properties b/gradle.properties index 3493130ec..867349c4e 100644 --- a/gradle.properties +++ b/gradle.properties @@ -46,7 +46,7 @@ gradleGitPluginVersion=2.2.0 avroVersion=1.9.1 avroProtobufVersion=1.7.7 snappyVersion=1.1.7.3 -pravegaVersion=0.8.0-2591.37c5082-SNAPSHOT +pravegaVersion=0.8.0-2623.279ac21-SNAPSHOT pravegaKeyCloakVersion=0.7.0 # Version and base tags can be overridden at build time diff --git a/gradle/maven.gradle b/gradle/maven.gradle index 2bc88598c..e4fbd4aa8 100644 --- a/gradle/maven.gradle +++ b/gradle/maven.gradle @@ -39,7 +39,7 @@ plugins.withId('maven') { } } - pom.artifactId = project.path.replace(':', '') + pom.artifactId = project.path.replaceFirst(':', '').replace(':', '-') pom.project { name "Pravega" url "http://pravega.io" @@ -80,7 +80,7 @@ plugins.withId('maven') { install { repositories { mavenInstaller { - pom.artifactId = project.path.replace(':', '') + pom.artifactId = project.path.replaceFirst(':', '').replace(':', '-') } } } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroDeserializer.java b/serializers/avro/src/main/java/io/pravega/schemaregistry/serializer/avro/impl/AvroDeserializer.java similarity index 84% rename from 
serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroDeserializer.java rename to serializers/avro/src/main/java/io/pravega/schemaregistry/serializer/avro/impl/AvroDeserializer.java index a8db6971b..d73bd2832 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroDeserializer.java +++ b/serializers/avro/src/main/java/io/pravega/schemaregistry/serializer/avro/impl/AvroDeserializer.java @@ -7,13 +7,16 @@ * * http://www.apache.org/licenses/LICENSE-2.0 */ -package io.pravega.schemaregistry.serializers; +package io.pravega.schemaregistry.serializer.avro.impl; import com.google.common.base.Charsets; import com.google.common.base.Preconditions; +import io.pravega.schemaregistry.serializer.avro.schemas.AvroSchema; import io.pravega.schemaregistry.client.SchemaRegistryClient; import io.pravega.schemaregistry.contract.data.SchemaInfo; -import io.pravega.schemaregistry.schemas.AvroSchema; +import io.pravega.schemaregistry.serializer.shared.impl.AbstractDeserializer; +import io.pravega.schemaregistry.serializer.shared.impl.EncodingCache; +import io.pravega.schemaregistry.serializer.shared.impl.SerializerConfig; import org.apache.avro.Schema; import org.apache.avro.io.BinaryDecoder; import org.apache.avro.io.DecoderFactory; @@ -39,7 +42,7 @@ class AvroDeserializer extends AbstractDeserializer { } @Override - protected T deserialize(InputStream inputStream, SchemaInfo writerSchemaInfo, SchemaInfo readerSchemaInfo) throws IOException { + public final T deserialize(InputStream inputStream, SchemaInfo writerSchemaInfo, SchemaInfo readerSchemaInfo) throws IOException { Preconditions.checkNotNull(writerSchemaInfo); Schema writerSchema; if (knownSchemas.containsKey(writerSchemaInfo)) { diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroGenericDeserializer.java b/serializers/avro/src/main/java/io/pravega/schemaregistry/serializer/avro/impl/AvroGenericDeserializer.java similarity index 70% rename from serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroGenericDeserializer.java rename to serializers/avro/src/main/java/io/pravega/schemaregistry/serializer/avro/impl/AvroGenericDeserializer.java index 77af89edf..5cd0e23ca 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroGenericDeserializer.java +++ b/serializers/avro/src/main/java/io/pravega/schemaregistry/serializer/avro/impl/AvroGenericDeserializer.java @@ -7,12 +7,15 @@ * * http://www.apache.org/licenses/LICENSE-2.0 */ -package io.pravega.schemaregistry.serializers; +package io.pravega.schemaregistry.serializer.avro.impl; import com.google.common.base.Preconditions; +import io.pravega.schemaregistry.serializer.avro.schemas.AvroSchema; import io.pravega.schemaregistry.client.SchemaRegistryClient; import io.pravega.schemaregistry.contract.data.SchemaInfo; -import io.pravega.schemaregistry.schemas.AvroSchema; +import io.pravega.schemaregistry.serializer.shared.impl.AbstractDeserializer; +import io.pravega.schemaregistry.serializer.shared.impl.EncodingCache; +import io.pravega.schemaregistry.serializer.shared.impl.SerializerConfig; import org.apache.avro.Schema; import org.apache.avro.generic.GenericDatumReader; import org.apache.avro.io.BinaryDecoder; @@ -23,17 +26,17 @@ import java.io.InputStream; import java.util.concurrent.ConcurrentHashMap; -class AvroGenericDeserializer extends AbstractDeserializer { +public class AvroGenericDeserializer extends AbstractDeserializer { private final ConcurrentHashMap knownSchemas; - 
AvroGenericDeserializer(String groupId, SchemaRegistryClient client, @Nullable AvroSchema schema, + public AvroGenericDeserializer(String groupId, SchemaRegistryClient client, @Nullable AvroSchema schema, SerializerConfig.Decoders decoder, EncodingCache encodingCache) { super(groupId, client, schema, false, decoder, encodingCache, true); this.knownSchemas = new ConcurrentHashMap<>(); } @Override - protected Object deserialize(InputStream inputStream, SchemaInfo writerSchemaInfo, SchemaInfo readerSchemaInfo) throws IOException { + public final Object deserialize(InputStream inputStream, SchemaInfo writerSchemaInfo, SchemaInfo readerSchemaInfo) throws IOException { Preconditions.checkNotNull(writerSchemaInfo); Schema writerSchema = knownSchemas.computeIfAbsent(writerSchemaInfo, x -> AvroSchema.from(x).getSchema()); Schema readerSchema = knownSchemas.computeIfAbsent(readerSchemaInfo, x -> AvroSchema.from(x).getSchema()); diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroSerializer.java b/serializers/avro/src/main/java/io/pravega/schemaregistry/serializer/avro/impl/AvroSerializer.java similarity index 82% rename from serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroSerializer.java rename to serializers/avro/src/main/java/io/pravega/schemaregistry/serializer/avro/impl/AvroSerializer.java index 99231a11c..32f20face 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroSerializer.java +++ b/serializers/avro/src/main/java/io/pravega/schemaregistry/serializer/avro/impl/AvroSerializer.java @@ -7,12 +7,13 @@ * * http://www.apache.org/licenses/LICENSE-2.0 */ -package io.pravega.schemaregistry.serializers; +package io.pravega.schemaregistry.serializer.avro.impl; +import io.pravega.schemaregistry.serializer.avro.schemas.AvroSchema; import io.pravega.schemaregistry.client.SchemaRegistryClient; -import io.pravega.schemaregistry.codec.Encoder; +import io.pravega.schemaregistry.serializer.shared.codec.Encoder; import io.pravega.schemaregistry.contract.data.SchemaInfo; -import io.pravega.schemaregistry.schemas.AvroSchema; +import io.pravega.schemaregistry.serializer.shared.impl.AbstractSerializer; import org.apache.avro.Schema; import org.apache.avro.generic.GenericDatumWriter; import org.apache.avro.generic.IndexedRecord; @@ -25,9 +26,9 @@ import java.io.IOException; import java.io.OutputStream; -class AvroSerializer extends AbstractSerializer { +public class AvroSerializer extends AbstractSerializer { private final AvroSchema avroSchema; - AvroSerializer(String groupId, SchemaRegistryClient client, AvroSchema schema, + public AvroSerializer(String groupId, SchemaRegistryClient client, AvroSchema schema, Encoder encoder, boolean registerSchema) { super(groupId, client, schema, encoder, registerSchema, true); this.avroSchema = schema; diff --git a/serializers/avro/src/main/java/io/pravega/schemaregistry/serializer/avro/impl/AvroSerializerFactory.java b/serializers/avro/src/main/java/io/pravega/schemaregistry/serializer/avro/impl/AvroSerializerFactory.java new file mode 100644 index 000000000..7233893a7 --- /dev/null +++ b/serializers/avro/src/main/java/io/pravega/schemaregistry/serializer/avro/impl/AvroSerializerFactory.java @@ -0,0 +1,187 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.serializer.avro.impl; + +import com.google.common.base.Preconditions; +import io.pravega.client.stream.Serializer; +import io.pravega.schemaregistry.serializer.avro.schemas.AvroSchema; +import io.pravega.schemaregistry.client.SchemaRegistryClient; +import io.pravega.schemaregistry.common.Either; +import io.pravega.schemaregistry.contract.data.EncodingInfo; +import io.pravega.schemaregistry.serializer.shared.impl.AbstractDeserializer; +import io.pravega.schemaregistry.serializer.shared.impl.AbstractSerializer; +import io.pravega.schemaregistry.serializer.shared.impl.EncodingCache; +import io.pravega.schemaregistry.serializer.shared.impl.MultiplexedAndGenericDeserializer; +import io.pravega.schemaregistry.serializer.shared.impl.MultiplexedDeserializer; +import io.pravega.schemaregistry.serializer.shared.impl.MultiplexedSerializer; +import io.pravega.schemaregistry.serializer.shared.impl.SerializerConfig; +import lombok.NonNull; +import lombok.extern.slf4j.Slf4j; +import org.apache.avro.generic.GenericRecord; + +import javax.annotation.Nullable; +import java.nio.ByteBuffer; +import java.util.Map; +import java.util.stream.Collectors; + +import static io.pravega.schemaregistry.serializer.shared.impl.SerializerFactoryHelper.initForDeserializer; +import static io.pravega.schemaregistry.serializer.shared.impl.SerializerFactoryHelper.initForSerializer; + +/** + * Internal Factory class for Avro serializers and deserializers. + */ +@Slf4j +public class AvroSerializerFactory { + /** + * Creates a typed Avro serializer for the schema. The serializer implementation returned from this method is + * responsible for interacting with schema registry service and ensures that only valid registered schema can be used. + * + * Note: the returned serializer only implements {@link Serializer#serialize(Object)}. + * It does not implement {@link Serializer#deserialize(ByteBuffer)}. + * + * @param config Serializer Config used for instantiating a new serializer. + * @param schema Schema container that encapsulates an AvroSchema + * @param Type of event. It accepts either POJO or Avro generated classes and serializes them. + * @return A Serializer Implementation that can be used in {@link io.pravega.client.stream.EventStreamWriter} or + * {@link io.pravega.client.stream.TransactionalEventStreamWriter}. + */ + public static Serializer serializer(@NonNull SerializerConfig config, @NonNull AvroSchema schema) { + Preconditions.checkArgument(config.isWriteEncodingHeader(), "Events should be tagged with encoding ids."); + SchemaRegistryClient schemaRegistryClient = initForSerializer(config); + String groupId = config.getGroupId(); + return new AvroSerializer<>(groupId, schemaRegistryClient, schema, config.getEncoder(), config.isRegisterSchema()); + } + + /** + * Creates a typed avro deserializer for the Schema. The deserializer implementation returned from this method is + * responsible for interacting with schema registry service and validate the writer schema before using it. + * + * Note: the returned serializer only implements {@link Serializer#deserialize(ByteBuffer)}. + * It does not implement {@link Serializer#serialize(Object)}. + * + * @param config Serializer Config used for instantiating a new serializer. + * @param schema Schema container that encapsulates an AvroSchema + * @param Type of event. The typed event should be an avro generated class. 
For generic type use + * {@link #genericDeserializer(SerializerConfig, AvroSchema)} + * @return A deserializer Implementation that can be used in {@link io.pravega.client.stream.EventStreamReader}. + */ + public static Serializer deserializer(@NonNull SerializerConfig config, @NonNull AvroSchema schema) { + Preconditions.checkArgument(config.isWriteEncodingHeader(), "Events should be tagged with encoding ids."); + SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); + String groupId = config.getGroupId(); + + EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); + + return new AvroDeserializer<>(groupId, schemaRegistryClient, schema, config.getDecoders(), encodingCache); + } + + /** + * Creates a generic avro deserializer. It has the optional parameter for schema. + * If the schema is not supplied, the writer schema is used for deserialization into {@link GenericRecord}. + * + * Note: the returned serializer only implements {@link Serializer#deserialize(ByteBuffer)}. + * It does not implement {@link Serializer#serialize(Object)}. + * + * @param config Serializer Config used for instantiating a new serializer. + * @param schema Schema container that encapsulates an AvroSchema. It can be null to indicate that writer schema should + * be used for deserialization. + * @return A deserializer Implementation that can be used in {@link io.pravega.client.stream.EventStreamReader}. + */ + public static Serializer genericDeserializer(@NonNull SerializerConfig config, @Nullable AvroSchema schema) { + Preconditions.checkArgument(config.isWriteEncodingHeader(), "Events should be tagged with encoding ids."); + String groupId = config.getGroupId(); + SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); + EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); + + return new AvroGenericDeserializer(groupId, schemaRegistryClient, schema, config.getDecoders(), encodingCache); + } + + /** + * A multiplexed Avro serializer that takes a map of schemas and validates them individually. + * + * @param config Serializer config. + * @param schemas map of avro schemas. + * @param Base Type of schemas. + * @return a Serializer which can serialize events of different types for which schemas are supplied. + */ + public static Serializer multiTypeSerializer(@NonNull SerializerConfig config, @NonNull Map, AvroSchema> schemas) { + Preconditions.checkArgument(config.isWriteEncodingHeader(), "Events should be tagged with encoding ids."); + + String groupId = config.getGroupId(); + SchemaRegistryClient schemaRegistryClient = initForSerializer(config); + Map, AbstractSerializer> serializerMap = getSerializerMap(config, schemas, groupId, schemaRegistryClient); + return new MultiplexedSerializer<>(serializerMap); + } + + private static Map, AbstractSerializer> getSerializerMap( + SerializerConfig config, Map, AvroSchema> schemas, String groupId, + SchemaRegistryClient schemaRegistryClient) { + return schemas + .entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, + x -> new AvroSerializer<>(groupId, schemaRegistryClient, x.getValue(), config.getEncoder(), + config.isRegisterSchema()))); + } + + /** + * A multiplexed Avro Deserializer that takes a map of schemas and deserializes events into those events depending + * on the object type information in {@link EncodingInfo}. + * + * @param config Serializer config. + * @param schemas map of avro schemas. + * @param Base type of schemas. 
+ * @return a Deserializer which can deserialize events of different types in the stream into typed objects. + */ + public static Serializer multiTypeDeserializer( + @NonNull SerializerConfig config, @NonNull Map, AvroSchema> schemas) { + Preconditions.checkArgument(config.isWriteEncodingHeader(), "Events should be tagged with encoding ids."); + + String groupId = config.getGroupId(); + SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); + EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); + + Map> deserializerMap = getDeserializerMap(config, schemas, groupId, schemaRegistryClient, encodingCache); + return new MultiplexedDeserializer<>(groupId, schemaRegistryClient, deserializerMap, config.getDecoders(), + encodingCache); + } + + private static Map> getDeserializerMap( + SerializerConfig config, Map, AvroSchema> schemas, String groupId, + SchemaRegistryClient schemaRegistryClient, EncodingCache encodingCache) { + return schemas + .values().stream().collect(Collectors.toMap(x -> x.getSchemaInfo().getType(), + x -> new AvroDeserializer<>(groupId, schemaRegistryClient, x, config.getDecoders(), encodingCache))); + } + + /** + * A multiplexed Avro Deserializer that takes a map of schemas and deserializes events into those events depending + * on the object type information in {@link EncodingInfo}. + * + * @param config Serializer config. + * @param schemas map of avro schemas. + * @param Base type of schemas. + * @return a Deserializer which can deserialize events of different types in the stream into typed objects or a generic + * object + */ + public static Serializer> typedOrGenericDeserializer( + @NonNull SerializerConfig config, @NonNull Map, AvroSchema> schemas) { + Preconditions.checkArgument(config.isWriteEncodingHeader(), "Events should be tagged with encoding ids."); + + String groupId = config.getGroupId(); + SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); + + EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); + + Map> deserializerMap = getDeserializerMap(config, schemas, groupId, schemaRegistryClient, encodingCache); + AbstractDeserializer genericDeserializer = new AvroGenericDeserializer(groupId, schemaRegistryClient, + null, config.getDecoders(), encodingCache); + return new MultiplexedAndGenericDeserializer<>(groupId, schemaRegistryClient, deserializerMap, genericDeserializer, + config.getDecoders(), encodingCache); + } +} diff --git a/serializers/src/main/java/io/pravega/schemaregistry/schemas/AvroSchema.java b/serializers/avro/src/main/java/io/pravega/schemaregistry/serializer/avro/schemas/AvroSchema.java similarity index 97% rename from serializers/src/main/java/io/pravega/schemaregistry/schemas/AvroSchema.java rename to serializers/avro/src/main/java/io/pravega/schemaregistry/serializer/avro/schemas/AvroSchema.java index 4fccf058b..a5eda3923 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/schemas/AvroSchema.java +++ b/serializers/avro/src/main/java/io/pravega/schemaregistry/serializer/avro/schemas/AvroSchema.java @@ -7,13 +7,14 @@ * * http://www.apache.org/licenses/LICENSE-2.0 */ -package io.pravega.schemaregistry.schemas; +package io.pravega.schemaregistry.serializer.avro.schemas; import com.google.common.base.Charsets; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableMap; import io.pravega.schemaregistry.contract.data.SchemaInfo; import io.pravega.schemaregistry.contract.data.SerializationFormat; +import 
io.pravega.schemaregistry.serializer.shared.schemas.Schema; import lombok.Getter; import org.apache.avro.generic.GenericRecord; import org.apache.avro.reflect.ReflectData; diff --git a/serializers/avro/src/test/java/io/pravega/schemaregistry/serializer/avro/impl/SerializerTest.java b/serializers/avro/src/test/java/io/pravega/schemaregistry/serializer/avro/impl/SerializerTest.java new file mode 100644 index 000000000..a3ea83379 --- /dev/null +++ b/serializers/avro/src/test/java/io/pravega/schemaregistry/serializer/avro/impl/SerializerTest.java @@ -0,0 +1,163 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.serializer.avro.impl; + +import io.pravega.client.stream.Serializer; +import io.pravega.schemaregistry.serializer.avro.schemas.AvroSchema; +import io.pravega.schemaregistry.client.SchemaRegistryClient; +import io.pravega.schemaregistry.serializer.shared.codec.Codecs; +import io.pravega.schemaregistry.common.Either; +import io.pravega.schemaregistry.contract.data.EncodingId; +import io.pravega.schemaregistry.contract.data.EncodingInfo; +import io.pravega.schemaregistry.contract.data.GroupProperties; +import io.pravega.schemaregistry.contract.data.SerializationFormat; +import io.pravega.schemaregistry.contract.data.VersionInfo; +import io.pravega.schemaregistry.serializer.shared.impl.SerializerConfig; +import io.pravega.schemaregistry.serializer.avro.testobjs.SchemaDefinitions; +import io.pravega.schemaregistry.serializer.avro.testobjs.generated.Test1; +import io.pravega.schemaregistry.serializer.avro.testobjs.generated.Test2; +import lombok.Data; +import lombok.NoArgsConstructor; +import lombok.SneakyThrows; +import org.apache.avro.generic.GenericData; +import org.apache.avro.generic.GenericRecord; +import org.apache.avro.specific.SpecificRecordBase; +import org.junit.Test; + +import java.nio.ByteBuffer; +import java.util.HashMap; +import java.util.Map; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; + +public class SerializerTest { + @Test + public void testAvroSerializers() { + SchemaRegistryClient client = mock(SchemaRegistryClient.class); + + SerializerConfig config = SerializerConfig.builder().registryClient(client).groupId("groupId").build(); + AvroSchema schema1 = AvroSchema.of(Test1.class); + AvroSchema schema2 = AvroSchema.of(Test2.class); + VersionInfo versionInfo1 = new VersionInfo("name", 0, 0); + VersionInfo versionInfo2 = new VersionInfo("name", 1, 1); + doAnswer(x -> GroupProperties.builder().serializationFormat(SerializationFormat.Any).build()) + .when(client).getGroupProperties(anyString()); + doAnswer(x -> versionInfo1).when(client).getVersionForSchema(anyString(), eq(schema1.getSchemaInfo())); + doAnswer(x -> versionInfo2).when(client).getVersionForSchema(anyString(), eq(schema2.getSchemaInfo())); + doAnswer(x -> new EncodingId(0)).when(client).getEncodingId(anyString(), eq(versionInfo1), any()); + doAnswer(x -> new EncodingId(1)).when(client).getEncodingId(anyString(), eq(versionInfo2), 
any()); + doAnswer(x -> new EncodingInfo(versionInfo1, schema1.getSchemaInfo(), Codecs.None.getCodec().getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(0))); + doAnswer(x -> new EncodingInfo(versionInfo2, schema2.getSchemaInfo(), Codecs.None.getCodec().getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(1))); + doAnswer(x -> true).when(client).canReadUsing(anyString(), any()); + + AvroSchema of = AvroSchema.of(SchemaDefinitions.ENUM); + VersionInfo versionInfo3 = new VersionInfo(of.getSchema().getFullName(), 0, 2); + doAnswer(x -> versionInfo3).when(client).getVersionForSchema(anyString(), eq(of.getSchemaInfo())); + doAnswer(x -> new EncodingId(2)).when(client).getEncodingId(anyString(), eq(versionInfo3), any()); + doAnswer(x -> new EncodingInfo(versionInfo3, of.getSchemaInfo(), Codecs.None.getCodec().getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(2))); + + Serializer serializerStr = AvroSerializerFactory.serializer(config, of); + GenericData.EnumSymbol enumSymbol = new GenericData.EnumSymbol(of.getSchema(), "a"); + ByteBuffer serialized1 = serializerStr.serialize(enumSymbol); + + Serializer deserializer1 = AvroSerializerFactory.deserializer(config, of); + Object deserializedEnum = deserializer1.deserialize(serialized1); + assertEquals(deserializedEnum, enumSymbol); + + Serializer serializer = AvroSerializerFactory.serializer(config, schema1); + Test1 test1 = new Test1("name", 1); + ByteBuffer serialized = serializer.serialize(test1); + + Serializer deserializer = AvroSerializerFactory.deserializer(config, schema1); + Test1 deserialized = deserializer.deserialize(serialized); + assertEquals(deserialized, test1); + + serialized = serializer.serialize(test1); + Serializer genericDeserializer = AvroSerializerFactory.genericDeserializer(config, null); + Object genericDeserialized = genericDeserializer.deserialize(serialized); + assertTrue(genericDeserialized instanceof GenericRecord); + assertEquals(((GenericRecord) genericDeserialized).get("name").toString(), "name"); + assertEquals(((GenericRecord) genericDeserialized).get("field1"), 1); + + // multi type + Test2 test2 = new Test2("name", 1, "2"); + + AvroSchema schema1Base = AvroSchema.ofSpecificRecord(Test1.class); + AvroSchema schema2Base = AvroSchema.ofSpecificRecord(Test2.class); + Map, AvroSchema> map = new HashMap<>(); + map.put(Test1.class, schema1Base); + map.put(Test2.class, schema2Base); + Serializer multiSerializer = AvroSerializerFactory.multiTypeSerializer(config, map); + serialized = multiSerializer.serialize(test1); + Serializer multiDeserializer = AvroSerializerFactory.multiTypeDeserializer(config, map); + SpecificRecordBase deserialized2 = multiDeserializer.deserialize(serialized); + assertEquals(deserialized2, test1); + + serialized = multiSerializer.serialize(test2); + deserialized2 = multiDeserializer.deserialize(serialized); + assertEquals(deserialized2, test2); + + Map, AvroSchema> map2 = new HashMap<>(); + map2.put(Test1.class, schema1Base); + Serializer> fallbackDeserializer = AvroSerializerFactory.typedOrGenericDeserializer(config, map2); + + serialized = multiSerializer.serialize(test1); + Either fallback = fallbackDeserializer.deserialize(serialized); + assertTrue(fallback.isLeft()); + assertEquals(fallback.getLeft(), test1); + + serialized = multiSerializer.serialize(test2); + + fallback = fallbackDeserializer.deserialize(serialized); + assertTrue(fallback.isRight()); + } + + @Test + @SneakyThrows + public void 
testAvroSerializersReflect() { + TestClass test1 = new TestClass("name"); + AvroSchema schema1 = AvroSchema.of(TestClass.class); + + SchemaRegistryClient client = mock(SchemaRegistryClient.class); + + SerializerConfig config = SerializerConfig.builder().registryClient(client).groupId("groupId").build(); + + VersionInfo versionInfo1 = new VersionInfo("name", 0, 0); + doAnswer(x -> GroupProperties.builder().serializationFormat(SerializationFormat.Any).build()) + .when(client).getGroupProperties(anyString()); + doAnswer(x -> versionInfo1).when(client).getVersionForSchema(anyString(), eq(schema1.getSchemaInfo())); + doAnswer(x -> new EncodingId(0)).when(client).getEncodingId(anyString(), eq(versionInfo1), any()); + doAnswer(x -> new EncodingInfo(versionInfo1, schema1.getSchemaInfo(), Codecs.None.getCodec().getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(0))); + doAnswer(x -> true).when(client).canReadUsing(anyString(), any()); + + Serializer serializer = AvroSerializerFactory.serializer(config, schema1); + ByteBuffer serialized = serializer.serialize(test1); + + Serializer deserializer = AvroSerializerFactory.deserializer(config, schema1); + TestClass deserialized = deserializer.deserialize(serialized); + assertEquals(deserialized, test1); + } + + @Data + @NoArgsConstructor + public static class TestClass { + private String test; + + public TestClass(String test) { + this.test = test; + } + } +} diff --git a/serializers/avro/src/test/java/io/pravega/schemaregistry/serializer/avro/schemas/SchemasTest.java b/serializers/avro/src/test/java/io/pravega/schemaregistry/serializer/avro/schemas/SchemasTest.java new file mode 100644 index 000000000..7b5b2e8b8 --- /dev/null +++ b/serializers/avro/src/test/java/io/pravega/schemaregistry/serializer/avro/schemas/SchemasTest.java @@ -0,0 +1,46 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.serializer.avro.schemas; + +import io.pravega.schemaregistry.contract.data.SerializationFormat; +import io.pravega.schemaregistry.serializer.avro.testobjs.SchemaDefinitions; +import io.pravega.schemaregistry.serializer.shared.testobjs.User; +import io.pravega.schemaregistry.serializer.avro.testobjs.generated.Test1; +import io.pravega.schemaregistry.serializer.avro.testobjs.generated.Test2; +import org.apache.avro.specific.SpecificRecordBase; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; + +public class SchemasTest { + @Test + public void testAvroSchema() { + AvroSchema schema = AvroSchema.of(SchemaDefinitions.SCHEMA1); + assertNotNull(schema.getSchema()); + assertEquals(schema.getSchemaInfo().getSerializationFormat(), SerializationFormat.Avro); + + AvroSchema schema2 = AvroSchema.of(User.class); + assertNotNull(schema2.getSchema()); + assertEquals(schema2.getSchemaInfo().getSerializationFormat(), SerializationFormat.Avro); + + AvroSchema schema3 = AvroSchema.of(Test1.class); + assertNotNull(schema3.getSchema()); + assertEquals(schema3.getSchemaInfo().getSerializationFormat(), SerializationFormat.Avro); + + AvroSchema schemabase1 = AvroSchema.ofSpecificRecord(Test1.class); + assertNotNull(schemabase1.getSchema()); + assertEquals(schemabase1.getSchemaInfo().getSerializationFormat(), SerializationFormat.Avro); + + AvroSchema schemabase2 = AvroSchema.ofSpecificRecord(Test2.class); + assertNotNull(schemabase2.getSchema()); + assertEquals(schemabase2.getSchemaInfo().getSerializationFormat(), SerializationFormat.Avro); + } +} diff --git a/serializers/avro/src/test/java/io/pravega/schemaregistry/serializer/avro/testobjs/SchemaDefinitions.java b/serializers/avro/src/test/java/io/pravega/schemaregistry/serializer/avro/testobjs/SchemaDefinitions.java new file mode 100644 index 000000000..36a2a8525 --- /dev/null +++ b/serializers/avro/src/test/java/io/pravega/schemaregistry/serializer/avro/testobjs/SchemaDefinitions.java @@ -0,0 +1,51 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.serializer.avro.testobjs; + +import org.apache.avro.Schema; +import org.apache.avro.SchemaBuilder; + +public class SchemaDefinitions { + public static final Schema ENUM = SchemaBuilder + .enumeration("a").symbols("a", "b", "c"); + + public static final Schema SCHEMA1 = SchemaBuilder + .record("MyTest") + .fields() + .name("a") + .type(Schema.create(Schema.Type.STRING)) + .noDefault() + .endRecord(); + + public static final Schema SCHEMA2 = SchemaBuilder + .record("MyTest") + .fields() + .name("a") + .type(Schema.create(Schema.Type.STRING)) + .noDefault() + .name("b") + .type(Schema.create(Schema.Type.STRING)) + .withDefault("backwardPolicy compatible with schema1") + .endRecord(); + + public static final Schema SCHEMA3 = SchemaBuilder + .record("MyTest") + .fields() + .name("a") + .type(Schema.create(Schema.Type.STRING)) + .noDefault() + .name("b") + .type(Schema.create(Schema.Type.STRING)) + .noDefault() + .name("c") + .type(Schema.create(Schema.Type.STRING)) + .noDefault() + .endRecord(); +} diff --git a/serializers/src/test/java/io/pravega/schemaregistry/testobjs/generated/Test1.java b/serializers/avro/src/test/java/io/pravega/schemaregistry/serializer/avro/testobjs/generated/Test1.java similarity index 87% rename from serializers/src/test/java/io/pravega/schemaregistry/testobjs/generated/Test1.java rename to serializers/avro/src/test/java/io/pravega/schemaregistry/serializer/avro/testobjs/generated/Test1.java index 3cbe85040..6943dba77 100644 --- a/serializers/src/test/java/io/pravega/schemaregistry/testobjs/generated/Test1.java +++ b/serializers/avro/src/test/java/io/pravega/schemaregistry/serializer/avro/testobjs/generated/Test1.java @@ -3,9 +3,8 @@ * * DO NOT EDIT DIRECTLY */ -package io.pravega.schemaregistry.testobjs.generated; +package io.pravega.schemaregistry.serializer.avro.testobjs.generated; -import org.apache.avro.generic.GenericArray; import org.apache.avro.specific.SpecificData; import org.apache.avro.util.Utf8; import org.apache.avro.message.BinaryMessageEncoder; @@ -150,8 +149,8 @@ public void setField1(int value) { * Creates a new Test1 RecordBuilder. * @return A new Test1 RecordBuilder */ - public static io.pravega.schemaregistry.testobjs.generated.Test1.Builder newBuilder() { - return new io.pravega.schemaregistry.testobjs.generated.Test1.Builder(); + public static Test1.Builder newBuilder() { + return new Test1.Builder(); } /** @@ -159,11 +158,11 @@ public static io.pravega.schemaregistry.testobjs.generated.Test1.Builder newBuil * @param other The existing builder to copy. * @return A new Test1 RecordBuilder */ - public static io.pravega.schemaregistry.testobjs.generated.Test1.Builder newBuilder(io.pravega.schemaregistry.testobjs.generated.Test1.Builder other) { + public static Test1.Builder newBuilder(Test1.Builder other) { if (other == null) { - return new io.pravega.schemaregistry.testobjs.generated.Test1.Builder(); + return new Test1.Builder(); } else { - return new io.pravega.schemaregistry.testobjs.generated.Test1.Builder(other); + return new Test1.Builder(other); } } @@ -172,11 +171,11 @@ public static io.pravega.schemaregistry.testobjs.generated.Test1.Builder newBuil * @param other The existing instance to copy. 
* @return A new Test1 RecordBuilder */ - public static io.pravega.schemaregistry.testobjs.generated.Test1.Builder newBuilder(io.pravega.schemaregistry.testobjs.generated.Test1 other) { + public static Test1.Builder newBuilder(Test1 other) { if (other == null) { - return new io.pravega.schemaregistry.testobjs.generated.Test1.Builder(); + return new Test1.Builder(); } else { - return new io.pravega.schemaregistry.testobjs.generated.Test1.Builder(other); + return new Test1.Builder(other); } } @@ -198,7 +197,7 @@ private Builder() { * Creates a Builder by copying an existing Builder. * @param other The existing Builder to copy. */ - private Builder(io.pravega.schemaregistry.testobjs.generated.Test1.Builder other) { + private Builder(Test1.Builder other) { super(other); if (isValidValue(fields()[0], other.name)) { this.name = data().deepCopy(fields()[0].schema(), other.name); @@ -214,7 +213,7 @@ private Builder(io.pravega.schemaregistry.testobjs.generated.Test1.Builder other * Creates a Builder by copying an existing Test1 instance * @param other The existing instance to copy. */ - private Builder(io.pravega.schemaregistry.testobjs.generated.Test1 other) { + private Builder(Test1 other) { super(SCHEMA$); if (isValidValue(fields()[0], other.name)) { this.name = data().deepCopy(fields()[0].schema(), other.name); @@ -240,7 +239,7 @@ public CharSequence getName() { * @param value The value of 'name'. * @return This builder. */ - public io.pravega.schemaregistry.testobjs.generated.Test1.Builder setName(CharSequence value) { + public Test1.Builder setName(CharSequence value) { validate(fields()[0], value); this.name = value; fieldSetFlags()[0] = true; @@ -260,7 +259,7 @@ public boolean hasName() { * Clears the value of the 'name' field. * @return This builder. */ - public io.pravega.schemaregistry.testobjs.generated.Test1.Builder clearName() { + public Test1.Builder clearName() { name = null; fieldSetFlags()[0] = false; return this; @@ -280,7 +279,7 @@ public int getField1() { * @param value The value of 'field1'. * @return This builder. */ - public io.pravega.schemaregistry.testobjs.generated.Test1.Builder setField1(int value) { + public Test1.Builder setField1(int value) { validate(fields()[1], value); this.field1 = value; fieldSetFlags()[1] = true; @@ -300,7 +299,7 @@ public boolean hasField1() { * Clears the value of the 'field1' field. * @return This builder. 
*/ - public io.pravega.schemaregistry.testobjs.generated.Test1.Builder clearField1() { + public Test1.Builder clearField1() { fieldSetFlags()[1] = false; return this; } diff --git a/serializers/src/test/java/io/pravega/schemaregistry/testobjs/generated/Test2.java b/serializers/avro/src/test/java/io/pravega/schemaregistry/serializer/avro/testobjs/generated/Test2.java similarity index 88% rename from serializers/src/test/java/io/pravega/schemaregistry/testobjs/generated/Test2.java rename to serializers/avro/src/test/java/io/pravega/schemaregistry/serializer/avro/testobjs/generated/Test2.java index 3695a01a3..d9e7cc134 100644 --- a/serializers/src/test/java/io/pravega/schemaregistry/testobjs/generated/Test2.java +++ b/serializers/avro/src/test/java/io/pravega/schemaregistry/serializer/avro/testobjs/generated/Test2.java @@ -3,9 +3,8 @@ * * DO NOT EDIT DIRECTLY */ -package io.pravega.schemaregistry.testobjs.generated; +package io.pravega.schemaregistry.serializer.avro.testobjs.generated; -import org.apache.avro.generic.GenericArray; import org.apache.avro.specific.SpecificData; import org.apache.avro.util.Utf8; import org.apache.avro.message.BinaryMessageEncoder; @@ -172,8 +171,8 @@ public void setField2(CharSequence value) { * Creates a new Test2 RecordBuilder. * @return A new Test2 RecordBuilder */ - public static io.pravega.schemaregistry.testobjs.generated.Test2.Builder newBuilder() { - return new io.pravega.schemaregistry.testobjs.generated.Test2.Builder(); + public static Test2.Builder newBuilder() { + return new Test2.Builder(); } /** @@ -181,11 +180,11 @@ public static io.pravega.schemaregistry.testobjs.generated.Test2.Builder newBuil * @param other The existing builder to copy. * @return A new Test2 RecordBuilder */ - public static io.pravega.schemaregistry.testobjs.generated.Test2.Builder newBuilder(io.pravega.schemaregistry.testobjs.generated.Test2.Builder other) { + public static Test2.Builder newBuilder(Test2.Builder other) { if (other == null) { - return new io.pravega.schemaregistry.testobjs.generated.Test2.Builder(); + return new Test2.Builder(); } else { - return new io.pravega.schemaregistry.testobjs.generated.Test2.Builder(other); + return new Test2.Builder(other); } } @@ -194,11 +193,11 @@ public static io.pravega.schemaregistry.testobjs.generated.Test2.Builder newBuil * @param other The existing instance to copy. * @return A new Test2 RecordBuilder */ - public static io.pravega.schemaregistry.testobjs.generated.Test2.Builder newBuilder(io.pravega.schemaregistry.testobjs.generated.Test2 other) { + public static Test2.Builder newBuilder(Test2 other) { if (other == null) { - return new io.pravega.schemaregistry.testobjs.generated.Test2.Builder(); + return new Test2.Builder(); } else { - return new io.pravega.schemaregistry.testobjs.generated.Test2.Builder(other); + return new Test2.Builder(other); } } @@ -221,7 +220,7 @@ private Builder() { * Creates a Builder by copying an existing Builder. * @param other The existing Builder to copy. */ - private Builder(io.pravega.schemaregistry.testobjs.generated.Test2.Builder other) { + private Builder(Test2.Builder other) { super(other); if (isValidValue(fields()[0], other.name)) { this.name = data().deepCopy(fields()[0].schema(), other.name); @@ -241,7 +240,7 @@ private Builder(io.pravega.schemaregistry.testobjs.generated.Test2.Builder other * Creates a Builder by copying an existing Test2 instance * @param other The existing instance to copy. 
*/ - private Builder(io.pravega.schemaregistry.testobjs.generated.Test2 other) { + private Builder(Test2 other) { super(SCHEMA$); if (isValidValue(fields()[0], other.name)) { this.name = data().deepCopy(fields()[0].schema(), other.name); @@ -271,7 +270,7 @@ public CharSequence getName() { * @param value The value of 'name'. * @return This builder. */ - public io.pravega.schemaregistry.testobjs.generated.Test2.Builder setName(CharSequence value) { + public Test2.Builder setName(CharSequence value) { validate(fields()[0], value); this.name = value; fieldSetFlags()[0] = true; @@ -291,7 +290,7 @@ public boolean hasName() { * Clears the value of the 'name' field. * @return This builder. */ - public io.pravega.schemaregistry.testobjs.generated.Test2.Builder clearName() { + public Test2.Builder clearName() { name = null; fieldSetFlags()[0] = false; return this; @@ -311,7 +310,7 @@ public int getField1() { * @param value The value of 'field1'. * @return This builder. */ - public io.pravega.schemaregistry.testobjs.generated.Test2.Builder setField1(int value) { + public Test2.Builder setField1(int value) { validate(fields()[1], value); this.field1 = value; fieldSetFlags()[1] = true; @@ -331,7 +330,7 @@ public boolean hasField1() { * Clears the value of the 'field1' field. * @return This builder. */ - public io.pravega.schemaregistry.testobjs.generated.Test2.Builder clearField1() { + public Test2.Builder clearField1() { fieldSetFlags()[1] = false; return this; } @@ -350,7 +349,7 @@ public CharSequence getField2() { * @param value The value of 'field2'. * @return This builder. */ - public io.pravega.schemaregistry.testobjs.generated.Test2.Builder setField2(CharSequence value) { + public Test2.Builder setField2(CharSequence value) { validate(fields()[2], value); this.field2 = value; fieldSetFlags()[2] = true; @@ -370,7 +369,7 @@ public boolean hasField2() { * Clears the value of the 'field2' field. * @return This builder. */ - public io.pravega.schemaregistry.testobjs.generated.Test2.Builder clearField2() { + public Test2.Builder clearField2() { field2 = null; fieldSetFlags()[2] = false; return this; diff --git a/serializers/src/test/java/io/pravega/schemaregistry/testobjs/generated/Test3.java b/serializers/avro/src/test/java/io/pravega/schemaregistry/serializer/avro/testobjs/generated/Test3.java similarity index 89% rename from serializers/src/test/java/io/pravega/schemaregistry/testobjs/generated/Test3.java rename to serializers/avro/src/test/java/io/pravega/schemaregistry/serializer/avro/testobjs/generated/Test3.java index 32a122681..338280b74 100644 --- a/serializers/src/test/java/io/pravega/schemaregistry/testobjs/generated/Test3.java +++ b/serializers/avro/src/test/java/io/pravega/schemaregistry/serializer/avro/testobjs/generated/Test3.java @@ -3,9 +3,8 @@ * * DO NOT EDIT DIRECTLY */ -package io.pravega.schemaregistry.testobjs.generated; +package io.pravega.schemaregistry.serializer.avro.testobjs.generated; -import org.apache.avro.generic.GenericArray; import org.apache.avro.specific.SpecificData; import org.apache.avro.util.Utf8; import org.apache.avro.message.BinaryMessageEncoder; @@ -194,8 +193,8 @@ public void setField3(CharSequence value) { * Creates a new Test3 RecordBuilder. 
* @return A new Test3 RecordBuilder */ - public static io.pravega.schemaregistry.testobjs.generated.Test3.Builder newBuilder() { - return new io.pravega.schemaregistry.testobjs.generated.Test3.Builder(); + public static Test3.Builder newBuilder() { + return new Test3.Builder(); } /** @@ -203,11 +202,11 @@ public static io.pravega.schemaregistry.testobjs.generated.Test3.Builder newBuil * @param other The existing builder to copy. * @return A new Test3 RecordBuilder */ - public static io.pravega.schemaregistry.testobjs.generated.Test3.Builder newBuilder(io.pravega.schemaregistry.testobjs.generated.Test3.Builder other) { + public static Test3.Builder newBuilder(Test3.Builder other) { if (other == null) { - return new io.pravega.schemaregistry.testobjs.generated.Test3.Builder(); + return new Test3.Builder(); } else { - return new io.pravega.schemaregistry.testobjs.generated.Test3.Builder(other); + return new Test3.Builder(other); } } @@ -216,11 +215,11 @@ public static io.pravega.schemaregistry.testobjs.generated.Test3.Builder newBuil * @param other The existing instance to copy. * @return A new Test3 RecordBuilder */ - public static io.pravega.schemaregistry.testobjs.generated.Test3.Builder newBuilder(io.pravega.schemaregistry.testobjs.generated.Test3 other) { + public static Test3.Builder newBuilder(Test3 other) { if (other == null) { - return new io.pravega.schemaregistry.testobjs.generated.Test3.Builder(); + return new Test3.Builder(); } else { - return new io.pravega.schemaregistry.testobjs.generated.Test3.Builder(other); + return new Test3.Builder(other); } } @@ -244,7 +243,7 @@ private Builder() { * Creates a Builder by copying an existing Builder. * @param other The existing Builder to copy. */ - private Builder(io.pravega.schemaregistry.testobjs.generated.Test3.Builder other) { + private Builder(Test3.Builder other) { super(other); if (isValidValue(fields()[0], other.name)) { this.name = data().deepCopy(fields()[0].schema(), other.name); @@ -268,7 +267,7 @@ private Builder(io.pravega.schemaregistry.testobjs.generated.Test3.Builder other * Creates a Builder by copying an existing Test3 instance * @param other The existing instance to copy. */ - private Builder(io.pravega.schemaregistry.testobjs.generated.Test3 other) { + private Builder(Test3 other) { super(SCHEMA$); if (isValidValue(fields()[0], other.name)) { this.name = data().deepCopy(fields()[0].schema(), other.name); @@ -302,7 +301,7 @@ public CharSequence getName() { * @param value The value of 'name'. * @return This builder. */ - public io.pravega.schemaregistry.testobjs.generated.Test3.Builder setName(CharSequence value) { + public Test3.Builder setName(CharSequence value) { validate(fields()[0], value); this.name = value; fieldSetFlags()[0] = true; @@ -322,7 +321,7 @@ public boolean hasName() { * Clears the value of the 'name' field. * @return This builder. */ - public io.pravega.schemaregistry.testobjs.generated.Test3.Builder clearName() { + public Test3.Builder clearName() { name = null; fieldSetFlags()[0] = false; return this; @@ -342,7 +341,7 @@ public int getField1() { * @param value The value of 'field1'. * @return This builder. */ - public io.pravega.schemaregistry.testobjs.generated.Test3.Builder setField1(int value) { + public Test3.Builder setField1(int value) { validate(fields()[1], value); this.field1 = value; fieldSetFlags()[1] = true; @@ -362,7 +361,7 @@ public boolean hasField1() { * Clears the value of the 'field1' field. * @return This builder. 
*/ - public io.pravega.schemaregistry.testobjs.generated.Test3.Builder clearField1() { + public Test3.Builder clearField1() { fieldSetFlags()[1] = false; return this; } @@ -381,7 +380,7 @@ public CharSequence getField2() { * @param value The value of 'field2'. * @return This builder. */ - public io.pravega.schemaregistry.testobjs.generated.Test3.Builder setField2(CharSequence value) { + public Test3.Builder setField2(CharSequence value) { validate(fields()[2], value); this.field2 = value; fieldSetFlags()[2] = true; @@ -401,7 +400,7 @@ public boolean hasField2() { * Clears the value of the 'field2' field. * @return This builder. */ - public io.pravega.schemaregistry.testobjs.generated.Test3.Builder clearField2() { + public Test3.Builder clearField2() { field2 = null; fieldSetFlags()[2] = false; return this; @@ -421,7 +420,7 @@ public CharSequence getField3() { * @param value The value of 'field3'. * @return This builder. */ - public io.pravega.schemaregistry.testobjs.generated.Test3.Builder setField3(CharSequence value) { + public Test3.Builder setField3(CharSequence value) { validate(fields()[3], value); this.field3 = value; fieldSetFlags()[3] = true; @@ -441,7 +440,7 @@ public boolean hasField3() { * Clears the value of the 'field3' field. * @return This builder. */ - public io.pravega.schemaregistry.testobjs.generated.Test3.Builder clearField3() { + public Test3.Builder clearField3() { field3 = null; fieldSetFlags()[3] = false; return this; diff --git a/serializers/avro/src/test/resources/avro/avroTest1.avsc b/serializers/avro/src/test/resources/avro/avroTest1.avsc new file mode 100644 index 000000000..a7d5e71ea --- /dev/null +++ b/serializers/avro/src/test/resources/avro/avroTest1.avsc @@ -0,0 +1,9 @@ +{ + "namespace": "io.pravega.schemaregistry.testobjs.generated", + "type": "record", + "name": "Type1", + "fields": [ + {"name": "a", "type": "string"}, + {"name": "b", "type": "int"} + ] +} \ No newline at end of file diff --git a/serializers/avro/src/test/resources/avro/avroTest2.avsc b/serializers/avro/src/test/resources/avro/avroTest2.avsc new file mode 100644 index 000000000..f0ebc9c52 --- /dev/null +++ b/serializers/avro/src/test/resources/avro/avroTest2.avsc @@ -0,0 +1,10 @@ +{ + "namespace": "io.pravega.schemaregistry.testobjs.generated", + "type": "record", + "name": "Type2", + "fields": [ + {"name": "c", "type": "string"}, + {"name": "d", "type": "int"}, + {"name": "e", "type": "string"} + ] +} \ No newline at end of file diff --git a/serializers/avro/src/test/resources/avro/avroTest3.avsc b/serializers/avro/src/test/resources/avro/avroTest3.avsc new file mode 100644 index 000000000..ed07a0543 --- /dev/null +++ b/serializers/avro/src/test/resources/avro/avroTest3.avsc @@ -0,0 +1,11 @@ +{ + "namespace": "io.pravega.schemaregistry.testobjs.generated", + "type": "record", + "name": "Type3", + "fields": [ + {"name": "f", "type": "string"}, + {"name": "g", "type": "int"}, + {"name": "h", "type": "string"}, + {"name": "i", "type": "string"} + ] +} \ No newline at end of file diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonDeserializer.java b/serializers/json/src/main/java/io/pravega/schemaregistry/serializer/json/impl/JsonDeserializer.java similarity index 68% rename from serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonDeserializer.java rename to serializers/json/src/main/java/io/pravega/schemaregistry/serializer/json/impl/JsonDeserializer.java index 0a8837df6..7267e1495 100644 --- 
a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonDeserializer.java +++ b/serializers/json/src/main/java/io/pravega/schemaregistry/serializer/json/impl/JsonDeserializer.java @@ -7,25 +7,28 @@ * * http://www.apache.org/licenses/LICENSE-2.0 */ -package io.pravega.schemaregistry.serializers; +package io.pravega.schemaregistry.serializer.json.impl; import com.fasterxml.jackson.annotation.PropertyAccessor; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Preconditions; import io.pravega.schemaregistry.client.SchemaRegistryClient; import io.pravega.schemaregistry.contract.data.SchemaInfo; -import io.pravega.schemaregistry.schemas.JSONSchema; +import io.pravega.schemaregistry.serializer.json.schemas.JSONSchema; +import io.pravega.schemaregistry.serializer.shared.impl.AbstractDeserializer; +import io.pravega.schemaregistry.serializer.shared.impl.EncodingCache; +import io.pravega.schemaregistry.serializer.shared.impl.SerializerConfig; import java.io.IOException; import java.io.InputStream; import static com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility; -class JsonDeserializer extends AbstractDeserializer { +public class JsonDeserializer extends AbstractDeserializer { private final JSONSchema jsonSchema; private final ObjectMapper objectMapper; - JsonDeserializer(String groupId, SchemaRegistryClient client, + public JsonDeserializer(String groupId, SchemaRegistryClient client, JSONSchema schema, SerializerConfig.Decoders decoders, EncodingCache encodingCache, boolean encodeHeader) { super(groupId, client, schema, true, decoders, encodingCache, encodeHeader); @@ -36,7 +39,7 @@ class JsonDeserializer extends AbstractDeserializer { } @Override - protected T deserialize(InputStream inputStream, SchemaInfo writerSchemaInfo, SchemaInfo readerSchemaInfo) throws IOException { + public final T deserialize(InputStream inputStream, SchemaInfo writerSchemaInfo, SchemaInfo readerSchemaInfo) throws IOException { return objectMapper.readValue(inputStream, jsonSchema.getDerived()); } } diff --git a/serializers/json/src/main/java/io/pravega/schemaregistry/serializer/json/impl/JsonGenericDeserializer.java b/serializers/json/src/main/java/io/pravega/schemaregistry/serializer/json/impl/JsonGenericDeserializer.java new file mode 100644 index 000000000..53f08a345 --- /dev/null +++ b/serializers/json/src/main/java/io/pravega/schemaregistry/serializer/json/impl/JsonGenericDeserializer.java @@ -0,0 +1,39 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.serializer.json.impl; + +import com.fasterxml.jackson.annotation.JsonAutoDetect; +import com.fasterxml.jackson.annotation.PropertyAccessor; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import io.pravega.schemaregistry.client.SchemaRegistryClient; +import io.pravega.schemaregistry.contract.data.SchemaInfo; +import io.pravega.schemaregistry.serializer.shared.impl.AbstractDeserializer; +import io.pravega.schemaregistry.serializer.shared.impl.EncodingCache; +import io.pravega.schemaregistry.serializer.shared.impl.SerializerConfig; + +import java.io.IOException; +import java.io.InputStream; + +public class JsonGenericDeserializer extends AbstractDeserializer { + private final ObjectMapper objectMapper; + + public JsonGenericDeserializer(String groupId, SchemaRegistryClient client, + SerializerConfig.Decoders decoders, EncodingCache encodingCache, boolean encodeHeader) { + super(groupId, client, null, false, decoders, encodingCache, encodeHeader); + this.objectMapper = new ObjectMapper(); + objectMapper.setVisibility(PropertyAccessor.ALL, JsonAutoDetect.Visibility.ANY); + } + + @Override + public final JsonNode deserialize(InputStream inputStream, SchemaInfo writerSchemaInfo, SchemaInfo readerSchemaInfo) throws IOException { + return objectMapper.readTree(inputStream); + } +} diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonSerializer.java b/serializers/json/src/main/java/io/pravega/schemaregistry/serializer/json/impl/JsonSerializer.java similarity index 74% rename from serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonSerializer.java rename to serializers/json/src/main/java/io/pravega/schemaregistry/serializer/json/impl/JsonSerializer.java index 8c896a3ab..709a4671d 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonSerializer.java +++ b/serializers/json/src/main/java/io/pravega/schemaregistry/serializer/json/impl/JsonSerializer.java @@ -7,22 +7,23 @@ * * http://www.apache.org/licenses/LICENSE-2.0 */ -package io.pravega.schemaregistry.serializers; +package io.pravega.schemaregistry.serializer.json.impl; import com.fasterxml.jackson.annotation.JsonAutoDetect; import com.fasterxml.jackson.annotation.PropertyAccessor; import com.fasterxml.jackson.databind.ObjectMapper; import io.pravega.schemaregistry.client.SchemaRegistryClient; -import io.pravega.schemaregistry.codec.Encoder; +import io.pravega.schemaregistry.serializer.shared.codec.Encoder; import io.pravega.schemaregistry.contract.data.SchemaInfo; -import io.pravega.schemaregistry.schemas.JSONSchema; +import io.pravega.schemaregistry.serializer.json.schemas.JSONSchema; +import io.pravega.schemaregistry.serializer.shared.impl.AbstractSerializer; import java.io.IOException; import java.io.OutputStream; -class JsonSerializer extends AbstractSerializer { +public class JsonSerializer extends AbstractSerializer { private final ObjectMapper objectMapper; - JsonSerializer(String groupId, SchemaRegistryClient client, JSONSchema schema, + public JsonSerializer(String groupId, SchemaRegistryClient client, JSONSchema schema, Encoder encoder, boolean registerSchema, boolean encodeHeader) { super(groupId, client, schema, encoder, registerSchema, encodeHeader); objectMapper = new ObjectMapper(); diff --git 
a/serializers/json/src/main/java/io/pravega/schemaregistry/serializer/json/impl/JsonSerializerFactory.java b/serializers/json/src/main/java/io/pravega/schemaregistry/serializer/json/impl/JsonSerializerFactory.java new file mode 100644 index 000000000..b25536be4 --- /dev/null +++ b/serializers/json/src/main/java/io/pravega/schemaregistry/serializer/json/impl/JsonSerializerFactory.java @@ -0,0 +1,201 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.serializer.json.impl; + +import com.fasterxml.jackson.databind.JsonNode; +import com.google.common.base.Preconditions; +import io.pravega.client.stream.Serializer; +import io.pravega.schemaregistry.client.SchemaRegistryClient; +import io.pravega.schemaregistry.common.Either; +import io.pravega.schemaregistry.contract.data.EncodingInfo; +import io.pravega.schemaregistry.serializer.json.schemas.JSONSchema; +import io.pravega.schemaregistry.serializer.shared.impl.AbstractDeserializer; +import io.pravega.schemaregistry.serializer.shared.impl.AbstractSerializer; +import io.pravega.schemaregistry.serializer.shared.impl.EncodingCache; +import io.pravega.schemaregistry.serializer.shared.impl.MultiplexedAndGenericDeserializer; +import io.pravega.schemaregistry.serializer.shared.impl.MultiplexedDeserializer; +import io.pravega.schemaregistry.serializer.shared.impl.MultiplexedSerializer; +import io.pravega.schemaregistry.serializer.shared.impl.SerializerConfig; +import lombok.NonNull; +import lombok.extern.slf4j.Slf4j; + +import java.nio.ByteBuffer; +import java.util.Map; +import java.util.stream.Collectors; + +import static io.pravega.schemaregistry.serializer.shared.impl.SerializerFactoryHelper.initForDeserializer; +import static io.pravega.schemaregistry.serializer.shared.impl.SerializerFactoryHelper.initForSerializer; + +/** + * Internal Factory class for json serializers and deserializers. + */ +@Slf4j +public class JsonSerializerFactory { + /** + * Creates a typed json serializer for the Schema. The serializer implementation returned from this method is + * responsible for interacting with schema registry service and ensures that only valid registered schema can be used. + * + * Note: the returned serializer only implements {@link Serializer#serialize(Object)}. + * It does not implement {@link Serializer#deserialize(ByteBuffer)}. + * + * @param config Serializer Config used for instantiating a new serializer. + * @param schema Schema container that encapsulates an Json Schema. + * @param Type of event. + * @return A Serializer Implementation that can be used in {@link io.pravega.client.stream.EventStreamWriter} or + * {@link io.pravega.client.stream.TransactionalEventStreamWriter}. + */ + public static Serializer serializer(@NonNull SerializerConfig config, @NonNull JSONSchema schema) { + String groupId = config.getGroupId(); + SchemaRegistryClient schemaRegistryClient = initForSerializer(config); + return new JsonSerializer<>(groupId, schemaRegistryClient, schema, config.getEncoder(), + config.isRegisterSchema(), config.isWriteEncodingHeader()); + } + + /** + * Creates a typed json deserializer for the Schema. 
The deserializer implementation returned from this method is + responsible for interacting with the schema registry service and validates the writer schema before using it. + * + * Note: the returned serializer only implements {@link Serializer#deserialize(ByteBuffer)}. + * It does not implement {@link Serializer#serialize(Object)}. + * + * @param config Serializer Config used for instantiating a new serializer. + * @param schema Schema container that encapsulates a JSONSchema + * @param <T> Type of event. The typed event should be a Jackson-serializable class. For generic type use + * {@link #genericDeserializer(SerializerConfig)} + * @return A deserializer implementation that can be used in {@link io.pravega.client.stream.EventStreamReader}. + */ + public static <T> Serializer<T> deserializer(@NonNull SerializerConfig config, @NonNull JSONSchema<T> schema) { + String groupId = config.getGroupId(); + SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); + + EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); + + // schema can be null in which case deserialization will happen into dynamic message + return new JsonDeserializer<>(groupId, schemaRegistryClient, schema, config.getDecoders(), encodingCache, + config.isWriteEncodingHeader()); + } + + /** + * Creates a generic json deserializer. + * + * Note: the returned serializer only implements {@link Serializer#deserialize(ByteBuffer)}. + * It does not implement {@link Serializer#serialize(Object)}. + * + * @param config Serializer Config used for instantiating a new serializer. + * @return A deserializer implementation that can be used in {@link io.pravega.client.stream.EventStreamReader}. + */ + public static Serializer<JsonNode> genericDeserializer(@NonNull SerializerConfig config) { + SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); + + String groupId = config.getGroupId(); + + EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); + + return new JsonGenericDeserializer(groupId, schemaRegistryClient, config.getDecoders(), + encodingCache, config.isWriteEncodingHeader()); + } + + /** + * Creates a generic json deserializer which deserializes bytes into a json string. + * + * Note: the returned serializer only implements {@link Serializer#deserialize(ByteBuffer)}. + * It does not implement {@link Serializer#serialize(Object)}. + * + * @param config Serializer Config used for instantiating a new serializer. + * @return A deserializer implementation that can be used in {@link io.pravega.client.stream.EventStreamReader}. + */ + public static Serializer<String> deserializeAsString(@NonNull SerializerConfig config) { + SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); + + String groupId = config.getGroupId(); + + EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); + + return new JsonStringDeserializer(groupId, schemaRegistryClient, config.getDecoders(), encodingCache, config.isWriteEncodingHeader()); + } + + /** + * A multiplexed JSON serializer that takes a map of schemas and validates them individually. + * + * @param config Serializer config. + * @param schemas map of json schemas. + * @param <T> Base type of schemas. + * @return a Serializer which can serialize events of different types for which schemas are supplied.
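// -------------------------------------------------------------------------------------------------
// Illustrative usage sketch (not part of the original patch): how the JsonSerializerFactory methods
// above are exercised, mirroring the SerializerTest added later in this diff. Assumptions: a
// SchemaRegistryClient ("client") already wired to a registry with the group "groupId" created, and
// the DerivedUser1/Address test objects from the shared test module; the class name is hypothetical.
// -------------------------------------------------------------------------------------------------
import com.fasterxml.jackson.databind.JsonNode;
import io.pravega.client.stream.Serializer;
import io.pravega.schemaregistry.client.SchemaRegistryClient;
import io.pravega.schemaregistry.serializer.json.impl.JsonSerializerFactory;
import io.pravega.schemaregistry.serializer.json.schemas.JSONSchema;
import io.pravega.schemaregistry.serializer.shared.impl.SerializerConfig;
import io.pravega.schemaregistry.serializer.shared.testobjs.Address;
import io.pravega.schemaregistry.serializer.shared.testobjs.DerivedUser1;

import java.nio.ByteBuffer;

class JsonFactoryUsageSketch {
    static void roundTrip(SchemaRegistryClient client) {
        SerializerConfig config = SerializerConfig.builder()
                .registryClient(client)
                .groupId("groupId")
                .build();
        JSONSchema<DerivedUser1> schema = JSONSchema.of(DerivedUser1.class);

        // Writer side: the returned Serializer only supports serialize().
        Serializer<DerivedUser1> serializer = JsonSerializerFactory.serializer(config, schema);
        ByteBuffer bytes = serializer.serialize(new DerivedUser1("user", new Address("street", "city"), 2, "user1"));

        // Reader side: typed deserialization against the registered schema...
        Serializer<DerivedUser1> deserializer = JsonSerializerFactory.deserializer(config, schema);
        DerivedUser1 typed = deserializer.deserialize(bytes);

        // ...or schema-agnostic deserialization into a Jackson JsonNode or a raw JSON string.
        Serializer<JsonNode> generic = JsonSerializerFactory.genericDeserializer(config);
        JsonNode node = generic.deserialize(serializer.serialize(typed));
        Serializer<String> asString = JsonSerializerFactory.deserializeAsString(config);
        String json = asString.deserialize(serializer.serialize(typed));
    }
}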
+ */ + public static Serializer multiTypeSerializer( + @NonNull SerializerConfig config, @NonNull Map, JSONSchema> schemas) { + Preconditions.checkArgument(config.isWriteEncodingHeader(), "Events should be tagged with encoding ids."); + String groupId = config.getGroupId(); + SchemaRegistryClient schemaRegistryClient = initForSerializer(config); + Map, AbstractSerializer> serializerMap = getSerializerMap(config, schemas, groupId, schemaRegistryClient); + return new MultiplexedSerializer<>(serializerMap); + } + + private static Map, AbstractSerializer> getSerializerMap( + SerializerConfig config, Map, JSONSchema> schemas, String groupId, + SchemaRegistryClient schemaRegistryClient) { + return schemas.entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, + x -> new JsonSerializer<>(groupId, schemaRegistryClient, x.getValue(), config.getEncoder(), + config.isRegisterSchema(), config.isWriteEncodingHeader()))); + } + + /** + * A multiplexed json Deserializer that takes a map of schemas and deserializes events into those events depending + * on the object type information in {@link EncodingInfo}. + * + * @param config Serializer config. + * @param schemas map of json schemas. + * @param Base type of schemas. + * @return a Deserializer which can deserialize events of different types in the stream into typed objects. + */ + public static Serializer multiTypeDeserializer( + @NonNull SerializerConfig config, @NonNull Map, JSONSchema> schemas) { + Preconditions.checkArgument(config.isWriteEncodingHeader(), "Events should be tagged with encoding ids."); + String groupId = config.getGroupId(); + SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); + EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); + + Map> deserializerMap = getDeserializerMap(config, schemas, groupId, schemaRegistryClient, encodingCache); + return new MultiplexedDeserializer<>(groupId, schemaRegistryClient, + deserializerMap, config.getDecoders(), encodingCache); + } + + private static Map> getDeserializerMap( + SerializerConfig config, Map, JSONSchema> schemas, String groupId, + SchemaRegistryClient schemaRegistryClient, EncodingCache encodingCache) { + return schemas.values().stream().collect(Collectors.toMap(x -> x.getSchemaInfo().getType(), + x -> new JsonDeserializer<>(groupId, schemaRegistryClient, x, config.getDecoders(), + encodingCache, config.isWriteEncodingHeader()))); + } + + /** + * A multiplexed json Deserializer that takes a map of schemas and deserializes events into those events depending + * on the object type information in {@link EncodingInfo}. + * + * @param config Serializer config. + * @param schemas map of json schemas. + * @param Base type of schemas. + * @return a Deserializer which can deserialize events of different types in the stream into typed objects. 
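// -------------------------------------------------------------------------------------------------
// Illustrative usage sketch (not part of the original patch): the multiplexed JSON serializers
// defined above, again following the SerializerTest added later in this diff. Assumptions: an
// existing SchemaRegistryClient ("client") and group "groupId", the DerivedUser1/DerivedUser2/Address
// shared test objects, and a hypothetical helper class name.
// -------------------------------------------------------------------------------------------------
import com.fasterxml.jackson.databind.JsonNode;
import io.pravega.client.stream.Serializer;
import io.pravega.schemaregistry.client.SchemaRegistryClient;
import io.pravega.schemaregistry.common.Either;
import io.pravega.schemaregistry.serializer.json.impl.JsonSerializerFactory;
import io.pravega.schemaregistry.serializer.json.schemas.JSONSchema;
import io.pravega.schemaregistry.serializer.shared.impl.SerializerConfig;
import io.pravega.schemaregistry.serializer.shared.testobjs.Address;
import io.pravega.schemaregistry.serializer.shared.testobjs.DerivedUser1;
import io.pravega.schemaregistry.serializer.shared.testobjs.DerivedUser2;

import java.util.HashMap;
import java.util.Map;

class JsonMultiTypeUsageSketch {
    static void multiplexed(SchemaRegistryClient client) {
        SerializerConfig config = SerializerConfig.builder().registryClient(client).groupId("groupId").build();

        // One serializer handles several event types; each schema is registered and validated individually.
        Map<Class<? extends Object>, JSONSchema<Object>> schemas = new HashMap<>();
        schemas.put(DerivedUser1.class, JSONSchema.ofBaseType(DerivedUser1.class, Object.class));
        schemas.put(DerivedUser2.class, JSONSchema.ofBaseType(DerivedUser2.class, Object.class));

        Serializer<Object> serializer = JsonSerializerFactory.multiTypeSerializer(config, schemas);
        Serializer<Object> deserializer = JsonSerializerFactory.multiTypeDeserializer(config, schemas);
        Object event = deserializer.deserialize(
                serializer.serialize(new DerivedUser1("user", new Address("street", "city"), 2, "user1")));

        // When the reader only knows a subset of the schemas, unknown types fall back to a JsonNode.
        Map<Class<? extends Object>, JSONSchema<Object>> known = new HashMap<>();
        known.put(DerivedUser1.class, JSONSchema.ofBaseType(DerivedUser1.class, Object.class));
        Serializer<Either<Object, JsonNode>> fallback = JsonSerializerFactory.typedOrGenericDeserializer(config, known);
        Either<Object, JsonNode> result = fallback.deserialize(
                serializer.serialize(new DerivedUser2("user", new Address("street", "city"), 2, "user2")));
        // result.isLeft() -> a typed DerivedUser1; result.isRight() -> a raw JsonNode for unmapped types.
    }
}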
+ */ + public static Serializer> typedOrGenericDeserializer( + @NonNull SerializerConfig config, @NonNull Map, JSONSchema> schemas) { + Preconditions.checkArgument(config.isWriteEncodingHeader(), "Events should be tagged with encoding ids."); + String groupId = config.getGroupId(); + SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); + EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); + + Map> deserializerMap = getDeserializerMap(config, schemas, groupId, schemaRegistryClient, encodingCache); + JsonGenericDeserializer genericDeserializer = new JsonGenericDeserializer(groupId, schemaRegistryClient, config.getDecoders(), + encodingCache, config.isWriteEncodingHeader()); + + return new MultiplexedAndGenericDeserializer<>(groupId, schemaRegistryClient, + deserializerMap, genericDeserializer, config.getDecoders(), encodingCache); + } +} diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonStringDeserializer.java b/serializers/json/src/main/java/io/pravega/schemaregistry/serializer/json/impl/JsonStringDeserializer.java similarity index 78% rename from serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonStringDeserializer.java rename to serializers/json/src/main/java/io/pravega/schemaregistry/serializer/json/impl/JsonStringDeserializer.java index 5add11d9b..aeb092a80 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonStringDeserializer.java +++ b/serializers/json/src/main/java/io/pravega/schemaregistry/serializer/json/impl/JsonStringDeserializer.java @@ -7,13 +7,16 @@ * * http://www.apache.org/licenses/LICENSE-2.0 */ -package io.pravega.schemaregistry.serializers; +package io.pravega.schemaregistry.serializer.json.impl; import com.fasterxml.jackson.annotation.JsonAutoDetect; import com.fasterxml.jackson.annotation.PropertyAccessor; import com.fasterxml.jackson.databind.ObjectMapper; import io.pravega.schemaregistry.client.SchemaRegistryClient; import io.pravega.schemaregistry.contract.data.SchemaInfo; +import io.pravega.schemaregistry.serializer.shared.impl.AbstractDeserializer; +import io.pravega.schemaregistry.serializer.shared.impl.EncodingCache; +import io.pravega.schemaregistry.serializer.shared.impl.SerializerConfig; import java.io.IOException; import java.io.InputStream; @@ -31,7 +34,7 @@ class JsonStringDeserializer extends AbstractDeserializer { } @Override - protected String deserialize(InputStream inputStream, SchemaInfo writerSchemaInfo, SchemaInfo readerSchemaInfo) throws IOException { + public final String deserialize(InputStream inputStream, SchemaInfo writerSchemaInfo, SchemaInfo readerSchemaInfo) throws IOException { Object obj = objectMapper.readValue(inputStream, Object.class); return objectMapper.writeValueAsString(obj); } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/schemas/JSONSchema.java b/serializers/json/src/main/java/io/pravega/schemaregistry/serializer/json/schemas/JSONSchema.java similarity index 97% rename from serializers/src/main/java/io/pravega/schemaregistry/schemas/JSONSchema.java rename to serializers/json/src/main/java/io/pravega/schemaregistry/serializer/json/schemas/JSONSchema.java index c0206d7e2..6c489bcef 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/schemas/JSONSchema.java +++ b/serializers/json/src/main/java/io/pravega/schemaregistry/serializer/json/schemas/JSONSchema.java @@ -7,7 +7,7 @@ * * http://www.apache.org/licenses/LICENSE-2.0 */ -package io.pravega.schemaregistry.schemas; +package 
io.pravega.schemaregistry.serializer.json.schemas; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonNode; @@ -20,8 +20,8 @@ import com.google.common.collect.ImmutableMap; import io.pravega.schemaregistry.contract.data.SchemaInfo; import io.pravega.schemaregistry.contract.data.SerializationFormat; +import io.pravega.schemaregistry.serializer.shared.schemas.Schema; import lombok.Getter; -import org.apache.avro.specific.SpecificRecordBase; import org.everit.json.schema.loader.SchemaLoader; import org.everit.json.schema.loader.SpecificationVersion; import org.json.JSONObject; @@ -133,7 +133,7 @@ public static JSONSchema of(String type, String schemaString, Class tC * @param tBase Base class whose type is used in the JSON schema object. * @param tDerived Class whose schema should be used. * @param Type of base class. - * @return Returns an AvroSchema with {@link SpecificRecordBase} type. + * @return Returns an JsonSchema of type T. */ public static JSONSchema ofBaseType(Class tDerived, Class tBase) { Preconditions.checkNotNull(tDerived); diff --git a/serializers/json/src/test/java/io/pravega/schemaregistry/serializer/json/impl/SerializerTest.java b/serializers/json/src/test/java/io/pravega/schemaregistry/serializer/json/impl/SerializerTest.java new file mode 100644 index 000000000..b64a7de20 --- /dev/null +++ b/serializers/json/src/test/java/io/pravega/schemaregistry/serializer/json/impl/SerializerTest.java @@ -0,0 +1,203 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.serializer.json.impl; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.jsonFormatVisitors.JsonFormatTypes; +import com.fasterxml.jackson.databind.node.TextNode; +import com.fasterxml.jackson.module.jsonSchema.JsonSchema; +import com.google.common.base.Strings; +import com.google.common.collect.ImmutableMap; +import io.pravega.client.stream.Serializer; +import io.pravega.schemaregistry.client.SchemaRegistryClient; +import io.pravega.schemaregistry.serializer.shared.codec.Codecs; +import io.pravega.schemaregistry.common.Either; +import io.pravega.schemaregistry.contract.data.EncodingId; +import io.pravega.schemaregistry.contract.data.EncodingInfo; +import io.pravega.schemaregistry.contract.data.GroupProperties; +import io.pravega.schemaregistry.contract.data.SchemaWithVersion; +import io.pravega.schemaregistry.contract.data.SerializationFormat; +import io.pravega.schemaregistry.contract.data.VersionInfo; +import io.pravega.schemaregistry.serializer.json.schemas.JSONSchema; +import io.pravega.schemaregistry.serializer.shared.testobjs.Address; +import io.pravega.schemaregistry.serializer.shared.testobjs.DerivedUser1; +import io.pravega.schemaregistry.serializer.shared.testobjs.DerivedUser2; +import io.pravega.schemaregistry.serializer.shared.impl.SerializerConfig; +import lombok.Data; +import lombok.NoArgsConstructor; +import org.junit.Test; + +import java.io.IOException; +import java.nio.ByteBuffer; +import java.util.HashMap; +import java.util.Map; + +import static org.junit.Assert.assertTrue; +import static 
org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.never; + +public class SerializerTest { + @Test + public void testJsonSerializers() throws JsonProcessingException { + SchemaRegistryClient client = mock(SchemaRegistryClient.class); + SerializerConfig config = SerializerConfig.builder().registryClient(client).groupId("groupId").build(); + JSONSchema schema1 = JSONSchema.of(DerivedUser1.class); + JSONSchema schema2 = JSONSchema.of(DerivedUser2.class); + + VersionInfo versionInfo1 = new VersionInfo("name", 0, 0); + VersionInfo versionInfo2 = new VersionInfo("name", 1, 1); + doAnswer(x -> GroupProperties.builder().serializationFormat(SerializationFormat.Any).build()) + .when(client).getGroupProperties(anyString()); + doAnswer(x -> versionInfo1).when(client).getVersionForSchema(anyString(), eq(schema1.getSchemaInfo())); + doAnswer(x -> versionInfo2).when(client).getVersionForSchema(anyString(), eq(schema2.getSchemaInfo())); + doAnswer(x -> new EncodingId(0)).when(client).getEncodingId(anyString(), eq(versionInfo1), any()); + doAnswer(x -> new EncodingId(1)).when(client).getEncodingId(anyString(), eq(versionInfo2), any()); + doAnswer(x -> new EncodingInfo(versionInfo1, schema1.getSchemaInfo(), Codecs.None.getCodec().getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(0))); + doAnswer(x -> new EncodingInfo(versionInfo2, schema2.getSchemaInfo(), Codecs.None.getCodec().getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(1))); + doAnswer(x -> true).when(client).canReadUsing(anyString(), any()); + + Serializer serializer = JsonSerializerFactory.serializer(config, schema1); + DerivedUser1 user1 = new DerivedUser1("user", new Address("street", "city"), 2, "user1"); + ByteBuffer serialized = serializer.serialize(user1); + + Serializer deserializer = JsonSerializerFactory.deserializer(config, schema1); + DerivedUser1 deserialized = deserializer.deserialize(serialized); + assertEquals(deserialized, user1); + + serialized = serializer.serialize(user1); + Serializer genericDeserializer = JsonSerializerFactory.genericDeserializer(config); + JsonNode generic = genericDeserializer.deserialize(serialized); + assertEquals(generic.size(), 4); + + serialized = serializer.serialize(user1); + Serializer stringDeserializer = JsonSerializerFactory.deserializeAsString(config); + String str = stringDeserializer.deserialize(serialized); + assertFalse(Strings.isNullOrEmpty(str)); + + String schemaString = "{\"type\": \"object\",\"title\": \"The external data schema\",\"properties\": {\"content\": {\"type\": \"string\"}}}"; + + JSONSchema myData = JSONSchema.of("MyData", schemaString, HashMap.class); + VersionInfo versionInfo3 = new VersionInfo("myData", 0, 2); + doAnswer(x -> versionInfo3).when(client).getVersionForSchema(anyString(), eq(myData.getSchemaInfo())); + doAnswer(x -> new EncodingId(2)).when(client).getEncodingId(anyString(), eq(versionInfo3), any()); + doAnswer(x -> new EncodingInfo(versionInfo3, myData.getSchemaInfo(), Codecs.None.getCodec().getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(2))); + + Serializer serializer2 = 
JsonSerializerFactory.serializer(config, myData); + HashMap jsonObject = new HashMap<>(); + jsonObject.put("content", "mxx"); + + ByteBuffer s = serializer2.serialize(jsonObject); + str = stringDeserializer.deserialize(s); + + String stringSchema = new ObjectMapper().writeValueAsString(JsonSchema.minimalForFormat(JsonFormatTypes.STRING)); + + JSONSchema strSchema = JSONSchema.of("string", stringSchema, String.class); + VersionInfo versionInfo4 = new VersionInfo("myData", 0, 3); + doAnswer(x -> versionInfo4).when(client).getVersionForSchema(anyString(), eq(strSchema.getSchemaInfo())); + doAnswer(x -> new EncodingId(3)).when(client).getEncodingId(anyString(), eq(versionInfo4), any()); + doAnswer(x -> new EncodingInfo(versionInfo4, strSchema.getSchemaInfo(), Codecs.None.getCodec().getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(3))); + + Serializer serializer3 = JsonSerializerFactory.serializer(config, strSchema); + Serializer deserializer3 = JsonSerializerFactory.deserializer(config, strSchema); + Serializer generic3 = JsonSerializerFactory.genericDeserializer(config); + String string = "a"; + s = serializer3.serialize(string); + Object x = deserializer3.deserialize(s); + assertNotNull(x); + assertEquals(x, string); + s = serializer3.serialize(string); + Object jsonNode = generic3.deserialize(s); + assertTrue(jsonNode instanceof TextNode); + assertEquals(((TextNode) jsonNode).textValue(), string); + // multi type + DerivedUser2 user2 = new DerivedUser2("user", new Address("street", "city"), 2, "user2"); + + JSONSchema schema1Base = JSONSchema.ofBaseType(DerivedUser1.class, Object.class); + JSONSchema schema2Base = JSONSchema.ofBaseType(DerivedUser2.class, Object.class); + Map, JSONSchema> map = new HashMap<>(); + map.put(DerivedUser1.class, schema1Base); + map.put(DerivedUser2.class, schema2Base); + Serializer multiSerializer = JsonSerializerFactory.multiTypeSerializer(config, map); + serialized = multiSerializer.serialize(user1); + Serializer multiDeserializer = JsonSerializerFactory.multiTypeDeserializer(config, map); + Object deserialized2 = multiDeserializer.deserialize(serialized); + assertEquals(deserialized2, user1); + + serialized = multiSerializer.serialize(user2); + deserialized2 = multiDeserializer.deserialize(serialized); + assertEquals(deserialized2, user2); + + Map, JSONSchema> map2 = new HashMap<>(); + map2.put(DerivedUser1.class, schema1Base); + Serializer> fallbackDeserializer = JsonSerializerFactory.typedOrGenericDeserializer(config, map2); + serialized = multiSerializer.serialize(user1); + Either fallback = fallbackDeserializer.deserialize(serialized); + assertTrue(fallback.isLeft()); + assertEquals(fallback.getLeft(), user1); + + serialized = multiSerializer.serialize(user2); + + fallback = fallbackDeserializer.deserialize(serialized); + assertTrue(fallback.isRight()); + } + + @Test + public void testNoEncodingJson() throws IOException { + SchemaRegistryClient client = mock(SchemaRegistryClient.class); + SerializerConfig config = SerializerConfig.builder().registryClient(client).groupId("groupId") + .writeEncodingHeader(false).build(); + JSONSchema schema1 = JSONSchema.of(DerivedUser1.class); + + VersionInfo versionInfo1 = new VersionInfo("name", 0, 0); + doAnswer(x -> GroupProperties.builder().serializationFormat(SerializationFormat.Any) + .properties(ImmutableMap.of()).build()) + .when(client).getGroupProperties(anyString()); + doAnswer(x -> versionInfo1).when(client).getVersionForSchema(anyString(), eq(schema1.getSchemaInfo())); + 
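        // Illustrative note (not part of the original patch): with writeEncodingHeader(false) the serializer
        // writes plain JSON without the encoding-id header, which is why the verify(...) assertions below
        // check that getEncodingId()/getEncodingInfo() are never invoked on the registry client.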
doAnswer(x -> new SchemaWithVersion(schema1.getSchemaInfo(), versionInfo1)).when(client).getLatestSchemaVersion(anyString(), any()); + doAnswer(x -> true).when(client).canReadUsing(anyString(), any()); + + Serializer serializer = JsonSerializerFactory.serializer(config, schema1); + verify(client, never()).getEncodingId(anyString(), any(), any()); + DerivedUser1 user1 = new DerivedUser1("user", new Address("street", "city"), 2, "user1"); + ByteBuffer serialized = serializer.serialize(user1); + + Serializer deserializer = JsonSerializerFactory.deserializer(config, schema1); + verify(client, never()).getEncodingInfo(anyString(), any()); + DerivedUser1 deserialized = deserializer.deserialize(serialized); + assertEquals(deserialized, user1); + + serialized = serializer.serialize(user1); + + Serializer genericDeserializer = JsonSerializerFactory.genericDeserializer(config); + + JsonNode generic = genericDeserializer.deserialize(serialized); + assertNotNull(generic); + } + + @Data + @NoArgsConstructor + public static class TestClass { + private String test; + + public TestClass(String test) { + this.test = test; + } + } +} diff --git a/serializers/json/src/test/java/io/pravega/schemaregistry/serializer/json/schemas/SchemasTest.java b/serializers/json/src/test/java/io/pravega/schemaregistry/serializer/json/schemas/SchemasTest.java new file mode 100644 index 000000000..8c5373e72 --- /dev/null +++ b/serializers/json/src/test/java/io/pravega/schemaregistry/serializer/json/schemas/SchemasTest.java @@ -0,0 +1,50 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.serializer.json.schemas; + +import com.fasterxml.jackson.databind.JsonNode; +import io.pravega.schemaregistry.contract.data.SerializationFormat; +import io.pravega.schemaregistry.serializer.shared.testobjs.DerivedUser1; +import io.pravega.schemaregistry.serializer.shared.testobjs.DerivedUser2; +import io.pravega.schemaregistry.serializer.shared.testobjs.User; +import org.junit.Test; + +import static io.pravega.schemaregistry.serializer.json.testobjs.SchemaDefinitions.*; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; + +public class SchemasTest { + @Test + public void testJsonSchema() { + JSONSchema schema = JSONSchema.of(User.class); + assertNotNull(schema.getSchema()); + assertEquals(schema.getSchemaInfo().getSerializationFormat(), SerializationFormat.Json); + + JSONSchema schema2 = JSONSchema.of("Person", JSON_SCHEMA_STRING, String.class); + assertNotNull(schema2.getSchema()); + assertEquals(schema2.getSchemaInfo().getSerializationFormat(), SerializationFormat.Json); + + JSONSchema schema3 = JSONSchema.of("", JSON_SCHEMA_STRING_DRAFT_4, JsonNode.class); + assertNotNull(schema3.getSchema()); + assertEquals(schema3.getSchemaInfo().getSerializationFormat(), SerializationFormat.Json); + + JSONSchema schema4 = JSONSchema.of("", JSON_SCHEMA_STRING_DRAFT_7, JsonNode.class); + assertNotNull(schema4.getSchema()); + assertEquals(schema4.getSchemaInfo().getSerializationFormat(), SerializationFormat.Json); + + JSONSchema baseSchema1 = JSONSchema.ofBaseType(DerivedUser1.class, User.class); + assertNotNull(baseSchema1.getSchema()); + assertEquals(baseSchema1.getSchemaInfo().getSerializationFormat(), 
SerializationFormat.Json); + + JSONSchema baseSchema2 = JSONSchema.ofBaseType(DerivedUser2.class, User.class); + assertNotNull(baseSchema2.getSchema()); + assertEquals(baseSchema2.getSchemaInfo().getSerializationFormat(), SerializationFormat.Json); + } +} diff --git a/serializers/json/src/test/java/io/pravega/schemaregistry/serializer/json/testobjs/SchemaDefinitions.java b/serializers/json/src/test/java/io/pravega/schemaregistry/serializer/json/testobjs/SchemaDefinitions.java new file mode 100644 index 000000000..b4f398150 --- /dev/null +++ b/serializers/json/src/test/java/io/pravega/schemaregistry/serializer/json/testobjs/SchemaDefinitions.java @@ -0,0 +1,70 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.serializer.json.testobjs; + +public class SchemaDefinitions { + public static final String JSON_SCHEMA_STRING = "{" + + "\"title\": \"Person\", " + + "\"type\": \"object\", " + + "\"properties\": { " + + "\"name\": {" + + "\"type\": \"string\"" + + "}," + + "\"age\": {" + + "\"type\": \"integer\", \"minimum\": 0" + + "}" + + "}" + + "}"; + + public static final String JSON_SCHEMA_STRING_DRAFT_4 = "{\n" + + " \"$schema\": \"http://json-schema.org/draft-04/schema#\",\n" + + " \"title\": \"User\",\n" + + " \"id\": \"UserV4\",\n" + + " \"type\": \"object\",\n" + + "\t\n" + + " \"properties\": {\n" + + "\t\n" + + " \"id\": {\n" + + " \"type\": \"integer\"\n" + + " },\n" + + "\t\t\n" + + " \"name\": {\n" + + " \"type\": \"string\"\n" + + " },\n" + + "\t\t\n" + + " \"age\": {\n" + + " \"type\": \"number\",\n" + + " \"minimum\": 0,\n" + + " \"exclusiveMinimum\": true\n" + + " }\n" + + " },\n" + + "\t\n" + + " \"required\": [\"id\", \"name\", \"age\"]\n" + + "}"; + + public static final String JSON_SCHEMA_STRING_DRAFT_7 = "{\n" + + " \"$id\": \"UserV7\",\n" + + " \"$schema\": \"http://json-schema.org/draft-07/schema#\",\n" + + " \"title\": \"User\",\n" + + " \"type\": \"object\",\n" + + " \"properties\": {\n" + + " \"firstName\": {\n" + + " \"type\": \"string\"\n" + + " },\n" + + " \"lastName\": {\n" + + " \"type\": \"string\"\n" + + " },\n" + + " \"age\": {\n" + + " \"type\": \"integer\",\n" + + " \"minimum\": 0\n" + + " }\n" + + " }\n" + + "}"; +} diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufDeserializer.java b/serializers/protobuf/src/main/java/io/pravega/schemaregistry/serializer/protobuf/impl/ProtobufDeserializer.java similarity index 74% rename from serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufDeserializer.java rename to serializers/protobuf/src/main/java/io/pravega/schemaregistry/serializer/protobuf/impl/ProtobufDeserializer.java index 9cd4a460f..f18e66c02 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufDeserializer.java +++ b/serializers/protobuf/src/main/java/io/pravega/schemaregistry/serializer/protobuf/impl/ProtobufDeserializer.java @@ -7,14 +7,17 @@ * * http://www.apache.org/licenses/LICENSE-2.0 */ -package io.pravega.schemaregistry.serializers; +package io.pravega.schemaregistry.serializer.protobuf.impl; import com.google.common.base.Preconditions; import com.google.protobuf.GeneratedMessageV3; import com.google.protobuf.InvalidProtocolBufferException; import 
io.pravega.schemaregistry.client.SchemaRegistryClient; import io.pravega.schemaregistry.contract.data.SchemaInfo; -import io.pravega.schemaregistry.schemas.ProtobufSchema; +import io.pravega.schemaregistry.serializer.protobuf.schemas.ProtobufSchema; +import io.pravega.schemaregistry.serializer.shared.impl.AbstractDeserializer; +import io.pravega.schemaregistry.serializer.shared.impl.EncodingCache; +import io.pravega.schemaregistry.serializer.shared.impl.SerializerConfig; import java.io.IOException; import java.io.InputStream; @@ -30,7 +33,7 @@ public class ProtobufDeserializer extends Abstract } @Override - protected T deserialize(InputStream inputStream, SchemaInfo writerSchemaInfo, SchemaInfo readerSchemaInfo) throws IOException { + public final T deserialize(InputStream inputStream, SchemaInfo writerSchemaInfo, SchemaInfo readerSchemaInfo) throws IOException { try { return protobufSchema.getParser().parseFrom(inputStream); } catch (InvalidProtocolBufferException e) { diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufGenericDeserializer.java b/serializers/protobuf/src/main/java/io/pravega/schemaregistry/serializer/protobuf/impl/ProtobufGenericDeserializer.java similarity index 85% rename from serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufGenericDeserializer.java rename to serializers/protobuf/src/main/java/io/pravega/schemaregistry/serializer/protobuf/impl/ProtobufGenericDeserializer.java index 878bdbfb1..e0b9710b1 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufGenericDeserializer.java +++ b/serializers/protobuf/src/main/java/io/pravega/schemaregistry/serializer/protobuf/impl/ProtobufGenericDeserializer.java @@ -7,7 +7,7 @@ * * http://www.apache.org/licenses/LICENSE-2.0 */ -package io.pravega.schemaregistry.serializers; +package io.pravega.schemaregistry.serializer.protobuf.impl; import com.google.common.base.Preconditions; import com.google.common.base.Strings; @@ -17,7 +17,10 @@ import io.pravega.schemaregistry.client.SchemaRegistryClient; import io.pravega.schemaregistry.common.NameUtil; import io.pravega.schemaregistry.contract.data.SchemaInfo; -import io.pravega.schemaregistry.schemas.ProtobufSchema; +import io.pravega.schemaregistry.serializer.protobuf.schemas.ProtobufSchema; +import io.pravega.schemaregistry.serializer.shared.impl.AbstractDeserializer; +import io.pravega.schemaregistry.serializer.shared.impl.EncodingCache; +import io.pravega.schemaregistry.serializer.shared.impl.SerializerConfig; import org.apache.commons.lang3.SerializationException; import javax.annotation.Nullable; @@ -28,7 +31,7 @@ public class ProtobufGenericDeserializer extends AbstractDeserializer { private final ConcurrentHashMap knownSchemas; - ProtobufGenericDeserializer(String groupId, SchemaRegistryClient client, @Nullable ProtobufSchema schema, + public ProtobufGenericDeserializer(String groupId, SchemaRegistryClient client, @Nullable ProtobufSchema schema, SerializerConfig.Decoders decoder, EncodingCache encodingCache, boolean encodeHeader) { super(groupId, client, schema, false, decoder, encodingCache, encodeHeader); Preconditions.checkArgument(isEncodeHeader() || schema != null); @@ -36,7 +39,7 @@ public class ProtobufGenericDeserializer extends AbstractDeserializer extends AbstractSerializer { - ProtobufSerializer(String groupId, SchemaRegistryClient client, ProtobufSchema schema, +public class ProtobufSerializer extends AbstractSerializer { + public ProtobufSerializer(String groupId, 
SchemaRegistryClient client, ProtobufSchema schema, Encoder encoder, boolean registerSchema, boolean encodeHeader) { super(groupId, client, schema, encoder, registerSchema, encodeHeader); } diff --git a/serializers/protobuf/src/main/java/io/pravega/schemaregistry/serializer/protobuf/impl/ProtobufSerializerFactory.java b/serializers/protobuf/src/main/java/io/pravega/schemaregistry/serializer/protobuf/impl/ProtobufSerializerFactory.java new file mode 100644 index 000000000..1678c53db --- /dev/null +++ b/serializers/protobuf/src/main/java/io/pravega/schemaregistry/serializer/protobuf/impl/ProtobufSerializerFactory.java @@ -0,0 +1,191 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.serializer.protobuf.impl; + +import com.google.common.base.Preconditions; +import com.google.protobuf.DynamicMessage; +import com.google.protobuf.GeneratedMessageV3; +import com.google.protobuf.Message; +import io.pravega.client.stream.Serializer; +import io.pravega.schemaregistry.client.SchemaRegistryClient; +import io.pravega.schemaregistry.common.Either; +import io.pravega.schemaregistry.contract.data.EncodingInfo; +import io.pravega.schemaregistry.serializer.protobuf.schemas.ProtobufSchema; +import io.pravega.schemaregistry.serializer.shared.impl.AbstractDeserializer; +import io.pravega.schemaregistry.serializer.shared.impl.AbstractSerializer; +import io.pravega.schemaregistry.serializer.shared.impl.EncodingCache; +import io.pravega.schemaregistry.serializer.shared.impl.MultiplexedAndGenericDeserializer; +import io.pravega.schemaregistry.serializer.shared.impl.MultiplexedDeserializer; +import io.pravega.schemaregistry.serializer.shared.impl.MultiplexedSerializer; +import io.pravega.schemaregistry.serializer.shared.impl.SerializerConfig; +import lombok.NonNull; +import lombok.extern.slf4j.Slf4j; + +import javax.annotation.Nullable; +import java.nio.ByteBuffer; +import java.util.Map; +import java.util.stream.Collectors; + +import static io.pravega.schemaregistry.serializer.shared.impl.SerializerFactoryHelper.initForDeserializer; +import static io.pravega.schemaregistry.serializer.shared.impl.SerializerFactoryHelper.initForSerializer; + +/** + * Internal Factory class for protobuf serializers and deserializers. + */ +@Slf4j +public class ProtobufSerializerFactory { + /** + * Creates a typed protobuf serializer for the Schema. The serializer implementation returned from this method is + * responsible for interacting with schema registry service and ensures that only valid registered schema can be used. + * + * Note: the returned serializer only implements {@link Serializer#serialize(Object)}. + * It does not implement {@link Serializer#deserialize(ByteBuffer)}. + * + * @param config Serializer Config used for instantiating a new serializer. + * @param schema Schema container that encapsulates an Protobuf Schema. + * @param Type of event. + * @return A Serializer Implementation that can be used in {@link io.pravega.client.stream.EventStreamWriter} or + * {@link io.pravega.client.stream.TransactionalEventStreamWriter}. 
+ */ + public static <T extends Message> Serializer<T> serializer(@NonNull SerializerConfig config, + @NonNull ProtobufSchema<T> schema) { + String groupId = config.getGroupId(); + SchemaRegistryClient schemaRegistryClient = initForSerializer(config); + return new ProtobufSerializer<>(groupId, schemaRegistryClient, schema, config.getEncoder(), + config.isRegisterSchema(), config.isWriteEncodingHeader()); + } + + /** + * Creates a typed protobuf deserializer for the Schema. The deserializer implementation returned from this method is + * responsible for interacting with the schema registry service and validates the writer schema before using it. + * + * Note: the returned serializer only implements {@link Serializer#deserialize(ByteBuffer)}. + * It does not implement {@link Serializer#serialize(Object)}. + * + * @param config Serializer Config used for instantiating a new serializer. + * @param schema Schema container that encapsulates a ProtobufSchema. + * @param <T> Type of event. The typed event should be a protobuf generated class. For generic type use + * {@link #genericDeserializer(SerializerConfig, ProtobufSchema)} + * @return A deserializer Implementation that can be used in {@link io.pravega.client.stream.EventStreamReader}. + */ + public static <T extends GeneratedMessageV3> Serializer<T> deserializer(@NonNull SerializerConfig config, + @NonNull ProtobufSchema<T> schema) { + String groupId = config.getGroupId(); + SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); + + EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); + + // schema can be null in which case deserialization will happen into dynamic message + return new ProtobufDeserializer<>(groupId, schemaRegistryClient, schema, config.getDecoders(), encodingCache, + config.isWriteEncodingHeader()); + } + + /** + * Creates a generic protobuf deserializer. It has the optional parameter for schema. + * If the schema is not supplied, the writer schema is used for deserialization into {@link DynamicMessage}. + * + * Note: the returned serializer only implements {@link Serializer#deserialize(ByteBuffer)}. + * It does not implement {@link Serializer#serialize(Object)}. + * + * @param config Serializer Config used for instantiating a new serializer. + * @param schema Schema container that encapsulates a ProtobufSchema. + * @return A deserializer Implementation that can be used in {@link io.pravega.client.stream.EventStreamReader}. + */ + public static Serializer<DynamicMessage> genericDeserializer(@NonNull SerializerConfig config, @Nullable ProtobufSchema<DynamicMessage> schema) { + Preconditions.checkArgument(schema != null || config.isWriteEncodingHeader(), + "Either read schema should be supplied or events should be tagged with encoding ids."); + SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); + + String groupId = config.getGroupId(); + EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); + + return new ProtobufGenericDeserializer(groupId, schemaRegistryClient, schema, config.getDecoders(), encodingCache, + config.isWriteEncodingHeader()); + } + + /** + * A multiplexed Protobuf serializer that takes a map of schemas and validates them individually. + * + * @param config Serializer config. + * @param schemas map of protobuf schemas. + * @param <T> Base Type of schemas. + * @return a Serializer which can serialize events of different types for which schemas are supplied.
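+ *
+ * <p>Illustrative only: a hedged sketch of writing two different generated types through one multiplexed
+ * serializer. The schema map is assumed to have been assembled elsewhere (one schema container per event class);
+ * {@code ProtobufTest.Message1} and {@code ProtobufTest.Message2} are the generated test types from this module.
+ * <pre>{@code
+ * static void writeMixed(SerializerConfig config,
+ *                        Map<Class<? extends GeneratedMessageV3>, ProtobufSchema<GeneratedMessageV3>> schemas) {
+ *     Serializer<GeneratedMessageV3> serializer = ProtobufSerializerFactory.multiTypeSerializer(config, schemas);
+ *
+ *     // Each event is serialized against the schema registered for its concrete class.
+ *     ByteBuffer first = serializer.serialize(ProtobufTest.Message1.newBuilder().setName("a").build());
+ *     ByteBuffer second = serializer.serialize(ProtobufTest.Message2.newBuilder().setName("b").build());
+ * }
+ * }</pre>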
+ */ + public static <T extends Message> Serializer<T> multiTypeSerializer( + @NonNull SerializerConfig config, @NonNull Map<Class<? extends T>, ProtobufSchema<T>> schemas) { + Preconditions.checkArgument(config.isWriteEncodingHeader(), "Events should be tagged with encoding ids."); + String groupId = config.getGroupId(); + SchemaRegistryClient schemaRegistryClient = initForSerializer(config); + + Map<Class<? extends T>, AbstractSerializer<T>> serializerMap = getSerializerMap(config, schemas, groupId, schemaRegistryClient); + return new MultiplexedSerializer<>(serializerMap); + } + + private static <T extends Message> Map<Class<? extends T>, AbstractSerializer<T>> getSerializerMap( + SerializerConfig config, Map<Class<? extends T>, ProtobufSchema<T>> schemas, String groupId, SchemaRegistryClient schemaRegistryClient) { + return schemas + .entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, + x -> new ProtobufSerializer<>(groupId, schemaRegistryClient, x.getValue(), config.getEncoder(), + config.isRegisterSchema(), config.isWriteEncodingHeader()))); + } + + /** + * A multiplexed protobuf Deserializer that takes a map of schemas and deserializes events into those events depending + * on the object type information in {@link EncodingInfo}. + * + * @param config Serializer config. + * @param schemas map of protobuf schemas. + * @param <T> Base type of schemas. + * @return a Deserializer which can deserialize events of different types in the stream into typed objects. + */ + public static <T extends GeneratedMessageV3> Serializer<T> multiTypeDeserializer( + @NonNull SerializerConfig config, @NonNull Map<Class<? extends T>, ProtobufSchema<T>> schemas) { + Preconditions.checkArgument(config.isWriteEncodingHeader(), "Events should be tagged with encoding ids."); + String groupId = config.getGroupId(); + SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); + + EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); + + Map<String, AbstractDeserializer<T>> deserializerMap = getDeserializerMap(config, schemas, groupId, schemaRegistryClient, encodingCache); + return new MultiplexedDeserializer<>(groupId, schemaRegistryClient, deserializerMap, config.getDecoders(), encodingCache); + } + + /** + * A multiplexed protobuf Deserializer that takes a map of schemas and deserializes events into those events depending + * on the object type information in {@link EncodingInfo}. Events whose type is not present in the map are deserialized into {@link DynamicMessage}. + * + * @param config Serializer config. + * @param schemas map of protobuf schemas. + * @param <T> Base type of schemas. + * @return a Deserializer which can deserialize events of different types in the stream into typed objects or {@link DynamicMessage}.
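+ *
+ * <p>Illustrative only: a hedged sketch of consuming a stream that may contain both known and unknown types.
+ * The accessors used on {@code Either} ({@code isLeft()}, {@code getLeft()}, {@code getRight()}) are assumed from
+ * {@link io.pravega.schemaregistry.common.Either}; the schema map and the serialized buffer are produced elsewhere.
+ * <pre>{@code
+ * static void readMixed(SerializerConfig config,
+ *                       Map<Class<? extends GeneratedMessageV3>, ProtobufSchema<GeneratedMessageV3>> schemas,
+ *                       ByteBuffer serialized) {
+ *     Serializer<Either<GeneratedMessageV3, DynamicMessage>> deserializer =
+ *             ProtobufSerializerFactory.typedOrGenericDeserializer(config, schemas);
+ *     Either<GeneratedMessageV3, DynamicMessage> result = deserializer.deserialize(serialized);
+ *     if (result.isLeft()) {
+ *         GeneratedMessageV3 typed = result.getLeft();    // type was present in the supplied map
+ *     } else {
+ *         DynamicMessage generic = result.getRight();     // unknown type, decoded against the writer schema
+ *     }
+ * }
+ * }</pre>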
+ */ + public static <T extends GeneratedMessageV3> Serializer<Either<T, DynamicMessage>> typedOrGenericDeserializer( + @NonNull SerializerConfig config, @NonNull Map<Class<? extends T>, ProtobufSchema<T>> schemas) { + Preconditions.checkArgument(config.isWriteEncodingHeader(), "Events should be tagged with encoding ids."); + String groupId = config.getGroupId(); + SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); + + EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); + + Map<String, AbstractDeserializer<T>> deserializerMap = getDeserializerMap(config, schemas, groupId, schemaRegistryClient, encodingCache); + ProtobufGenericDeserializer genericDeserializer = new ProtobufGenericDeserializer(groupId, schemaRegistryClient, null, + config.getDecoders(), encodingCache, config.isWriteEncodingHeader()); + return new MultiplexedAndGenericDeserializer<>(groupId, schemaRegistryClient, deserializerMap, genericDeserializer, + config.getDecoders(), encodingCache); + } + + private static <T extends GeneratedMessageV3> Map<String, AbstractDeserializer<T>> getDeserializerMap( + SerializerConfig config, Map<Class<? extends T>, ProtobufSchema<T>> schemas, String groupId, + SchemaRegistryClient schemaRegistryClient, EncodingCache encodingCache) { + return schemas.values().stream().collect(Collectors.toMap(x -> x.getSchemaInfo().getType(), + x -> new ProtobufDeserializer<>(groupId, schemaRegistryClient, x, config.getDecoders(), encodingCache, + config.isWriteEncodingHeader()))); + } +} diff --git a/serializers/src/main/java/io/pravega/schemaregistry/schemas/ProtobufSchema.java b/serializers/protobuf/src/main/java/io/pravega/schemaregistry/serializer/protobuf/schemas/ProtobufSchema.java similarity index 98% rename from serializers/src/main/java/io/pravega/schemaregistry/schemas/ProtobufSchema.java rename to serializers/protobuf/src/main/java/io/pravega/schemaregistry/serializer/protobuf/schemas/ProtobufSchema.java index 990763c34..5aff7b586 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/schemas/ProtobufSchema.java +++ b/serializers/protobuf/src/main/java/io/pravega/schemaregistry/serializer/protobuf/schemas/ProtobufSchema.java @@ -7,7 +7,7 @@ * * http://www.apache.org/licenses/LICENSE-2.0 */ -package io.pravega.schemaregistry.schemas; +package io.pravega.schemaregistry.serializer.protobuf.schemas; import com.google.common.collect.ImmutableMap; import com.google.protobuf.DynamicMessage; @@ -17,6 +17,7 @@ import com.google.protobuf.Parser; import io.pravega.schemaregistry.contract.data.SchemaInfo; import io.pravega.schemaregistry.contract.data.SerializationFormat; +import io.pravega.schemaregistry.serializer.shared.schemas.Schema; import lombok.AccessLevel; import lombok.Data; import lombok.Getter; diff --git a/serializers/src/test/java/io/pravega/schemaregistry/testobjs/generated/ProtobufTest.java b/serializers/protobuf/src/test/java/io/pravega/schemaregistry/serializer/protobuf/generated/ProtobufTest.java similarity index 77% rename from serializers/src/test/java/io/pravega/schemaregistry/testobjs/generated/ProtobufTest.java rename to serializers/protobuf/src/test/java/io/pravega/schemaregistry/serializer/protobuf/generated/ProtobufTest.java index 96a34c85e..5d04803cd 100644 --- a/serializers/src/test/java/io/pravega/schemaregistry/testobjs/generated/ProtobufTest.java +++ b/serializers/protobuf/src/test/java/io/pravega/schemaregistry/serializer/protobuf/generated/ProtobufTest.java @@ -1,7 +1,7 @@ // Generated by the protocol buffer compiler. DO NOT EDIT!
// source: protobufTest.proto -package io.pravega.schemaregistry.testobjs.generated; +package io.pravega.schemaregistry.serializer.protobuf.generated; public final class ProtobufTest { private ProtobufTest() {} @@ -27,7 +27,7 @@ public interface InternalMessageOrBuilder extends * .io.pravega.schemaregistry.testobjs.generated.InternalMessage.Values value = 1; * @return The value. */ - io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.Values getValue(); + ProtobufTest.InternalMessage.Values getValue(); } /** * Protobuf type {@code io.pravega.schemaregistry.testobjs.generated.InternalMessage} @@ -102,15 +102,15 @@ private InternalMessage( } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_InternalMessage_descriptor; + return ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_InternalMessage_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { - return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_InternalMessage_fieldAccessorTable + return ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_InternalMessage_fieldAccessorTable .ensureFieldAccessorsInitialized( - io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.class, io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.Builder.class); + ProtobufTest.InternalMessage.class, ProtobufTest.InternalMessage.Builder.class); } /** @@ -209,7 +209,7 @@ public Values findValueByNumber(int number) { } public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { - return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.getDescriptor().getEnumTypes().get(0); + return ProtobufTest.InternalMessage.getDescriptor().getEnumTypes().get(0); } private static final Values[] VALUES = values(); @@ -248,10 +248,10 @@ public int getValueValue() { * .io.pravega.schemaregistry.testobjs.generated.InternalMessage.Values value = 1; * @return The value. */ - public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.Values getValue() { + public ProtobufTest.InternalMessage.Values getValue() { @SuppressWarnings("deprecation") - io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.Values result = io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.Values.valueOf(value_); - return result == null ? io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.Values.UNRECOGNIZED : result; + ProtobufTest.InternalMessage.Values result = ProtobufTest.InternalMessage.Values.valueOf(value_); + return result == null ? 
ProtobufTest.InternalMessage.Values.UNRECOGNIZED : result; } private byte memoizedIsInitialized = -1; @@ -268,7 +268,7 @@ public final boolean isInitialized() { @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - if (value_ != io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.Values.val1.getNumber()) { + if (value_ != ProtobufTest.InternalMessage.Values.val1.getNumber()) { output.writeEnum(1, value_); } unknownFields.writeTo(output); @@ -280,7 +280,7 @@ public int getSerializedSize() { if (size != -1) return size; size = 0; - if (value_ != io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.Values.val1.getNumber()) { + if (value_ != ProtobufTest.InternalMessage.Values.val1.getNumber()) { size += com.google.protobuf.CodedOutputStream .computeEnumSize(1, value_); } @@ -294,10 +294,10 @@ public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } - if (!(obj instanceof io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage)) { + if (!(obj instanceof ProtobufTest.InternalMessage)) { return super.equals(obj); } - io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage other = (io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage) obj; + ProtobufTest.InternalMessage other = (ProtobufTest.InternalMessage) obj; if (value_ != other.value_) return false; if (!unknownFields.equals(other.unknownFields)) return false; @@ -318,69 +318,69 @@ public int hashCode() { return hash; } - public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage parseFrom( + public static ProtobufTest.InternalMessage parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage parseFrom( + public static ProtobufTest.InternalMessage parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage parseFrom( + public static ProtobufTest.InternalMessage parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage parseFrom( + public static ProtobufTest.InternalMessage parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage parseFrom(byte[] data) + public static ProtobufTest.InternalMessage parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage parseFrom( + public static ProtobufTest.InternalMessage parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static 
io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage parseFrom(java.io.InputStream input) + public static ProtobufTest.InternalMessage parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } - public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage parseFrom( + public static ProtobufTest.InternalMessage parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } - public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage parseDelimitedFrom(java.io.InputStream input) + public static ProtobufTest.InternalMessage parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } - public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage parseDelimitedFrom( + public static ProtobufTest.InternalMessage parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } - public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage parseFrom( + public static ProtobufTest.InternalMessage parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } - public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage parseFrom( + public static ProtobufTest.InternalMessage parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -393,7 +393,7 @@ public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Internal public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } - public static Builder newBuilder(io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage prototype) { + public static Builder newBuilder(ProtobufTest.InternalMessage prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override @@ -414,21 +414,21 @@ protected Builder newBuilderForType( public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:io.pravega.schemaregistry.testobjs.generated.InternalMessage) - io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessageOrBuilder { + ProtobufTest.InternalMessageOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_InternalMessage_descriptor; + return ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_InternalMessage_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { - return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_InternalMessage_fieldAccessorTable + return 
ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_InternalMessage_fieldAccessorTable .ensureFieldAccessorsInitialized( - io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.class, io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.Builder.class); + ProtobufTest.InternalMessage.class, ProtobufTest.InternalMessage.Builder.class); } - // Construct using io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.newBuilder() + // Construct using io.pravega.schemaregistry.protobuf.testobjs.generated.ProtobufTest.InternalMessage.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -454,17 +454,17 @@ public Builder clear() { @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_InternalMessage_descriptor; + return ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_InternalMessage_descriptor; } @java.lang.Override - public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage getDefaultInstanceForType() { - return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.getDefaultInstance(); + public ProtobufTest.InternalMessage getDefaultInstanceForType() { + return ProtobufTest.InternalMessage.getDefaultInstance(); } @java.lang.Override - public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage build() { - io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage result = buildPartial(); + public ProtobufTest.InternalMessage build() { + ProtobufTest.InternalMessage result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } @@ -472,8 +472,8 @@ public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage } @java.lang.Override - public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage buildPartial() { - io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage result = new io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage(this); + public ProtobufTest.InternalMessage buildPartial() { + ProtobufTest.InternalMessage result = new ProtobufTest.InternalMessage(this); result.value_ = value_; onBuilt(); return result; @@ -513,16 +513,16 @@ public Builder addRepeatedField( } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage) { - return mergeFrom((io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage)other); + if (other instanceof ProtobufTest.InternalMessage) { + return mergeFrom((ProtobufTest.InternalMessage)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage other) { - if (other == io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.getDefaultInstance()) return this; + public Builder mergeFrom(ProtobufTest.InternalMessage other) { + if (other == ProtobufTest.InternalMessage.getDefaultInstance()) return this; if (other.value_ != 0) { setValueValue(other.getValueValue()); } @@ -541,11 +541,11 @@ public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
java.io.IOException { - io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage parsedMessage = null; + ProtobufTest.InternalMessage parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage) e.getUnfinishedMessage(); + parsedMessage = (ProtobufTest.InternalMessage) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { @@ -577,17 +577,17 @@ public Builder setValueValue(int value) { * .io.pravega.schemaregistry.testobjs.generated.InternalMessage.Values value = 1; * @return The value. */ - public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.Values getValue() { + public ProtobufTest.InternalMessage.Values getValue() { @SuppressWarnings("deprecation") - io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.Values result = io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.Values.valueOf(value_); - return result == null ? io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.Values.UNRECOGNIZED : result; + ProtobufTest.InternalMessage.Values result = ProtobufTest.InternalMessage.Values.valueOf(value_); + return result == null ? ProtobufTest.InternalMessage.Values.UNRECOGNIZED : result; } /** * .io.pravega.schemaregistry.testobjs.generated.InternalMessage.Values value = 1; * @param value The value to set. * @return This builder for chaining. */ - public Builder setValue(io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.Values value) { + public Builder setValue(ProtobufTest.InternalMessage.Values value) { if (value == null) { throw new NullPointerException(); } @@ -623,12 +623,12 @@ public final Builder mergeUnknownFields( } // @@protoc_insertion_point(class_scope:io.pravega.schemaregistry.testobjs.generated.InternalMessage) - private static final io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage DEFAULT_INSTANCE; + private static final ProtobufTest.InternalMessage DEFAULT_INSTANCE; static { - DEFAULT_INSTANCE = new io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage(); + DEFAULT_INSTANCE = new ProtobufTest.InternalMessage(); } - public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage getDefaultInstance() { + public static ProtobufTest.InternalMessage getDefaultInstance() { return DEFAULT_INSTANCE; } @@ -653,7 +653,7 @@ public com.google.protobuf.Parser getParserForType() { } @java.lang.Override - public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage getDefaultInstanceForType() { + public ProtobufTest.InternalMessage getDefaultInstanceForType() { return DEFAULT_INSTANCE; } @@ -672,11 +672,11 @@ public interface Message1OrBuilder extends * .io.pravega.schemaregistry.testobjs.generated.InternalMessage internal = 1; * @return The internal. 
*/ - io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage getInternal(); + ProtobufTest.InternalMessage getInternal(); /** * .io.pravega.schemaregistry.testobjs.generated.InternalMessage internal = 1; */ - io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessageOrBuilder getInternalOrBuilder(); + ProtobufTest.InternalMessageOrBuilder getInternalOrBuilder(); /** * string name = 2; @@ -737,11 +737,11 @@ private Message1( done = true; break; case 10: { - io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.Builder subBuilder = null; + ProtobufTest.InternalMessage.Builder subBuilder = null; if (internal_ != null) { subBuilder = internal_.toBuilder(); } - internal_ = input.readMessage(io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.parser(), extensionRegistry); + internal_ = input.readMessage(ProtobufTest.InternalMessage.parser(), extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(internal_); internal_ = subBuilder.buildPartial(); @@ -776,19 +776,19 @@ private Message1( } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_Message1_descriptor; + return ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_Message1_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { - return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_Message1_fieldAccessorTable + return ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_Message1_fieldAccessorTable .ensureFieldAccessorsInitialized( - io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1.class, io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1.Builder.class); + ProtobufTest.Message1.class, ProtobufTest.Message1.Builder.class); } public static final int INTERNAL_FIELD_NUMBER = 1; - private io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage internal_; + private ProtobufTest.InternalMessage internal_; /** * .io.pravega.schemaregistry.testobjs.generated.InternalMessage internal = 1; * @return Whether the internal field is set. @@ -800,13 +800,13 @@ public boolean hasInternal() { * .io.pravega.schemaregistry.testobjs.generated.InternalMessage internal = 1; * @return The internal. */ - public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage getInternal() { - return internal_ == null ? io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.getDefaultInstance() : internal_; + public ProtobufTest.InternalMessage getInternal() { + return internal_ == null ? 
ProtobufTest.InternalMessage.getDefaultInstance() : internal_; } /** * .io.pravega.schemaregistry.testobjs.generated.InternalMessage internal = 1; */ - public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessageOrBuilder getInternalOrBuilder() { + public ProtobufTest.InternalMessageOrBuilder getInternalOrBuilder() { return getInternal(); } @@ -892,10 +892,10 @@ public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } - if (!(obj instanceof io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1)) { + if (!(obj instanceof ProtobufTest.Message1)) { return super.equals(obj); } - io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 other = (io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1) obj; + ProtobufTest.Message1 other = (ProtobufTest.Message1) obj; if (hasInternal() != other.hasInternal()) return false; if (hasInternal()) { @@ -926,69 +926,69 @@ public int hashCode() { return hash; } - public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 parseFrom( + public static ProtobufTest.Message1 parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 parseFrom( + public static ProtobufTest.Message1 parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 parseFrom( + public static ProtobufTest.Message1 parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 parseFrom( + public static ProtobufTest.Message1 parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 parseFrom(byte[] data) + public static ProtobufTest.Message1 parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 parseFrom( + public static ProtobufTest.Message1 parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 parseFrom(java.io.InputStream input) + public static ProtobufTest.Message1 parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } - public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 parseFrom( + public static ProtobufTest.Message1 parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } - public static 
io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 parseDelimitedFrom(java.io.InputStream input) + public static ProtobufTest.Message1 parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } - public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 parseDelimitedFrom( + public static ProtobufTest.Message1 parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } - public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 parseFrom( + public static ProtobufTest.Message1 parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } - public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 parseFrom( + public static ProtobufTest.Message1 parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -1001,7 +1001,7 @@ public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } - public static Builder newBuilder(io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 prototype) { + public static Builder newBuilder(ProtobufTest.Message1 prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override @@ -1022,21 +1022,21 @@ protected Builder newBuilderForType( public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:io.pravega.schemaregistry.testobjs.generated.Message1) - io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1OrBuilder { + ProtobufTest.Message1OrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_Message1_descriptor; + return ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_Message1_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { - return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_Message1_fieldAccessorTable + return ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_Message1_fieldAccessorTable .ensureFieldAccessorsInitialized( - io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1.class, io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1.Builder.class); + ProtobufTest.Message1.class, ProtobufTest.Message1.Builder.class); } - // Construct using io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1.newBuilder() + // Construct using io.pravega.schemaregistry.protobuf.testobjs.generated.ProtobufTest.Message1.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -1068,17 +1068,17 @@ public Builder clear() { @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return 
io.pravega.schemaregistry.testobjs.generated.ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_Message1_descriptor; + return ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_Message1_descriptor; } @java.lang.Override - public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 getDefaultInstanceForType() { - return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1.getDefaultInstance(); + public ProtobufTest.Message1 getDefaultInstanceForType() { + return ProtobufTest.Message1.getDefaultInstance(); } @java.lang.Override - public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 build() { - io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 result = buildPartial(); + public ProtobufTest.Message1 build() { + ProtobufTest.Message1 result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } @@ -1086,8 +1086,8 @@ public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 build( } @java.lang.Override - public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 buildPartial() { - io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 result = new io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1(this); + public ProtobufTest.Message1 buildPartial() { + ProtobufTest.Message1 result = new ProtobufTest.Message1(this); if (internalBuilder_ == null) { result.internal_ = internal_; } else { @@ -1132,16 +1132,16 @@ public Builder addRepeatedField( } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1) { - return mergeFrom((io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1)other); + if (other instanceof ProtobufTest.Message1) { + return mergeFrom((ProtobufTest.Message1)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 other) { - if (other == io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1.getDefaultInstance()) return this; + public Builder mergeFrom(ProtobufTest.Message1 other) { + if (other == ProtobufTest.Message1.getDefaultInstance()) return this; if (other.hasInternal()) { mergeInternal(other.getInternal()); } @@ -1164,11 +1164,11 @@ public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 parsedMessage = null; + ProtobufTest.Message1 parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1) e.getUnfinishedMessage(); + parsedMessage = (ProtobufTest.Message1) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { @@ -1178,9 +1178,9 @@ public Builder mergeFrom( return this; } - private io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage internal_; + private ProtobufTest.InternalMessage internal_; private com.google.protobuf.SingleFieldBuilderV3< - io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage, io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.Builder, 
io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessageOrBuilder> internalBuilder_; + ProtobufTest.InternalMessage, ProtobufTest.InternalMessage.Builder, ProtobufTest.InternalMessageOrBuilder> internalBuilder_; /** * .io.pravega.schemaregistry.testobjs.generated.InternalMessage internal = 1; * @return Whether the internal field is set. @@ -1192,9 +1192,9 @@ public boolean hasInternal() { * .io.pravega.schemaregistry.testobjs.generated.InternalMessage internal = 1; * @return The internal. */ - public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage getInternal() { + public ProtobufTest.InternalMessage getInternal() { if (internalBuilder_ == null) { - return internal_ == null ? io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.getDefaultInstance() : internal_; + return internal_ == null ? ProtobufTest.InternalMessage.getDefaultInstance() : internal_; } else { return internalBuilder_.getMessage(); } @@ -1202,7 +1202,7 @@ public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage /** * .io.pravega.schemaregistry.testobjs.generated.InternalMessage internal = 1; */ - public Builder setInternal(io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage value) { + public Builder setInternal(ProtobufTest.InternalMessage value) { if (internalBuilder_ == null) { if (value == null) { throw new NullPointerException(); @@ -1219,7 +1219,7 @@ public Builder setInternal(io.pravega.schemaregistry.testobjs.generated.Protobuf * .io.pravega.schemaregistry.testobjs.generated.InternalMessage internal = 1; */ public Builder setInternal( - io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.Builder builderForValue) { + ProtobufTest.InternalMessage.Builder builderForValue) { if (internalBuilder_ == null) { internal_ = builderForValue.build(); onChanged(); @@ -1232,11 +1232,11 @@ public Builder setInternal( /** * .io.pravega.schemaregistry.testobjs.generated.InternalMessage internal = 1; */ - public Builder mergeInternal(io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage value) { + public Builder mergeInternal(ProtobufTest.InternalMessage value) { if (internalBuilder_ == null) { if (internal_ != null) { internal_ = - io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.newBuilder(internal_).mergeFrom(value).buildPartial(); + ProtobufTest.InternalMessage.newBuilder(internal_).mergeFrom(value).buildPartial(); } else { internal_ = value; } @@ -1264,7 +1264,7 @@ public Builder clearInternal() { /** * .io.pravega.schemaregistry.testobjs.generated.InternalMessage internal = 1; */ - public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.Builder getInternalBuilder() { + public ProtobufTest.InternalMessage.Builder getInternalBuilder() { onChanged(); return getInternalFieldBuilder().getBuilder(); @@ -1272,23 +1272,23 @@ public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage /** * .io.pravega.schemaregistry.testobjs.generated.InternalMessage internal = 1; */ - public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessageOrBuilder getInternalOrBuilder() { + public ProtobufTest.InternalMessageOrBuilder getInternalOrBuilder() { if (internalBuilder_ != null) { return internalBuilder_.getMessageOrBuilder(); } else { return internal_ == null ? 
- io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.getDefaultInstance() : internal_; + ProtobufTest.InternalMessage.getDefaultInstance() : internal_; } } /** * .io.pravega.schemaregistry.testobjs.generated.InternalMessage internal = 1; */ private com.google.protobuf.SingleFieldBuilderV3< - io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage, io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.Builder, io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessageOrBuilder> + ProtobufTest.InternalMessage, ProtobufTest.InternalMessage.Builder, ProtobufTest.InternalMessageOrBuilder> getInternalFieldBuilder() { if (internalBuilder_ == null) { internalBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< - io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage, io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.Builder, io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessageOrBuilder>( + ProtobufTest.InternalMessage, ProtobufTest.InternalMessage.Builder, ProtobufTest.InternalMessageOrBuilder>( getInternal(), getParentForChildren(), isClean()); @@ -1389,12 +1389,12 @@ public final Builder mergeUnknownFields( } // @@protoc_insertion_point(class_scope:io.pravega.schemaregistry.testobjs.generated.Message1) - private static final io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 DEFAULT_INSTANCE; + private static final ProtobufTest.Message1 DEFAULT_INSTANCE; static { - DEFAULT_INSTANCE = new io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1(); + DEFAULT_INSTANCE = new ProtobufTest.Message1(); } - public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 getDefaultInstance() { + public static ProtobufTest.Message1 getDefaultInstance() { return DEFAULT_INSTANCE; } @@ -1419,7 +1419,7 @@ public com.google.protobuf.Parser getParserForType() { } @java.lang.Override - public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 getDefaultInstanceForType() { + public ProtobufTest.Message1 getDefaultInstanceForType() { return DEFAULT_INSTANCE; } @@ -1525,15 +1525,15 @@ private Message2( } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_Message2_descriptor; + return ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_Message2_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { - return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_Message2_fieldAccessorTable + return ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_Message2_fieldAccessorTable .ensureFieldAccessorsInitialized( - io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2.class, io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2.Builder.class); + ProtobufTest.Message2.class, ProtobufTest.Message2.Builder.class); } public static final int NAME_FIELD_NUMBER = 1; @@ -1628,10 +1628,10 @@ public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } - if (!(obj instanceof io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2)) { + if (!(obj instanceof ProtobufTest.Message2)) { return super.equals(obj); } - 
io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 other = (io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2) obj; + ProtobufTest.Message2 other = (ProtobufTest.Message2) obj; if (!getName() .equals(other.getName())) return false; @@ -1657,69 +1657,69 @@ public int hashCode() { return hash; } - public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 parseFrom( + public static ProtobufTest.Message2 parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 parseFrom( + public static ProtobufTest.Message2 parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 parseFrom( + public static ProtobufTest.Message2 parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 parseFrom( + public static ProtobufTest.Message2 parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 parseFrom(byte[] data) + public static ProtobufTest.Message2 parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 parseFrom( + public static ProtobufTest.Message2 parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 parseFrom(java.io.InputStream input) + public static ProtobufTest.Message2 parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } - public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 parseFrom( + public static ProtobufTest.Message2 parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } - public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 parseDelimitedFrom(java.io.InputStream input) + public static ProtobufTest.Message2 parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } - public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 parseDelimitedFrom( + public static ProtobufTest.Message2 parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } - 
public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 parseFrom( + public static ProtobufTest.Message2 parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } - public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 parseFrom( + public static ProtobufTest.Message2 parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -1732,7 +1732,7 @@ public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } - public static Builder newBuilder(io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 prototype) { + public static Builder newBuilder(ProtobufTest.Message2 prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override @@ -1753,21 +1753,21 @@ protected Builder newBuilderForType( public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:io.pravega.schemaregistry.testobjs.generated.Message2) - io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2OrBuilder { + ProtobufTest.Message2OrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_Message2_descriptor; + return ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_Message2_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { - return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_Message2_fieldAccessorTable + return ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_Message2_fieldAccessorTable .ensureFieldAccessorsInitialized( - io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2.class, io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2.Builder.class); + ProtobufTest.Message2.class, ProtobufTest.Message2.Builder.class); } - // Construct using io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2.newBuilder() + // Construct using io.pravega.schemaregistry.protobuf.testobjs.generated.ProtobufTest.Message2.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -1795,17 +1795,17 @@ public Builder clear() { @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_Message2_descriptor; + return ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_Message2_descriptor; } @java.lang.Override - public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 getDefaultInstanceForType() { - return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2.getDefaultInstance(); + public ProtobufTest.Message2 getDefaultInstanceForType() { + return ProtobufTest.Message2.getDefaultInstance(); } @java.lang.Override - public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 build() { - 
io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 result = buildPartial(); + public ProtobufTest.Message2 build() { + ProtobufTest.Message2 result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } @@ -1813,8 +1813,8 @@ public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 build( } @java.lang.Override - public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 buildPartial() { - io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 result = new io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2(this); + public ProtobufTest.Message2 buildPartial() { + ProtobufTest.Message2 result = new ProtobufTest.Message2(this); result.name_ = name_; result.field1_ = field1_; onBuilt(); @@ -1855,16 +1855,16 @@ public Builder addRepeatedField( } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2) { - return mergeFrom((io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2)other); + if (other instanceof ProtobufTest.Message2) { + return mergeFrom((ProtobufTest.Message2)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 other) { - if (other == io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2.getDefaultInstance()) return this; + public Builder mergeFrom(ProtobufTest.Message2 other) { + if (other == ProtobufTest.Message2.getDefaultInstance()) return this; if (!other.getName().isEmpty()) { name_ = other.name_; onChanged(); @@ -1887,11 +1887,11 @@ public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 parsedMessage = null; + ProtobufTest.Message2 parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2) e.getUnfinishedMessage(); + parsedMessage = (ProtobufTest.Message2) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { @@ -2023,12 +2023,12 @@ public final Builder mergeUnknownFields( } // @@protoc_insertion_point(class_scope:io.pravega.schemaregistry.testobjs.generated.Message2) - private static final io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 DEFAULT_INSTANCE; + private static final ProtobufTest.Message2 DEFAULT_INSTANCE; static { - DEFAULT_INSTANCE = new io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2(); + DEFAULT_INSTANCE = new ProtobufTest.Message2(); } - public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 getDefaultInstance() { + public static ProtobufTest.Message2 getDefaultInstance() { return DEFAULT_INSTANCE; } @@ -2053,7 +2053,7 @@ public com.google.protobuf.Parser getParserForType() { } @java.lang.Override - public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 getDefaultInstanceForType() { + public ProtobufTest.Message2 getDefaultInstanceForType() { return DEFAULT_INSTANCE; } @@ -2170,15 +2170,15 @@ private Message3( } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return 
io.pravega.schemaregistry.testobjs.generated.ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_Message3_descriptor; + return ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_Message3_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { - return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_Message3_fieldAccessorTable + return ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_Message3_fieldAccessorTable .ensureFieldAccessorsInitialized( - io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3.class, io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3.Builder.class); + ProtobufTest.Message3.class, ProtobufTest.Message3.Builder.class); } public static final int NAME_FIELD_NUMBER = 1; @@ -2290,10 +2290,10 @@ public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } - if (!(obj instanceof io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3)) { + if (!(obj instanceof ProtobufTest.Message3)) { return super.equals(obj); } - io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 other = (io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3) obj; + ProtobufTest.Message3 other = (ProtobufTest.Message3) obj; if (!getName() .equals(other.getName())) return false; @@ -2323,69 +2323,69 @@ public int hashCode() { return hash; } - public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 parseFrom( + public static ProtobufTest.Message3 parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 parseFrom( + public static ProtobufTest.Message3 parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 parseFrom( + public static ProtobufTest.Message3 parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 parseFrom( + public static ProtobufTest.Message3 parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 parseFrom(byte[] data) + public static ProtobufTest.Message3 parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 parseFrom( + public static ProtobufTest.Message3 parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 parseFrom(java.io.InputStream input) + public static ProtobufTest.Message3 
parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } - public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 parseFrom( + public static ProtobufTest.Message3 parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } - public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 parseDelimitedFrom(java.io.InputStream input) + public static ProtobufTest.Message3 parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } - public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 parseDelimitedFrom( + public static ProtobufTest.Message3 parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } - public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 parseFrom( + public static ProtobufTest.Message3 parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } - public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 parseFrom( + public static ProtobufTest.Message3 parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -2398,7 +2398,7 @@ public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } - public static Builder newBuilder(io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 prototype) { + public static Builder newBuilder(ProtobufTest.Message3 prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override @@ -2419,21 +2419,21 @@ protected Builder newBuilderForType( public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:io.pravega.schemaregistry.testobjs.generated.Message3) - io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3OrBuilder { + ProtobufTest.Message3OrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_Message3_descriptor; + return ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_Message3_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { - return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_Message3_fieldAccessorTable + return ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_Message3_fieldAccessorTable .ensureFieldAccessorsInitialized( - io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3.class, 
io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3.Builder.class); + ProtobufTest.Message3.class, ProtobufTest.Message3.Builder.class); } - // Construct using io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3.newBuilder() + // Construct using io.pravega.schemaregistry.protobuf.testobjs.generated.ProtobufTest.Message3.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -2463,17 +2463,17 @@ public Builder clear() { @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_Message3_descriptor; + return ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_Message3_descriptor; } @java.lang.Override - public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 getDefaultInstanceForType() { - return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3.getDefaultInstance(); + public ProtobufTest.Message3 getDefaultInstanceForType() { + return ProtobufTest.Message3.getDefaultInstance(); } @java.lang.Override - public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 build() { - io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 result = buildPartial(); + public ProtobufTest.Message3 build() { + ProtobufTest.Message3 result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } @@ -2481,8 +2481,8 @@ public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 build( } @java.lang.Override - public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 buildPartial() { - io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 result = new io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3(this); + public ProtobufTest.Message3 buildPartial() { + ProtobufTest.Message3 result = new ProtobufTest.Message3(this); result.name_ = name_; result.field1_ = field1_; result.field2_ = field2_; @@ -2524,16 +2524,16 @@ public Builder addRepeatedField( } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3) { - return mergeFrom((io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3)other); + if (other instanceof ProtobufTest.Message3) { + return mergeFrom((ProtobufTest.Message3)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 other) { - if (other == io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3.getDefaultInstance()) return this; + public Builder mergeFrom(ProtobufTest.Message3 other) { + if (other == ProtobufTest.Message3.getDefaultInstance()) return this; if (!other.getName().isEmpty()) { name_ = other.name_; onChanged(); @@ -2559,11 +2559,11 @@ public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 parsedMessage = null; + ProtobufTest.Message3 parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3) 
e.getUnfinishedMessage(); + parsedMessage = (ProtobufTest.Message3) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { @@ -2725,12 +2725,12 @@ public final Builder mergeUnknownFields( } // @@protoc_insertion_point(class_scope:io.pravega.schemaregistry.testobjs.generated.Message3) - private static final io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 DEFAULT_INSTANCE; + private static final ProtobufTest.Message3 DEFAULT_INSTANCE; static { - DEFAULT_INSTANCE = new io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3(); + DEFAULT_INSTANCE = new ProtobufTest.Message3(); } - public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 getDefaultInstance() { + public static ProtobufTest.Message3 getDefaultInstance() { return DEFAULT_INSTANCE; } @@ -2755,7 +2755,7 @@ public com.google.protobuf.Parser getParserForType() { } @java.lang.Override - public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 getDefaultInstanceForType() { + public ProtobufTest.Message3 getDefaultInstanceForType() { return DEFAULT_INSTANCE; } diff --git a/serializers/protobuf/src/test/java/io/pravega/schemaregistry/serializer/protobuf/impl/SerializerTest.java b/serializers/protobuf/src/test/java/io/pravega/schemaregistry/serializer/protobuf/impl/SerializerTest.java new file mode 100644 index 000000000..92738ad2f --- /dev/null +++ b/serializers/protobuf/src/test/java/io/pravega/schemaregistry/serializer/protobuf/impl/SerializerTest.java @@ -0,0 +1,159 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.serializer.protobuf.impl; + +import com.google.common.collect.ImmutableMap; +import com.google.protobuf.DescriptorProtos; +import com.google.protobuf.DynamicMessage; +import com.google.protobuf.GeneratedMessageV3; +import io.pravega.client.stream.Serializer; +import io.pravega.schemaregistry.client.SchemaRegistryClient; +import io.pravega.schemaregistry.serializer.shared.codec.Codecs; +import io.pravega.schemaregistry.common.Either; +import io.pravega.schemaregistry.contract.data.EncodingId; +import io.pravega.schemaregistry.contract.data.EncodingInfo; +import io.pravega.schemaregistry.contract.data.GroupProperties; +import io.pravega.schemaregistry.contract.data.SchemaInfo; +import io.pravega.schemaregistry.contract.data.SchemaWithVersion; +import io.pravega.schemaregistry.contract.data.SerializationFormat; +import io.pravega.schemaregistry.contract.data.VersionInfo; +import io.pravega.schemaregistry.serializer.protobuf.schemas.ProtobufSchema; +import io.pravega.schemaregistry.serializer.protobuf.generated.ProtobufTest; +import io.pravega.schemaregistry.serializer.shared.impl.SerializerConfig; +import io.pravega.test.common.AssertExtensions; +import org.junit.Test; + +import java.io.IOException; +import java.nio.ByteBuffer; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.HashMap; +import java.util.Map; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.ArgumentMatchers.eq; +import static 
org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.never; + +public class SerializerTest { + @Test + public void testProtobufSerializers() throws IOException { + SchemaRegistryClient client = mock(SchemaRegistryClient.class); + SerializerConfig config = SerializerConfig.builder().registryClient(client).groupId("groupId").build(); + Path path = Paths.get("src/test/resources/proto/protobufTest.pb"); + byte[] schemaBytes = Files.readAllBytes(path); + DescriptorProtos.FileDescriptorSet descriptorSet = DescriptorProtos.FileDescriptorSet.parseFrom(schemaBytes); + ProtobufSchema<ProtobufTest.Message2> schema1 = ProtobufSchema.of(ProtobufTest.Message2.class, descriptorSet); + ProtobufSchema<ProtobufTest.Message3> schema2 = ProtobufSchema.of(ProtobufTest.Message3.class, descriptorSet); + + VersionInfo versionInfo1 = new VersionInfo("name", 0, 0); + VersionInfo versionInfo2 = new VersionInfo("name", 1, 1); + doAnswer(x -> GroupProperties.builder().serializationFormat(SerializationFormat.Any).build()) + .when(client).getGroupProperties(anyString()); + doAnswer(x -> versionInfo1).when(client).getVersionForSchema(anyString(), eq(schema1.getSchemaInfo())); + doAnswer(x -> versionInfo2).when(client).getVersionForSchema(anyString(), eq(schema2.getSchemaInfo())); + doAnswer(x -> new EncodingId(0)).when(client).getEncodingId(anyString(), eq(versionInfo1), any()); + doAnswer(x -> new EncodingId(1)).when(client).getEncodingId(anyString(), eq(versionInfo2), any()); + doAnswer(x -> new EncodingInfo(versionInfo1, schema1.getSchemaInfo(), Codecs.None.getCodec().getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(0))); + doAnswer(x -> new EncodingInfo(versionInfo2, schema2.getSchemaInfo(), Codecs.None.getCodec().getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(1))); + doAnswer(x -> true).when(client).canReadUsing(anyString(), any()); + + Serializer<ProtobufTest.Message2> serializer = ProtobufSerializerFactory.serializer(config, schema1); + ProtobufTest.Message2 message = ProtobufTest.Message2.newBuilder().setName("name").setField1(1).build(); + ByteBuffer serialized = serializer.serialize(message); + + Serializer<ProtobufTest.Message2> deserializer = ProtobufSerializerFactory.deserializer(config, schema1); + ProtobufTest.Message2 deserialized = deserializer.deserialize(serialized); + assertEquals(deserialized, message); + + serialized = serializer.serialize(message); + Serializer<DynamicMessage> genericDeserializer = ProtobufSerializerFactory.genericDeserializer(config, null); + DynamicMessage generic = genericDeserializer.deserialize(serialized); + assertEquals(generic.getAllFields().size(), 2); + + // multi type + ProtobufTest.Message3 message2 = ProtobufTest.Message3.newBuilder().setName("name").setField1(1).setField2(2).build(); + + ProtobufSchema<GeneratedMessageV3> schema1Base = ProtobufSchema.ofGeneratedMessageV3(ProtobufTest.Message2.class, descriptorSet); + ProtobufSchema<GeneratedMessageV3> schema2Base = ProtobufSchema.ofGeneratedMessageV3(ProtobufTest.Message3.class, descriptorSet); + Map<Class<? extends GeneratedMessageV3>, ProtobufSchema<GeneratedMessageV3>> map = new HashMap<>(); + map.put(ProtobufTest.Message2.class, schema1Base); + map.put(ProtobufTest.Message3.class, schema2Base); + Serializer<GeneratedMessageV3> multiSerializer = ProtobufSerializerFactory.multiTypeSerializer(config, map); + serialized = multiSerializer.serialize(message); + Serializer<GeneratedMessageV3> multiDeserializer = ProtobufSerializerFactory.multiTypeDeserializer(config, map); + GeneratedMessageV3 deserialized2 = multiDeserializer.deserialize(serialized); + assertEquals(deserialized2, message); + + serialized =
multiSerializer.serialize(message2); + deserialized2 = multiDeserializer.deserialize(serialized); + assertEquals(deserialized2, message2); + + Map<Class<? extends GeneratedMessageV3>, ProtobufSchema<GeneratedMessageV3>> map2 = new HashMap<>(); + map2.put(ProtobufTest.Message2.class, schema1Base); + Serializer<Either<GeneratedMessageV3, DynamicMessage>> fallbackDeserializer = ProtobufSerializerFactory.typedOrGenericDeserializer(config, map2); + serialized = multiSerializer.serialize(message); + Either<GeneratedMessageV3, DynamicMessage> fallback = fallbackDeserializer.deserialize(serialized); + assertTrue(fallback.isLeft()); + assertEquals(fallback.getLeft(), message); + + serialized = multiSerializer.serialize(message2); + + fallback = fallbackDeserializer.deserialize(serialized); + assertTrue(fallback.isRight()); + } + + @Test + public void testNoEncodingProto() throws IOException { + SchemaRegistryClient client = mock(SchemaRegistryClient.class); + SerializerConfig config = SerializerConfig.builder().registryClient(client).groupId("groupId") + .writeEncodingHeader(false).build(); + Path path = Paths.get("src/test/resources/proto/protobufTest.pb"); + byte[] schemaBytes = Files.readAllBytes(path); + DescriptorProtos.FileDescriptorSet descriptorSet = DescriptorProtos.FileDescriptorSet.parseFrom(schemaBytes); + ProtobufSchema<ProtobufTest.Message2> schema1 = ProtobufSchema.of(ProtobufTest.Message2.class, descriptorSet); + + VersionInfo versionInfo1 = new VersionInfo("name", 0, 0); + doAnswer(x -> GroupProperties.builder().serializationFormat(SerializationFormat.Any) + .properties(ImmutableMap.of()).build()) + .when(client).getGroupProperties(anyString()); + doAnswer(x -> versionInfo1).when(client).getVersionForSchema(anyString(), eq(schema1.getSchemaInfo())); + doAnswer(x -> new SchemaWithVersion(schema1.getSchemaInfo(), versionInfo1)).when(client).getLatestSchemaVersion(anyString(), any()); + doAnswer(x -> true).when(client).canReadUsing(anyString(), any()); + + Serializer<ProtobufTest.Message2> serializer = ProtobufSerializerFactory.serializer(config, schema1); + verify(client, never()).getEncodingId(anyString(), any(), any()); + + ProtobufTest.Message2 message = ProtobufTest.Message2.newBuilder().setName("name").setField1(1).build(); + ByteBuffer serialized = serializer.serialize(message); + + Serializer<ProtobufTest.Message2> deserializer = ProtobufSerializerFactory.deserializer(config, schema1); + verify(client, never()).getEncodingInfo(anyString(), any()); + + ProtobufTest.Message2 deserialized = deserializer.deserialize(serialized); + assertEquals(deserialized, message); + + serialized = serializer.serialize(message); + AssertExtensions.assertThrows(IllegalArgumentException.class, () -> ProtobufSerializerFactory.genericDeserializer(config, null)); + + SchemaInfo latestSchema = client.getLatestSchemaVersion("groupId", null).getSchemaInfo(); + ProtobufSchema<DynamicMessage> schemaDynamic = ProtobufSchema.of(latestSchema.getType(), descriptorSet); + Serializer<DynamicMessage> genericDeserializer = ProtobufSerializerFactory.genericDeserializer(config, schemaDynamic); + + DynamicMessage generic = genericDeserializer.deserialize(serialized); + assertEquals(generic.getAllFields().size(), 2); + } +} diff --git a/serializers/protobuf/src/test/java/io/pravega/schemaregistry/serializer/protobuf/schemas/SchemasTest.java b/serializers/protobuf/src/test/java/io/pravega/schemaregistry/serializer/protobuf/schemas/SchemasTest.java new file mode 100644 index 000000000..f5c969031 --- /dev/null +++ b/serializers/protobuf/src/test/java/io/pravega/schemaregistry/serializer/protobuf/schemas/SchemasTest.java @@ -0,0 +1,70 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved.
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.serializer.protobuf.schemas; + +import com.google.protobuf.DescriptorProtos; +import com.google.protobuf.DynamicMessage; +import com.google.protobuf.GeneratedMessageV3; +import io.pravega.schemaregistry.contract.data.SerializationFormat; +import io.pravega.schemaregistry.serializer.protobuf.generated.ProtobufTest; +import org.junit.Test; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; + +public class SchemasTest { + @Test + public void testProtobufSchema() throws IOException { + ProtobufSchema<ProtobufTest.Message1> sm1 = ProtobufSchema.of(ProtobufTest.Message1.class); + assertNotNull(sm1.getParser()); + assertNotNull(sm1.getFileDescriptorSet()); + assertEquals(sm1.getSchemaInfo().getSerializationFormat(), SerializationFormat.Protobuf); + + ProtobufSchema<GeneratedMessageV3> bm1 = ProtobufSchema.ofGeneratedMessageV3(ProtobufTest.Message1.class); + assertNotNull(bm1.getParser()); + assertNotNull(bm1.getFileDescriptorSet()); + assertEquals(bm1.getSchemaInfo().getSerializationFormat(), SerializationFormat.Protobuf); + + ProtobufSchema<GeneratedMessageV3> bm2 = ProtobufSchema.ofGeneratedMessageV3(ProtobufTest.Message2.class); + assertNotNull(bm2.getParser()); + assertNotNull(bm2.getFileDescriptorSet()); + assertEquals(bm2.getSchemaInfo().getSerializationFormat(), SerializationFormat.Protobuf); + + Path path = Paths.get("src/test/resources/proto/protobufTest.pb"); + byte[] schemaBytes = Files.readAllBytes(path); + DescriptorProtos.FileDescriptorSet descriptorSet = DescriptorProtos.FileDescriptorSet.parseFrom(schemaBytes); + + ProtobufSchema<DynamicMessage> schema = ProtobufSchema.of(ProtobufTest.Message1.class.getName(), descriptorSet); + assertNull(schema.getParser()); + assertNotNull(schema.getFileDescriptorSet()); + assertEquals(schema.getSchemaInfo().getSerializationFormat(), SerializationFormat.Protobuf); + + ProtobufSchema<ProtobufTest.Message1> schema2 = ProtobufSchema.of(ProtobufTest.Message1.class, descriptorSet); + assertNotNull(schema2.getParser()); + assertNotNull(schema2.getFileDescriptorSet()); + assertEquals(schema2.getSchemaInfo().getSerializationFormat(), SerializationFormat.Protobuf); + + ProtobufSchema<GeneratedMessageV3> baseSchema1 = ProtobufSchema.ofGeneratedMessageV3(ProtobufTest.Message1.class, descriptorSet); + assertNotNull(baseSchema1.getParser()); + assertNotNull(baseSchema1.getFileDescriptorSet()); + assertEquals(baseSchema1.getSchemaInfo().getSerializationFormat(), SerializationFormat.Protobuf); + + ProtobufSchema<GeneratedMessageV3> baseSchema2 = ProtobufSchema.ofGeneratedMessageV3(ProtobufTest.Message2.class, descriptorSet); + assertNotNull(baseSchema2.getParser()); + assertNotNull(baseSchema2.getFileDescriptorSet()); + assertEquals(baseSchema2.getSchemaInfo().getSerializationFormat(), SerializationFormat.Protobuf); + } +} diff --git a/serializers/protobuf/src/test/resources/proto/protobufTest.pb b/serializers/protobuf/src/test/resources/proto/protobufTest.pb new file mode 100644 index 000000000..b0226bf03 Binary files /dev/null and b/serializers/protobuf/src/test/resources/proto/protobufTest.pb differ diff --git a/serializers/protobuf/src/test/resources/proto/protobufTest.proto
b/serializers/protobuf/src/test/resources/proto/protobufTest.proto new file mode 100644 index 000000000..4edfcae2b --- /dev/null +++ b/serializers/protobuf/src/test/resources/proto/protobufTest.proto @@ -0,0 +1,28 @@ +syntax = "proto3"; +package io.pravega.schemaregistry.testobjs.generated; + +message InternalMessage { + enum Values { + val1 = 0; + val2 = 1; + val3 = 2; + val4 = 3; + } + Values value = 1; +} + +message Message1 { + InternalMessage internal = 1; + string name = 2; +} + +message Message2 { + string name = 1; + int32 field1 = 2; +} + +message Message3 { + string name = 1; + int32 field1 = 2; + int32 field2 = 3; +} diff --git a/serializers/src/main/java/io/pravega/schemaregistry/codec/Codec.java b/serializers/shared/src/main/java/io/pravega/schemaregistry/serializer/shared/codec/Codec.java similarity index 95% rename from serializers/src/main/java/io/pravega/schemaregistry/codec/Codec.java rename to serializers/shared/src/main/java/io/pravega/schemaregistry/serializer/shared/codec/Codec.java index b2e1f26ff..cb5c8931b 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/codec/Codec.java +++ b/serializers/shared/src/main/java/io/pravega/schemaregistry/serializer/shared/codec/Codec.java @@ -7,7 +7,7 @@ * * http://www.apache.org/licenses/LICENSE-2.0 */ -package io.pravega.schemaregistry.codec; +package io.pravega.schemaregistry.serializer.shared.codec; import io.pravega.schemaregistry.contract.data.CodecType; import io.pravega.schemaregistry.contract.data.EncodingInfo; diff --git a/serializers/src/main/java/io/pravega/schemaregistry/codec/Codecs.java b/serializers/shared/src/main/java/io/pravega/schemaregistry/serializer/shared/codec/Codecs.java similarity index 98% rename from serializers/src/main/java/io/pravega/schemaregistry/codec/Codecs.java rename to serializers/shared/src/main/java/io/pravega/schemaregistry/serializer/shared/codec/Codecs.java index 5f5155c05..710917e81 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/codec/Codecs.java +++ b/serializers/shared/src/main/java/io/pravega/schemaregistry/serializer/shared/codec/Codecs.java @@ -7,7 +7,7 @@ * * http://www.apache.org/licenses/LICENSE-2.0 */ -package io.pravega.schemaregistry.codec; +package io.pravega.schemaregistry.serializer.shared.codec; import com.fasterxml.jackson.databind.util.ByteBufferBackedInputStream; import io.pravega.schemaregistry.contract.data.CodecType; diff --git a/serializers/src/main/java/io/pravega/schemaregistry/codec/Decoder.java b/serializers/shared/src/main/java/io/pravega/schemaregistry/serializer/shared/codec/Decoder.java similarity index 94% rename from serializers/src/main/java/io/pravega/schemaregistry/codec/Decoder.java rename to serializers/shared/src/main/java/io/pravega/schemaregistry/serializer/shared/codec/Decoder.java index 5c0d7f3a8..31f9c0ec7 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/codec/Decoder.java +++ b/serializers/shared/src/main/java/io/pravega/schemaregistry/serializer/shared/codec/Decoder.java @@ -7,7 +7,7 @@ * * http://www.apache.org/licenses/LICENSE-2.0 */ -package io.pravega.schemaregistry.codec; +package io.pravega.schemaregistry.serializer.shared.codec; import java.io.IOException; import java.nio.ByteBuffer; diff --git a/serializers/src/main/java/io/pravega/schemaregistry/codec/Encoder.java b/serializers/shared/src/main/java/io/pravega/schemaregistry/serializer/shared/codec/Encoder.java similarity index 95% rename from serializers/src/main/java/io/pravega/schemaregistry/codec/Encoder.java rename to 
serializers/shared/src/main/java/io/pravega/schemaregistry/serializer/shared/codec/Encoder.java index e6e9764e8..4fe5ba3b0 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/codec/Encoder.java +++ b/serializers/shared/src/main/java/io/pravega/schemaregistry/serializer/shared/codec/Encoder.java @@ -7,7 +7,7 @@ * * http://www.apache.org/licenses/LICENSE-2.0 */ -package io.pravega.schemaregistry.codec; +package io.pravega.schemaregistry.serializer.shared.codec; import io.pravega.schemaregistry.contract.data.CodecType; diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractDeserializer.java b/serializers/shared/src/main/java/io/pravega/schemaregistry/serializer/shared/impl/AbstractDeserializer.java similarity index 92% rename from serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractDeserializer.java rename to serializers/shared/src/main/java/io/pravega/schemaregistry/serializer/shared/impl/AbstractDeserializer.java index eb3e13091..a2568d096 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractDeserializer.java +++ b/serializers/shared/src/main/java/io/pravega/schemaregistry/serializer/shared/impl/AbstractDeserializer.java @@ -7,14 +7,14 @@ * * http://www.apache.org/licenses/LICENSE-2.0 */ -package io.pravega.schemaregistry.serializers; +package io.pravega.schemaregistry.serializer.shared.impl; import com.google.common.base.Preconditions; import io.pravega.schemaregistry.client.SchemaRegistryClient; import io.pravega.schemaregistry.contract.data.EncodingId; import io.pravega.schemaregistry.contract.data.EncodingInfo; import io.pravega.schemaregistry.contract.data.SchemaInfo; -import io.pravega.schemaregistry.schemas.Schema; +import io.pravega.schemaregistry.serializer.shared.schemas.Schema; import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; @@ -25,7 +25,7 @@ import java.nio.ByteBuffer; @Slf4j -abstract class AbstractDeserializer extends BaseDeserializer { +public abstract class AbstractDeserializer extends BaseDeserializer { private static final int HEADER_SIZE = 1 + Integer.BYTES; private final String groupId; @@ -114,9 +114,9 @@ public T deserialize(ByteBuffer data) { return deserialize(inputStream, writerSchema, readerSchema); } - protected abstract T deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema) throws IOException; + public abstract T deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema) throws IOException; - boolean isEncodeHeader() { + protected boolean isEncodeHeader() { return encodeHeader; } } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractSerializer.java b/serializers/shared/src/main/java/io/pravega/schemaregistry/serializer/shared/impl/AbstractSerializer.java similarity index 92% rename from serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractSerializer.java rename to serializers/shared/src/main/java/io/pravega/schemaregistry/serializer/shared/impl/AbstractSerializer.java index 66d462d43..799670f22 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractSerializer.java +++ b/serializers/shared/src/main/java/io/pravega/schemaregistry/serializer/shared/impl/AbstractSerializer.java @@ -7,18 +7,18 @@ * * http://www.apache.org/licenses/LICENSE-2.0 */ -package io.pravega.schemaregistry.serializers; +package io.pravega.schemaregistry.serializer.shared.impl; import com.google.common.base.Preconditions; import 
io.pravega.common.io.EnhancedByteArrayOutputStream; import io.pravega.common.util.BitConverter; import io.pravega.schemaregistry.client.SchemaRegistryClient; -import io.pravega.schemaregistry.codec.Codecs; -import io.pravega.schemaregistry.codec.Encoder; +import io.pravega.schemaregistry.serializer.shared.codec.Codecs; +import io.pravega.schemaregistry.serializer.shared.codec.Encoder; import io.pravega.schemaregistry.contract.data.EncodingId; import io.pravega.schemaregistry.contract.data.SchemaInfo; import io.pravega.schemaregistry.contract.data.VersionInfo; -import io.pravega.schemaregistry.schemas.Schema; +import io.pravega.schemaregistry.serializer.shared.schemas.Schema; import lombok.Getter; import lombok.SneakyThrows; @@ -27,7 +27,7 @@ import java.nio.ByteBuffer; import java.util.concurrent.atomic.AtomicReference; -abstract class AbstractSerializer extends BaseSerializer { +public abstract class AbstractSerializer extends BaseSerializer { private static final byte PROTOCOL = 0x1; private final String groupId; diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/BaseDeserializer.java b/serializers/shared/src/main/java/io/pravega/schemaregistry/serializer/shared/impl/BaseDeserializer.java similarity index 90% rename from serializers/src/main/java/io/pravega/schemaregistry/serializers/BaseDeserializer.java rename to serializers/shared/src/main/java/io/pravega/schemaregistry/serializer/shared/impl/BaseDeserializer.java index 015a2036f..bd99c2aa3 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/BaseDeserializer.java +++ b/serializers/shared/src/main/java/io/pravega/schemaregistry/serializer/shared/impl/BaseDeserializer.java @@ -7,7 +7,7 @@ * * http://www.apache.org/licenses/LICENSE-2.0 */ -package io.pravega.schemaregistry.serializers; +package io.pravega.schemaregistry.serializer.shared.impl; import io.pravega.client.stream.Serializer; diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/BaseSerializer.java b/serializers/shared/src/main/java/io/pravega/schemaregistry/serializer/shared/impl/BaseSerializer.java similarity index 90% rename from serializers/src/main/java/io/pravega/schemaregistry/serializers/BaseSerializer.java rename to serializers/shared/src/main/java/io/pravega/schemaregistry/serializer/shared/impl/BaseSerializer.java index 8b7844fc2..fbe515de8 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/BaseSerializer.java +++ b/serializers/shared/src/main/java/io/pravega/schemaregistry/serializer/shared/impl/BaseSerializer.java @@ -7,7 +7,7 @@ * * http://www.apache.org/licenses/LICENSE-2.0 */ -package io.pravega.schemaregistry.serializers; +package io.pravega.schemaregistry.serializer.shared.impl; import io.pravega.client.stream.Serializer; diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/CustomDeserializer.java b/serializers/shared/src/main/java/io/pravega/schemaregistry/serializer/shared/impl/CustomDeserializer.java similarity index 90% rename from serializers/src/main/java/io/pravega/schemaregistry/serializers/CustomDeserializer.java rename to serializers/shared/src/main/java/io/pravega/schemaregistry/serializer/shared/impl/CustomDeserializer.java index a3575c87e..e9bc543f4 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/CustomDeserializer.java +++ b/serializers/shared/src/main/java/io/pravega/schemaregistry/serializer/shared/impl/CustomDeserializer.java @@ -7,7 +7,7 @@ * * http://www.apache.org/licenses/LICENSE-2.0 */ 
-package io.pravega.schemaregistry.serializers; +package io.pravega.schemaregistry.serializer.shared.impl; import io.pravega.schemaregistry.contract.data.SchemaInfo; diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/CustomSerializer.java b/serializers/shared/src/main/java/io/pravega/schemaregistry/serializer/shared/impl/CustomSerializer.java similarity index 89% rename from serializers/src/main/java/io/pravega/schemaregistry/serializers/CustomSerializer.java rename to serializers/shared/src/main/java/io/pravega/schemaregistry/serializer/shared/impl/CustomSerializer.java index 7a508b734..ef97f4a8d 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/CustomSerializer.java +++ b/serializers/shared/src/main/java/io/pravega/schemaregistry/serializer/shared/impl/CustomSerializer.java @@ -7,7 +7,7 @@ * * http://www.apache.org/licenses/LICENSE-2.0 */ -package io.pravega.schemaregistry.serializers; +package io.pravega.schemaregistry.serializer.shared.impl; import io.pravega.schemaregistry.contract.data.SchemaInfo; diff --git a/serializers/shared/src/main/java/io/pravega/schemaregistry/serializer/shared/impl/CustomSerializerFactory.java b/serializers/shared/src/main/java/io/pravega/schemaregistry/serializer/shared/impl/CustomSerializerFactory.java new file mode 100644 index 000000000..4240dbc6a --- /dev/null +++ b/serializers/shared/src/main/java/io/pravega/schemaregistry/serializer/shared/impl/CustomSerializerFactory.java @@ -0,0 +1,78 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.serializer.shared.impl; + +import io.pravega.client.stream.Serializer; +import io.pravega.schemaregistry.client.SchemaRegistryClient; +import io.pravega.schemaregistry.contract.data.SchemaInfo; +import io.pravega.schemaregistry.serializer.shared.schemas.Schema; +import lombok.NonNull; +import lombok.extern.slf4j.Slf4j; +
+import javax.annotation.Nullable; +import java.io.InputStream; +import java.io.OutputStream; +
+import static io.pravega.schemaregistry.serializer.shared.impl.SerializerFactoryHelper.initForDeserializer; +import static io.pravega.schemaregistry.serializer.shared.impl.SerializerFactoryHelper.initForSerializer; +
+/** + * Internal Factory class for Custom serializers and deserializers. + */ +@Slf4j +public class CustomSerializerFactory { + /** + * A serializer that uses user supplied implementation of {@link CustomSerializer} for serializing the objects. + * It also takes user supplied schema and registers/validates it against the registry. + * + * @param config Serializer config. + * @param schema Schema for the object to serialize + * @param serializer user supplied serializer + * @param <T> Type of object to serialize + * @return Serializer that uses user supplied serialization function for serializing events.
+ */ + public static <T> Serializer<T> serializer(@NonNull SerializerConfig config, @NonNull Schema<T> schema, @NonNull CustomSerializer<T> serializer) { + String groupId = config.getGroupId(); + SchemaRegistryClient schemaRegistryClient = initForSerializer(config); + return new AbstractSerializer<T>(groupId, schemaRegistryClient, + schema, config.getEncoder(), config.isRegisterSchema(), config.isWriteEncodingHeader()) { + @Override + protected void serialize(T var, SchemaInfo schema, OutputStream outputStream) { + serializer.serialize(var, schema, outputStream); + } + }; + } + + /** + * A deserializer that uses user supplied implementation of {@link CustomDeserializer} for deserializing the data into + * typed java objects. + * + * @param config Serializer config. + * @param schema optional Schema for the object to deserialize + * @param deserializer user supplied deserializer + * @param <T> Type of object to deserialize + * @return Deserializer that uses user supplied deserialization function for deserializing payload into typed events. + */ + public static <T> Serializer<T> deserializer(@NonNull SerializerConfig config, @Nullable Schema<T> schema, + @NonNull CustomDeserializer<T> deserializer) { + String groupId = config.getGroupId(); + SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); + + EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); + + return new AbstractDeserializer<T>(groupId, schemaRegistryClient, schema, false, + config.getDecoders(), encodingCache, config.isWriteEncodingHeader()) { + @Override + public final T deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema) { + return deserializer.deserialize(inputStream, writerSchema, readerSchema); + } + }; + } +} diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/EncodingCache.java b/serializers/shared/src/main/java/io/pravega/schemaregistry/serializer/shared/impl/EncodingCache.java similarity index 94% rename from serializers/src/main/java/io/pravega/schemaregistry/serializers/EncodingCache.java rename to serializers/shared/src/main/java/io/pravega/schemaregistry/serializer/shared/impl/EncodingCache.java index bf1910767..649149b4e 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/EncodingCache.java +++ b/serializers/shared/src/main/java/io/pravega/schemaregistry/serializer/shared/impl/EncodingCache.java @@ -7,7 +7,7 @@ * * http://www.apache.org/licenses/LICENSE-2.0 */ -package io.pravega.schemaregistry.serializers; +package io.pravega.schemaregistry.serializer.shared.impl; import com.google.common.annotations.VisibleForTesting; import com.google.common.cache.CacheBuilder; @@ -29,7 +29,7 @@ public class EncodingCache { private static final int MAXIMUM_SIZE = 1000; private final LoadingCache<EncodingId, EncodingInfo> encodingCache; - EncodingCache(String groupId, SchemaRegistryClient schemaRegistryClient) { + public EncodingCache(String groupId, SchemaRegistryClient schemaRegistryClient) { this(groupId, schemaRegistryClient, MAXIMUM_SIZE); } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedAndGenericDeserializer.java b/serializers/shared/src/main/java/io/pravega/schemaregistry/serializer/shared/impl/MultiplexedAndGenericDeserializer.java similarity index 76% rename from serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedAndGenericDeserializer.java rename to serializers/shared/src/main/java/io/pravega/schemaregistry/serializer/shared/impl/MultiplexedAndGenericDeserializer.java index
1baf69f21..2afc5d337 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedAndGenericDeserializer.java +++ b/serializers/shared/src/main/java/io/pravega/schemaregistry/serializer/shared/impl/MultiplexedAndGenericDeserializer.java @@ -7,7 +7,7 @@ * * http://www.apache.org/licenses/LICENSE-2.0 */ -package io.pravega.schemaregistry.serializers; +package io.pravega.schemaregistry.serializer.shared.impl; import com.google.common.base.Preconditions; import io.pravega.schemaregistry.client.SchemaRegistryClient; @@ -18,11 +18,11 @@ import java.io.InputStream; import java.util.Map; -class MultiplexedAndGenericDeserializer extends AbstractDeserializer> { +public class MultiplexedAndGenericDeserializer extends AbstractDeserializer> { private final Map> deserializers; private final AbstractDeserializer genericDeserializer; - MultiplexedAndGenericDeserializer(String groupId, SchemaRegistryClient client, + public MultiplexedAndGenericDeserializer(String groupId, SchemaRegistryClient client, Map> deserializers, AbstractDeserializer genericDeserializer, SerializerConfig.Decoders decoders, @@ -33,13 +33,13 @@ class MultiplexedAndGenericDeserializer extends AbstractDeserializer deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema) throws IOException { + public final Either deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema) throws IOException { Preconditions.checkNotNull(writerSchema); AbstractDeserializer deserializer = deserializers.get(writerSchema.getType()); if (deserializer == null) { return Either.right(genericDeserializer.deserialize(inputStream, writerSchema, readerSchema)); } else { - return Either.left(deserializers.get(writerSchema.getType()).deserialize(inputStream, writerSchema, readerSchema)); + return Either.left(deserializer.deserialize(inputStream, writerSchema, readerSchema)); } } } \ No newline at end of file diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedDeserializer.java b/serializers/shared/src/main/java/io/pravega/schemaregistry/serializer/shared/impl/MultiplexedDeserializer.java similarity index 82% rename from serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedDeserializer.java rename to serializers/shared/src/main/java/io/pravega/schemaregistry/serializer/shared/impl/MultiplexedDeserializer.java index d10c5af21..bc4d655eb 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedDeserializer.java +++ b/serializers/shared/src/main/java/io/pravega/schemaregistry/serializer/shared/impl/MultiplexedDeserializer.java @@ -7,7 +7,7 @@ * * http://www.apache.org/licenses/LICENSE-2.0 */ -package io.pravega.schemaregistry.serializers; +package io.pravega.schemaregistry.serializer.shared.impl; import com.google.common.base.Preconditions; import io.pravega.schemaregistry.client.SchemaRegistryClient; @@ -24,10 +24,10 @@ * * @param Type of object. 
*/ -class MultiplexedDeserializer extends AbstractDeserializer { +public class MultiplexedDeserializer extends AbstractDeserializer { private final Map> deserializers; - MultiplexedDeserializer(String groupId, SchemaRegistryClient client, + public MultiplexedDeserializer(String groupId, SchemaRegistryClient client, Map> deserializers, SerializerConfig.Decoders decoders, EncodingCache encodingCache) { @@ -36,7 +36,7 @@ class MultiplexedDeserializer extends AbstractDeserializer { } @Override - protected T deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema) throws IOException { + public final T deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema) throws IOException { Preconditions.checkNotNull(writerSchema); AbstractDeserializer deserializer = deserializers.get(writerSchema.getType()); if (deserializer == null) { diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedSerializer.java b/serializers/shared/src/main/java/io/pravega/schemaregistry/serializer/shared/impl/MultiplexedSerializer.java similarity index 72% rename from serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedSerializer.java rename to serializers/shared/src/main/java/io/pravega/schemaregistry/serializer/shared/impl/MultiplexedSerializer.java index 34392b620..f440c0caf 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedSerializer.java +++ b/serializers/shared/src/main/java/io/pravega/schemaregistry/serializer/shared/impl/MultiplexedSerializer.java @@ -7,9 +7,7 @@ * * http://www.apache.org/licenses/LICENSE-2.0 */ -package io.pravega.schemaregistry.serializers; - -import io.pravega.client.stream.Serializer; +package io.pravega.schemaregistry.serializer.shared.impl; import java.nio.ByteBuffer; import java.util.Map; @@ -19,10 +17,10 @@ * * @param Type of object. 
*/ -class MultiplexedSerializer implements Serializer { +public class MultiplexedSerializer extends BaseSerializer { private final Map, AbstractSerializer> serializers; - MultiplexedSerializer(Map, AbstractSerializer> serializers) { + public MultiplexedSerializer(Map, AbstractSerializer> serializers) { this.serializers = serializers; } @@ -33,9 +31,4 @@ public ByteBuffer serialize(T obj) { AbstractSerializer serializer = serializers.get(tClass); return serializer.serialize(obj); } - - @Override - public T deserialize(ByteBuffer serializedValue) { - throw new IllegalStateException(); - } } \ No newline at end of file diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java b/serializers/shared/src/main/java/io/pravega/schemaregistry/serializer/shared/impl/SerializerConfig.java similarity index 97% rename from serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java rename to serializers/shared/src/main/java/io/pravega/schemaregistry/serializer/shared/impl/SerializerConfig.java index a9c648bdb..2925808c9 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java +++ b/serializers/shared/src/main/java/io/pravega/schemaregistry/serializer/shared/impl/SerializerConfig.java @@ -7,16 +7,16 @@ * * http://www.apache.org/licenses/LICENSE-2.0 */ -package io.pravega.schemaregistry.serializers; +package io.pravega.schemaregistry.serializer.shared.impl; import com.google.common.base.Preconditions; import com.google.common.base.Strings; import io.pravega.schemaregistry.client.SchemaRegistryClient; import io.pravega.schemaregistry.client.SchemaRegistryClientConfig; -import io.pravega.schemaregistry.codec.Codec; -import io.pravega.schemaregistry.codec.Codecs; -import io.pravega.schemaregistry.codec.Decoder; -import io.pravega.schemaregistry.codec.Encoder; +import io.pravega.schemaregistry.serializer.shared.codec.Codec; +import io.pravega.schemaregistry.serializer.shared.codec.Codecs; +import io.pravega.schemaregistry.serializer.shared.codec.Decoder; +import io.pravega.schemaregistry.serializer.shared.codec.Encoder; import io.pravega.schemaregistry.common.Either; import io.pravega.schemaregistry.contract.data.CodecType; import io.pravega.schemaregistry.contract.data.Compatibility; @@ -255,7 +255,7 @@ public SerializerConfigBuilder registryConfig(SchemaRegistryClientConfig config) } } - static class Decoders { + public static class Decoders { private final ConcurrentHashMap decoders; Decoders() { diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactoryHelper.java b/serializers/shared/src/main/java/io/pravega/schemaregistry/serializer/shared/impl/SerializerFactoryHelper.java similarity index 88% rename from serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactoryHelper.java rename to serializers/shared/src/main/java/io/pravega/schemaregistry/serializer/shared/impl/SerializerFactoryHelper.java index c7816189a..27a576963 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactoryHelper.java +++ b/serializers/shared/src/main/java/io/pravega/schemaregistry/serializer/shared/impl/SerializerFactoryHelper.java @@ -7,7 +7,7 @@ * * http://www.apache.org/licenses/LICENSE-2.0 */ -package io.pravega.schemaregistry.serializers; +package io.pravega.schemaregistry.serializer.shared.impl; import com.google.common.base.Strings; import io.pravega.client.ClientConfig; @@ -22,15 +22,15 @@ import java.util.stream.Collectors; 
@Slf4j -class SerializerFactoryHelper { - static SchemaRegistryClient initForSerializer(SerializerConfig config) { +public class SerializerFactoryHelper { + public static SchemaRegistryClient initForSerializer(SerializerConfig config) { SchemaRegistryClient schemaRegistryClient = getSchemaRegistryClient(config); createGroup(schemaRegistryClient, config); registerCodec(schemaRegistryClient, config); return schemaRegistryClient; } - static SchemaRegistryClient initForDeserializer(SerializerConfig config) { + public static SchemaRegistryClient initForDeserializer(SerializerConfig config) { SchemaRegistryClient schemaRegistryClient = getSchemaRegistryClient(config); createGroup(schemaRegistryClient, config); failOnCodecMismatch(schemaRegistryClient, config); @@ -44,8 +44,8 @@ private static SchemaRegistryClient getSchemaRegistryClient(SerializerConfig con SchemaRegistryClientConfig left = config.getRegistryConfigOrClient().getLeft(); if (left.isAuthEnabled() && Strings.isNullOrEmpty(left.getAuthMethod())) { Credentials creds = ClientConfig.builder().build().getCredentials(); - left = SchemaRegistryClientConfig.builder().schemaRegistryUri(left.getSchemaRegistryUri()).authEnabled(left.isAuthEnabled()) - .authMethod(creds.getAuthenticationType()).authToken(creds.getAuthenticationToken()) + left = SchemaRegistryClientConfig.builder().schemaRegistryUri(left.getSchemaRegistryUri()) + .authentication(creds.getAuthenticationType(), creds.getAuthenticationToken()) .build(); } return SchemaRegistryClientFactory.withNamespace(config.getNamespace(), left); diff --git a/serializers/src/main/java/io/pravega/schemaregistry/schemas/Schema.java b/serializers/shared/src/main/java/io/pravega/schemaregistry/serializer/shared/schemas/Schema.java similarity index 94% rename from serializers/src/main/java/io/pravega/schemaregistry/schemas/Schema.java rename to serializers/shared/src/main/java/io/pravega/schemaregistry/serializer/shared/schemas/Schema.java index a498779eb..2c2a22f1d 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/schemas/Schema.java +++ b/serializers/shared/src/main/java/io/pravega/schemaregistry/serializer/shared/schemas/Schema.java @@ -7,7 +7,7 @@ * * http://www.apache.org/licenses/LICENSE-2.0 */ -package io.pravega.schemaregistry.schemas; +package io.pravega.schemaregistry.serializer.shared.schemas; import io.pravega.schemaregistry.contract.data.SchemaInfo; diff --git a/serializers/src/test/java/io/pravega/schemaregistry/codec/CodecTest.java b/serializers/shared/src/test/java/io/pravega/schemaregistry/serializer/shared/codec/CodecTest.java similarity index 97% rename from serializers/src/test/java/io/pravega/schemaregistry/codec/CodecTest.java rename to serializers/shared/src/test/java/io/pravega/schemaregistry/serializer/shared/codec/CodecTest.java index 6e86596b1..7b7a660d7 100644 --- a/serializers/src/test/java/io/pravega/schemaregistry/codec/CodecTest.java +++ b/serializers/shared/src/test/java/io/pravega/schemaregistry/serializer/shared/codec/CodecTest.java @@ -7,7 +7,7 @@ * * http://www.apache.org/licenses/LICENSE-2.0 */ -package io.pravega.schemaregistry.codec; +package io.pravega.schemaregistry.serializer.shared.codec; import com.google.common.base.Charsets; import com.google.common.collect.ImmutableMap; diff --git a/serializers/src/test/java/io/pravega/schemaregistry/serializers/CacheTest.java b/serializers/shared/src/test/java/io/pravega/schemaregistry/serializer/shared/impl/CacheTest.java similarity index 96% rename from 
serializers/src/test/java/io/pravega/schemaregistry/serializers/CacheTest.java rename to serializers/shared/src/test/java/io/pravega/schemaregistry/serializer/shared/impl/CacheTest.java index 313355dbd..496d34ad6 100644 --- a/serializers/src/test/java/io/pravega/schemaregistry/serializers/CacheTest.java +++ b/serializers/shared/src/test/java/io/pravega/schemaregistry/serializer/shared/impl/CacheTest.java @@ -7,16 +7,16 @@ * * http://www.apache.org/licenses/LICENSE-2.0 */ -package io.pravega.schemaregistry.serializers; +package io.pravega.schemaregistry.serializer.shared.impl; import com.google.common.collect.ImmutableMap; import io.pravega.schemaregistry.client.SchemaRegistryClient; -import io.pravega.schemaregistry.codec.Codecs; import io.pravega.schemaregistry.contract.data.EncodingId; import io.pravega.schemaregistry.contract.data.EncodingInfo; import io.pravega.schemaregistry.contract.data.SchemaInfo; import io.pravega.schemaregistry.contract.data.SerializationFormat; import io.pravega.schemaregistry.contract.data.VersionInfo; +import io.pravega.schemaregistry.serializer.shared.codec.Codecs; import org.junit.Test; import java.nio.ByteBuffer; diff --git a/serializers/shared/src/test/java/io/pravega/schemaregistry/serializer/shared/testobjs/Address.java b/serializers/shared/src/test/java/io/pravega/schemaregistry/serializer/shared/testobjs/Address.java new file mode 100644 index 000000000..94c6a3ead --- /dev/null +++ b/serializers/shared/src/test/java/io/pravega/schemaregistry/serializer/shared/testobjs/Address.java @@ -0,0 +1,22 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.serializer.shared.testobjs; + +import lombok.AllArgsConstructor; +import lombok.Data; +import lombok.NoArgsConstructor; + +@Data +@AllArgsConstructor +@NoArgsConstructor +public class Address { + private String streetAddress; + private String city; +} diff --git a/serializers/shared/src/test/java/io/pravega/schemaregistry/serializer/shared/testobjs/DerivedUser1.java b/serializers/shared/src/test/java/io/pravega/schemaregistry/serializer/shared/testobjs/DerivedUser1.java new file mode 100644 index 000000000..542855042 --- /dev/null +++ b/serializers/shared/src/test/java/io/pravega/schemaregistry/serializer/shared/testobjs/DerivedUser1.java @@ -0,0 +1,28 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.serializer.shared.testobjs; + +import lombok.Data; +import lombok.EqualsAndHashCode; +import lombok.Getter; +import lombok.NoArgsConstructor; + +@Data +@NoArgsConstructor +@EqualsAndHashCode(callSuper = true) +public class DerivedUser1 extends User { + @Getter + private String user1; + + public DerivedUser1(String name, Address address, int age, String user1) { + super(name, address, age); + this.user1 = user1; + } +} diff --git a/serializers/shared/src/test/java/io/pravega/schemaregistry/serializer/shared/testobjs/DerivedUser2.java b/serializers/shared/src/test/java/io/pravega/schemaregistry/serializer/shared/testobjs/DerivedUser2.java new file mode 100644 index 000000000..d4d3036a8 --- /dev/null +++ b/serializers/shared/src/test/java/io/pravega/schemaregistry/serializer/shared/testobjs/DerivedUser2.java @@ -0,0 +1,28 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.serializer.shared.testobjs; + +import lombok.Data; +import lombok.EqualsAndHashCode; +import lombok.Getter; +import lombok.NoArgsConstructor; + +@Data +@NoArgsConstructor +@EqualsAndHashCode(callSuper = true) +public class DerivedUser2 extends User { + @Getter + private String user2; + + public DerivedUser2(String name, Address address, int age, String user2) { + super(name, address, age); + this.user2 = user2; + } +} diff --git a/serializers/shared/src/test/java/io/pravega/schemaregistry/serializer/shared/testobjs/User.java b/serializers/shared/src/test/java/io/pravega/schemaregistry/serializer/shared/testobjs/User.java new file mode 100644 index 000000000..0ad7ffa4b --- /dev/null +++ b/serializers/shared/src/test/java/io/pravega/schemaregistry/serializer/shared/testobjs/User.java @@ -0,0 +1,27 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.serializer.shared.testobjs; + +import lombok.AllArgsConstructor; +import lombok.Data; +import lombok.Getter; +import lombok.NoArgsConstructor; + +@Data +@AllArgsConstructor +@NoArgsConstructor +public class User { + @Getter + private String name; + @Getter + private Address address; + @Getter + private int age; +} diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroSerializerFactory.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroSerializerFactory.java deleted file mode 100644 index 5e9cb8170..000000000 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroSerializerFactory.java +++ /dev/null @@ -1,113 +0,0 @@ -/** - * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - */ -package io.pravega.schemaregistry.serializers; - -import com.google.common.base.Preconditions; -import io.pravega.client.stream.Serializer; -import io.pravega.schemaregistry.client.SchemaRegistryClient; -import io.pravega.schemaregistry.common.Either; -import io.pravega.schemaregistry.schemas.AvroSchema; -import lombok.extern.slf4j.Slf4j; - -import javax.annotation.Nullable; -import java.util.Map; -import java.util.stream.Collectors; - -import static io.pravega.schemaregistry.serializers.SerializerFactoryHelper.initForDeserializer; -import static io.pravega.schemaregistry.serializers.SerializerFactoryHelper.initForSerializer; - -/** - * Internal Factory class for Avro serializers and deserializers. - */ -@Slf4j -class AvroSerializerFactory { - static Serializer serializer(SerializerConfig config, AvroSchema schema) { - Preconditions.checkNotNull(config); - Preconditions.checkNotNull(schema); - Preconditions.checkArgument(config.isWriteEncodingHeader(), "Events should be tagged with encoding ids."); - SchemaRegistryClient schemaRegistryClient = initForSerializer(config); - String groupId = config.getGroupId(); - return new AvroSerializer<>(groupId, schemaRegistryClient, schema, config.getEncoder(), config.isRegisterSchema()); - } - - static Serializer deserializer(SerializerConfig config, AvroSchema schema) { - Preconditions.checkNotNull(config); - Preconditions.checkNotNull(schema); - Preconditions.checkArgument(config.isWriteEncodingHeader(), "Events should be tagged with encoding ids."); - SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); - String groupId = config.getGroupId(); - - EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); - - return new AvroDeserializer<>(groupId, schemaRegistryClient, schema, config.getDecoders(), encodingCache); - } - - static Serializer genericDeserializer(SerializerConfig config, @Nullable AvroSchema schema) { - Preconditions.checkNotNull(config); - Preconditions.checkArgument(config.isWriteEncodingHeader(), "Events should be tagged with encoding ids."); - String groupId = config.getGroupId(); - SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); - EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); - - return new AvroGenericDeserializer(groupId, schemaRegistryClient, schema, config.getDecoders(), encodingCache); - } - - static Serializer multiTypeSerializer(SerializerConfig config, Map, AvroSchema> schemas) { - Preconditions.checkNotNull(config); - Preconditions.checkNotNull(schemas); - Preconditions.checkArgument(config.isWriteEncodingHeader(), "Events should be tagged with encoding ids."); - - String groupId = config.getGroupId(); - SchemaRegistryClient schemaRegistryClient = initForSerializer(config); - Map, AbstractSerializer> serializerMap = schemas - .entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, - x -> new AvroSerializer<>(groupId, schemaRegistryClient, x.getValue(), config.getEncoder(), - config.isRegisterSchema()))); - return new MultiplexedSerializer<>(serializerMap); - } - - static Serializer multiTypeDeserializer( - SerializerConfig config, Map, AvroSchema> schemas) { - Preconditions.checkNotNull(config); - Preconditions.checkNotNull(schemas); - Preconditions.checkArgument(config.isWriteEncodingHeader(), "Events should be tagged with encoding ids."); - - String groupId = config.getGroupId(); - SchemaRegistryClient 
schemaRegistryClient = initForDeserializer(config); - - EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); - - Map> deserializerMap = schemas - .values().stream().collect(Collectors.toMap(x -> x.getSchemaInfo().getType(), - x -> new AvroDeserializer<>(groupId, schemaRegistryClient, x, config.getDecoders(), encodingCache))); - return new MultiplexedDeserializer<>(groupId, schemaRegistryClient, deserializerMap, config.getDecoders(), - encodingCache); - } - - static Serializer> typedOrGenericDeserializer( - SerializerConfig config, Map, AvroSchema> schemas) { - Preconditions.checkNotNull(config); - Preconditions.checkNotNull(schemas); - Preconditions.checkArgument(config.isWriteEncodingHeader(), "Events should be tagged with encoding ids."); - - String groupId = config.getGroupId(); - SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); - - EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); - - Map> deserializerMap = schemas - .values().stream().collect(Collectors.toMap(x -> x.getSchemaInfo().getType(), - x -> new AvroDeserializer<>(groupId, schemaRegistryClient, x, config.getDecoders(), encodingCache))); - AbstractDeserializer genericDeserializer = new AvroGenericDeserializer(groupId, schemaRegistryClient, - null, config.getDecoders(), encodingCache); - return new MultiplexedAndGenericDeserializer<>(groupId, schemaRegistryClient, deserializerMap, genericDeserializer, - config.getDecoders(), encodingCache); - } -} diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/CustomSerializerFactory.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/CustomSerializerFactory.java deleted file mode 100644 index 380a067af..000000000 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/CustomSerializerFactory.java +++ /dev/null @@ -1,64 +0,0 @@ -/** - * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - */ -package io.pravega.schemaregistry.serializers; - -import com.google.common.base.Preconditions; -import io.pravega.client.stream.Serializer; -import io.pravega.schemaregistry.client.SchemaRegistryClient; -import io.pravega.schemaregistry.contract.data.SchemaInfo; -import io.pravega.schemaregistry.schemas.Schema; -import lombok.extern.slf4j.Slf4j; - -import javax.annotation.Nullable; -import java.io.InputStream; -import java.io.OutputStream; - -import static io.pravega.schemaregistry.serializers.SerializerFactoryHelper.initForDeserializer; -import static io.pravega.schemaregistry.serializers.SerializerFactoryHelper.initForSerializer; - -/** - * Internal Factory class for Custom serializers and deserializers. 
- */ -@Slf4j -class CustomSerializerFactory { - static Serializer serializer(SerializerConfig config, Schema schema, CustomSerializer serializer) { - Preconditions.checkNotNull(config); - Preconditions.checkNotNull(schema); - Preconditions.checkNotNull(serializer); - String groupId = config.getGroupId(); - SchemaRegistryClient schemaRegistryClient = initForSerializer(config); - return new AbstractSerializer(groupId, schemaRegistryClient, - schema, config.getEncoder(), config.isRegisterSchema(), config.isWriteEncodingHeader()) { - @Override - protected void serialize(T var, SchemaInfo schema, OutputStream outputStream) { - serializer.serialize(var, schema, outputStream); - } - }; - } - - static Serializer deserializer(SerializerConfig config, @Nullable Schema schema, - CustomDeserializer deserializer) { - Preconditions.checkNotNull(config); - Preconditions.checkNotNull(deserializer); - - String groupId = config.getGroupId(); - SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); - - EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); - - return new AbstractDeserializer(groupId, schemaRegistryClient, schema, false, - config.getDecoders(), encodingCache, config.isWriteEncodingHeader()) { - @Override - protected T deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema) { - return deserializer.deserialize(inputStream, writerSchema, readerSchema); - } - }; - } -} diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonSerializerFactory.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonSerializerFactory.java deleted file mode 100644 index 76dc8c983..000000000 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonSerializerFactory.java +++ /dev/null @@ -1,128 +0,0 @@ -/** - * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - */ -package io.pravega.schemaregistry.serializers; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.base.Preconditions; -import io.pravega.client.stream.Serializer; -import io.pravega.schemaregistry.client.SchemaRegistryClient; -import io.pravega.schemaregistry.common.Either; -import io.pravega.schemaregistry.schemas.JSONSchema; -import lombok.extern.slf4j.Slf4j; - -import java.util.Map; -import java.util.stream.Collectors; - -import static io.pravega.schemaregistry.serializers.SerializerFactoryHelper.initForDeserializer; -import static io.pravega.schemaregistry.serializers.SerializerFactoryHelper.initForSerializer; - -/** - * Internal Factory class for json serializers and deserializers. 
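A hedged sketch of the custom-format hook this internal factory wraps: the CustomSerializer and CustomDeserializer callbacks receive the raw stream plus the resolved SchemaInfo, so a toy UTF-8 string codec could look like the following. Class name and error handling are illustrative only.

import io.pravega.schemaregistry.contract.data.SchemaInfo;
import io.pravega.schemaregistry.serializer.shared.impl.CustomDeserializer;
import io.pravega.schemaregistry.serializer.shared.impl.CustomSerializer;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.UncheckedIOException;
import java.nio.charset.StandardCharsets;

public class Utf8StringCodecSketch {
    // Writes the event as raw UTF-8 bytes; the registry-resolved SchemaInfo is not needed for this toy format.
    static final CustomSerializer<String> SERIALIZER = new CustomSerializer<String>() {
        @Override
        public void serialize(String event, SchemaInfo schema, OutputStream outputStream) {
            try {
                outputStream.write(event.getBytes(StandardCharsets.UTF_8));
            } catch (IOException e) {
                throw new UncheckedIOException(e);
            }
        }
    };

    // Reads the payload back as a UTF-8 string; writer and reader schemas are ignored here.
    static final CustomDeserializer<String> DESERIALIZER = new CustomDeserializer<String>() {
        @Override
        public String deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema) {
            try {
                ByteArrayOutputStream buffer = new ByteArrayOutputStream();
                byte[] chunk = new byte[4096];
                int read;
                while ((read = inputStream.read(chunk)) != -1) {
                    buffer.write(chunk, 0, read);
                }
                return new String(buffer.toByteArray(), StandardCharsets.UTF_8);
            } catch (IOException e) {
                throw new UncheckedIOException(e);
            }
        }
    };
}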
- */ -@Slf4j -class JsonSerializerFactory { - static Serializer serializer(SerializerConfig config, JSONSchema schema) { - Preconditions.checkNotNull(config); - Preconditions.checkNotNull(schema); - String groupId = config.getGroupId(); - SchemaRegistryClient schemaRegistryClient = initForSerializer(config); - return new JsonSerializer<>(groupId, schemaRegistryClient, schema, config.getEncoder(), - config.isRegisterSchema(), config.isWriteEncodingHeader()); - } - - static Serializer deserializer(SerializerConfig config, JSONSchema schema) { - Preconditions.checkNotNull(config); - Preconditions.checkNotNull(schema); - String groupId = config.getGroupId(); - SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); - - EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); - - // schema can be null in which case deserialization will happen into dynamic message - return new JsonDeserializer<>(groupId, schemaRegistryClient, schema, config.getDecoders(), encodingCache, - config.isWriteEncodingHeader()); - } - - static Serializer> genericDeserializer(SerializerConfig config) { - Preconditions.checkNotNull(config); - SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); - - String groupId = config.getGroupId(); - - EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); - - return new JsonGenericDeserializer(groupId, schemaRegistryClient, config.getDecoders(), - encodingCache, config.isWriteEncodingHeader()); - } - - static Serializer jsonStringDeserializer(SerializerConfig config) { - Preconditions.checkNotNull(config); - SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); - - String groupId = config.getGroupId(); - - EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); - - return new JsonStringDeserializer(groupId, schemaRegistryClient, config.getDecoders(), encodingCache, config.isWriteEncodingHeader()); - } - - static Serializer multiTypeSerializer( - SerializerConfig config, Map, JSONSchema> schemas) { - Preconditions.checkNotNull(config); - Preconditions.checkNotNull(schemas); - Preconditions.checkArgument(config.isWriteEncodingHeader(), "Events should be tagged with encoding ids."); - String groupId = config.getGroupId(); - SchemaRegistryClient schemaRegistryClient = initForSerializer(config); - Map, AbstractSerializer> serializerMap = schemas - .entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, - x -> new JsonSerializer<>(groupId, schemaRegistryClient, x.getValue(), config.getEncoder(), - config.isRegisterSchema(), config.isWriteEncodingHeader()))); - return new MultiplexedSerializer<>(serializerMap); - } - - static Serializer multiTypeDeserializer( - SerializerConfig config, Map, JSONSchema> schemas) { - Preconditions.checkNotNull(config); - Preconditions.checkNotNull(schemas); - Preconditions.checkArgument(config.isWriteEncodingHeader(), "Events should be tagged with encoding ids."); - String groupId = config.getGroupId(); - SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); - - EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); - - Map> deserializerMap = schemas - .values().stream().collect(Collectors.toMap(x -> x.getSchemaInfo().getType(), - x -> new JsonDeserializer<>(groupId, schemaRegistryClient, x, config.getDecoders(), - encodingCache, config.isWriteEncodingHeader()))); - return new MultiplexedDeserializer<>(groupId, schemaRegistryClient, - deserializerMap, config.getDecoders(), 
encodingCache); - } - - static Serializer>> typedOrGenericDeserializer( - SerializerConfig config, Map, JSONSchema> schemas) { - Preconditions.checkNotNull(config); - Preconditions.checkNotNull(schemas); - Preconditions.checkArgument(config.isWriteEncodingHeader(), "Events should be tagged with encoding ids."); - String groupId = config.getGroupId(); - SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); - - EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); - - Map> deserializerMap = schemas - .values().stream().collect(Collectors.toMap(x -> x.getSchemaInfo().getType(), - x -> new JsonDeserializer<>(groupId, schemaRegistryClient, x, config.getDecoders(), encodingCache, - config.isWriteEncodingHeader()))); - JsonGenericDeserializer genericDeserializer = new JsonGenericDeserializer(groupId, schemaRegistryClient, config.getDecoders(), - encodingCache, config.isWriteEncodingHeader()); - - return new MultiplexedAndGenericDeserializer<>(groupId, schemaRegistryClient, - deserializerMap, genericDeserializer, config.getDecoders(), encodingCache); - } -} diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonGenericDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonWithSchemaDeserializer.java similarity index 64% rename from serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonGenericDeserializer.java rename to serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonWithSchemaDeserializer.java index 29daaa05d..d5eebac4f 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonGenericDeserializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonWithSchemaDeserializer.java @@ -15,22 +15,25 @@ import com.fasterxml.jackson.databind.ObjectMapper; import io.pravega.schemaregistry.client.SchemaRegistryClient; import io.pravega.schemaregistry.contract.data.SchemaInfo; +import io.pravega.schemaregistry.serializer.shared.impl.AbstractDeserializer; +import io.pravega.schemaregistry.serializer.shared.impl.EncodingCache; +import io.pravega.schemaregistry.serializer.shared.impl.SerializerConfig; import java.io.IOException; import java.io.InputStream; -class JsonGenericDeserializer extends AbstractDeserializer> { +class JsonWithSchemaDeserializer extends AbstractDeserializer> { private final ObjectMapper objectMapper; - JsonGenericDeserializer(String groupId, SchemaRegistryClient client, - SerializerConfig.Decoders decoders, EncodingCache encodingCache, boolean encodeHeader) { + JsonWithSchemaDeserializer(String groupId, SchemaRegistryClient client, + SerializerConfig.Decoders decoders, EncodingCache encodingCache, boolean encodeHeader) { super(groupId, client, null, false, decoders, encodingCache, encodeHeader); this.objectMapper = new ObjectMapper(); objectMapper.setVisibility(PropertyAccessor.ALL, JsonAutoDetect.Visibility.ANY); } - + @Override - protected WithSchema deserialize(InputStream inputStream, SchemaInfo writerSchemaInfo, SchemaInfo readerSchemaInfo) throws IOException { + public final WithSchema deserialize(InputStream inputStream, SchemaInfo writerSchemaInfo, SchemaInfo readerSchemaInfo) throws IOException { JsonNode obj = objectMapper.readTree(inputStream); return new WithSchema<>(writerSchemaInfo, obj, (x, y) -> (JsonNode) y); } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatSerializerFactory.java 
b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatSerializerFactory.java index bee87cb01..f2a46df0c 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatSerializerFactory.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatSerializerFactory.java @@ -13,13 +13,26 @@ import com.google.common.base.Preconditions; import com.google.protobuf.DynamicMessage; import io.pravega.client.stream.Serializer; +import io.pravega.schemaregistry.serializer.avro.schemas.AvroSchema; +import io.pravega.schemaregistry.serializer.avro.impl.AvroGenericDeserializer; +import io.pravega.schemaregistry.serializer.avro.impl.AvroSerializer; import io.pravega.schemaregistry.client.SchemaRegistryClient; import io.pravega.schemaregistry.contract.data.SchemaInfo; import io.pravega.schemaregistry.contract.data.SerializationFormat; -import io.pravega.schemaregistry.schemas.AvroSchema; -import io.pravega.schemaregistry.schemas.JSONSchema; -import io.pravega.schemaregistry.schemas.ProtobufSchema; -import io.pravega.schemaregistry.schemas.Schema; +import io.pravega.schemaregistry.serializer.json.schemas.JSONSchema; +import io.pravega.schemaregistry.serializer.json.impl.JsonGenericDeserializer; +import io.pravega.schemaregistry.serializer.json.impl.JsonSerializer; +import io.pravega.schemaregistry.serializer.protobuf.schemas.ProtobufSchema; +import io.pravega.schemaregistry.serializer.protobuf.impl.ProtobufGenericDeserializer; +import io.pravega.schemaregistry.serializer.protobuf.impl.ProtobufSerializer; +import io.pravega.schemaregistry.serializer.shared.schemas.Schema; +import io.pravega.schemaregistry.serializer.shared.impl.AbstractDeserializer; +import io.pravega.schemaregistry.serializer.shared.impl.AbstractSerializer; +import io.pravega.schemaregistry.serializer.shared.impl.CustomDeserializer; +import io.pravega.schemaregistry.serializer.shared.impl.CustomSerializer; +import io.pravega.schemaregistry.serializer.shared.impl.EncodingCache; +import io.pravega.schemaregistry.serializer.shared.impl.SerializerConfig; +import lombok.NonNull; import lombok.extern.slf4j.Slf4j; import org.apache.avro.generic.GenericRecord; @@ -31,9 +44,9 @@ import java.util.function.BiFunction; import java.util.function.Function; -import static io.pravega.schemaregistry.serializers.SerializerFactoryHelper.initForDeserializer; -import static io.pravega.schemaregistry.serializers.SerializerFactoryHelper.initForSerializer; import static io.pravega.schemaregistry.serializers.WithSchema.NO_TRANSFORM; +import static io.pravega.schemaregistry.serializer.shared.impl.SerializerFactoryHelper.initForDeserializer; +import static io.pravega.schemaregistry.serializer.shared.impl.SerializerFactoryHelper.initForSerializer; /** * Internal Factory class for multi format serializers and deserializers. 
@@ -43,14 +56,12 @@ @Slf4j class MultiFormatSerializerFactory { // region multi format - static Serializer> serializer(SerializerConfig config) { - Preconditions.checkNotNull(config); + static Serializer> serializer(@NonNull SerializerConfig config) { Preconditions.checkArgument(config.isWriteEncodingHeader(), "Events should be tagged with encoding ids."); return serializerInternal(config, Collections.emptyMap()); } - static Serializer> deserializerWithSchema(SerializerConfig config) { - Preconditions.checkNotNull(config); + static Serializer> deserializerWithSchema(@NonNull SerializerConfig config) { Preconditions.checkArgument(config.isWriteEncodingHeader(), "Events should be tagged with encoding ids."); return deserializerInternal(config, Collections.emptyMap(), NO_TRANSFORM); } @@ -65,24 +76,21 @@ static Serializer> deserializerWithSchema(SerializerConfig co * This also takes a transform function which is applied on the deserialized object and should transform the object * into the type T. * - * @param config serializer config + * @param config serializer config * @param transform a transform function that transforms the deserialized object based on the serialization format * into an object of type T. * @param Type of object to get back from deserializer. * @return a deserializer that can deserialize protobuf, json or avro events into java objects. */ - static Serializer deserializeAsT(SerializerConfig config, - BiFunction transform) { - Preconditions.checkNotNull(config); - Preconditions.checkNotNull(transform); + static Serializer deserializeAsT(@NonNull SerializerConfig config, + @NonNull BiFunction transform) { Preconditions.checkArgument(config.isWriteEncodingHeader(), "Events should be tagged with encoding ids."); return deserializeAsTInternal(config, Collections.emptyMap(), transform); } // endregion - private static Serializer> serializerInternal(SerializerConfig config, - Map> customSerializers) { - Preconditions.checkNotNull(config); + private static Serializer> serializerInternal(@NonNull SerializerConfig config, + @NonNull Map> customSerializers) { Preconditions.checkArgument(config.isWriteEncodingHeader(), "Events should be tagged with encoding ids."); SchemaRegistryClient schemaRegistryClient = initForSerializer(config); String groupId = config.getGroupId(); @@ -116,7 +124,7 @@ private static Serializer deserializeAsTInternal(SerializerConfig config, map.put(key, new AbstractDeserializer(groupId, schemaRegistryClient, null, false, config.getDecoders(), encodingCache, config.isWriteEncodingHeader()) { @Override - protected Object deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema) { + public final Object deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema) { return value.deserialize(inputStream, writerSchema, readerSchema); } }); @@ -148,7 +156,7 @@ private static Serializer> deserializerInternal(SerializerConf map.put(key, new AbstractDeserializer(groupId, schemaRegistryClient, null, false, config.getDecoders(), encodingCache, config.isWriteEncodingHeader()) { @Override - protected Object deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema) { + public final Object deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema) { return value.deserialize(inputStream, writerSchema, readerSchema); } }); diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatWithSchemaDeserializer.java 
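A hedged illustration of the transform argument documented above for deserializeAsT: it receives the SerializationFormat plus the generically deserialized object (a GenericRecord for Avro, a DynamicMessage for protobuf, a JsonNode for JSON in this factory) and maps it to the caller's type. The string conversion below is only an example of such a function, not part of the patch.

import com.fasterxml.jackson.databind.JsonNode;
import com.google.protobuf.DynamicMessage;
import io.pravega.schemaregistry.contract.data.SerializationFormat;
import org.apache.avro.generic.GenericRecord;

import java.util.function.BiFunction;

public class FormatTransformSketch {
    // Shape expected by deserializeAsT(config, transform): (format, deserialized object) -> T.
    static final BiFunction<SerializationFormat, Object, String> TO_STRING = (format, value) -> {
        switch (format) {
            case Avro:
                return ((GenericRecord) value).toString();
            case Protobuf:
                return ((DynamicMessage) value).toString();
            case Json:
                return ((JsonNode) value).toString();
            default:
                return String.valueOf(value);
        }
    };
}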
b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatWithSchemaDeserializer.java index 93e2e7f4d..95df8ddca 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatWithSchemaDeserializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatWithSchemaDeserializer.java @@ -13,6 +13,9 @@ import io.pravega.schemaregistry.client.SchemaRegistryClient; import io.pravega.schemaregistry.contract.data.SchemaInfo; import io.pravega.schemaregistry.contract.data.SerializationFormat; +import io.pravega.schemaregistry.serializer.shared.impl.AbstractDeserializer; +import io.pravega.schemaregistry.serializer.shared.impl.EncodingCache; +import io.pravega.schemaregistry.serializer.shared.impl.SerializerConfig; import java.io.IOException; import java.io.InputStream; @@ -33,7 +36,7 @@ class MultiFormatWithSchemaDeserializer extends AbstractDeserializer deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema) throws IOException { + public final WithSchema deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema) throws IOException { Preconditions.checkNotNull(writerSchema); Object obj = genericDeserializers.get(writerSchema.getSerializationFormat()).deserialize(inputStream, writerSchema, readerSchema); if (obj instanceof WithSchema) { diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultipleFormatDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultipleFormatDeserializer.java index b47e5c9a4..dcbb54919 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultipleFormatDeserializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultipleFormatDeserializer.java @@ -13,6 +13,9 @@ import io.pravega.schemaregistry.client.SchemaRegistryClient; import io.pravega.schemaregistry.contract.data.SchemaInfo; import io.pravega.schemaregistry.contract.data.SerializationFormat; +import io.pravega.schemaregistry.serializer.shared.impl.AbstractDeserializer; +import io.pravega.schemaregistry.serializer.shared.impl.EncodingCache; +import io.pravega.schemaregistry.serializer.shared.impl.SerializerConfig; import java.io.IOException; import java.io.InputStream; @@ -33,9 +36,10 @@ class MultipleFormatDeserializer extends AbstractDeserializer { } @Override - protected T deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema) throws IOException { + public final T deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema) throws IOException { Preconditions.checkNotNull(writerSchema); return transform.apply(writerSchema.getSerializationFormat(), - genericDeserializers.get(writerSchema.getSerializationFormat()).deserialize(inputStream, writerSchema, readerSchema)); + genericDeserializers.get(writerSchema.getSerializationFormat()) + .deserialize(inputStream, writerSchema, readerSchema)); } } \ No newline at end of file diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultipleFormatSerializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultipleFormatSerializer.java index f3e3d6bef..327b9306f 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultipleFormatSerializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultipleFormatSerializer.java @@ -11,6 +11,7 @@ import io.pravega.client.stream.Serializer; import 
io.pravega.schemaregistry.contract.data.SchemaInfo; +import io.pravega.schemaregistry.serializer.shared.impl.AbstractSerializer; import org.apache.commons.lang3.NotImplementedException; import java.nio.ByteBuffer; diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufSerializerFactory.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufSerializerFactory.java deleted file mode 100644 index ee3975ddc..000000000 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufSerializerFactory.java +++ /dev/null @@ -1,122 +0,0 @@ -/** - * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - */ -package io.pravega.schemaregistry.serializers; - -import com.google.common.base.Preconditions; -import com.google.protobuf.DynamicMessage; -import com.google.protobuf.GeneratedMessageV3; -import com.google.protobuf.Message; -import io.pravega.client.stream.Serializer; -import io.pravega.schemaregistry.client.SchemaRegistryClient; -import io.pravega.schemaregistry.common.Either; -import io.pravega.schemaregistry.schemas.ProtobufSchema; -import lombok.extern.slf4j.Slf4j; - -import javax.annotation.Nullable; -import java.util.Map; -import java.util.stream.Collectors; - -import static io.pravega.schemaregistry.serializers.SerializerFactoryHelper.initForDeserializer; -import static io.pravega.schemaregistry.serializers.SerializerFactoryHelper.initForSerializer; - -/** - * Internal Factory class for protobuf serializers and deserializers. - */ -@Slf4j -class ProtobufSerializerFactory { - static Serializer serializer(SerializerConfig config, - ProtobufSchema schema) { - Preconditions.checkNotNull(config); - Preconditions.checkNotNull(schema); - String groupId = config.getGroupId(); - SchemaRegistryClient schemaRegistryClient = initForSerializer(config); - return new ProtobufSerializer<>(groupId, schemaRegistryClient, schema, config.getEncoder(), - config.isRegisterSchema(), config.isWriteEncodingHeader()); - } - - static Serializer deserializer(SerializerConfig config, - ProtobufSchema schema) { - Preconditions.checkNotNull(config); - Preconditions.checkNotNull(schema); - String groupId = config.getGroupId(); - SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); - - EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); - - // schema can be null in which case deserialization will happen into dynamic message - return new ProtobufDeserializer<>(groupId, schemaRegistryClient, schema, config.getDecoders(), encodingCache, - config.isWriteEncodingHeader()); - } - - static Serializer genericDeserializer(SerializerConfig config, @Nullable ProtobufSchema schema) { - Preconditions.checkNotNull(config); - Preconditions.checkArgument(schema != null || config.isWriteEncodingHeader(), - "Either read schema should be supplied or events should be tagged with encoding ids."); - SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); - - String groupId = config.getGroupId(); - EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); - - return new ProtobufGenericDeserializer(groupId, schemaRegistryClient, schema, config.getDecoders(), encodingCache, - config.isWriteEncodingHeader()); - } - - static Serializer 
multiTypeSerializer( - SerializerConfig config, Map, ProtobufSchema> schemas) { - Preconditions.checkNotNull(config); - Preconditions.checkNotNull(schemas); - Preconditions.checkArgument(config.isWriteEncodingHeader(), "Events should be tagged with encoding ids."); - String groupId = config.getGroupId(); - SchemaRegistryClient schemaRegistryClient = initForSerializer(config); - - Map, AbstractSerializer> serializerMap = schemas - .entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, - x -> new ProtobufSerializer<>(groupId, schemaRegistryClient, x.getValue(), config.getEncoder(), - config.isRegisterSchema(), config.isWriteEncodingHeader()))); - return new MultiplexedSerializer<>(serializerMap); - } - - static Serializer multiTypeDeserializer( - SerializerConfig config, Map, ProtobufSchema> schemas) { - Preconditions.checkNotNull(config); - Preconditions.checkNotNull(schemas); - Preconditions.checkArgument(config.isWriteEncodingHeader(), "Events should be tagged with encoding ids."); - String groupId = config.getGroupId(); - SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); - - EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); - - Map> deserializerMap = schemas - .values().stream().collect(Collectors.toMap(x -> x.getSchemaInfo().getType(), - x -> new ProtobufDeserializer<>(groupId, schemaRegistryClient, x, config.getDecoders(), encodingCache, - config.isWriteEncodingHeader()))); - return new MultiplexedDeserializer<>(groupId, schemaRegistryClient, deserializerMap, config.getDecoders(), encodingCache); - } - - static Serializer> typedOrGenericDeserializer( - SerializerConfig config, Map, ProtobufSchema> schemas) { - Preconditions.checkNotNull(config); - Preconditions.checkNotNull(schemas); - Preconditions.checkArgument(config.isWriteEncodingHeader(), "Events should be tagged with encoding ids."); - String groupId = config.getGroupId(); - SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); - - EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); - - Map> deserializerMap = schemas - .values().stream().collect(Collectors.toMap(x -> x.getSchemaInfo().getType(), - x -> new ProtobufDeserializer<>(groupId, schemaRegistryClient, x, config.getDecoders(), encodingCache, - config.isWriteEncodingHeader()))); - ProtobufGenericDeserializer genericDeserializer = new ProtobufGenericDeserializer(groupId, schemaRegistryClient, null, - config.getDecoders(), encodingCache, config.isWriteEncodingHeader()); - return new MultiplexedAndGenericDeserializer<>(groupId, schemaRegistryClient, deserializerMap, genericDeserializer, - config.getDecoders(), encodingCache); - } -} diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactory.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactory.java index e2467ddd0..eb29bf709 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactory.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactory.java @@ -11,17 +11,31 @@ import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.module.jsonSchema.JsonSchema; +import com.google.common.base.Preconditions; import com.google.protobuf.DynamicMessage; import com.google.protobuf.GeneratedMessageV3; import com.google.protobuf.Message; import io.pravega.client.stream.Serializer; +import io.pravega.schemaregistry.serializer.avro.schemas.AvroSchema; +import 
io.pravega.schemaregistry.serializer.avro.impl.AvroSerializerFactory; +import io.pravega.schemaregistry.client.SchemaRegistryClient; import io.pravega.schemaregistry.common.Either; import io.pravega.schemaregistry.contract.data.EncodingInfo; import io.pravega.schemaregistry.contract.data.SerializationFormat; -import io.pravega.schemaregistry.schemas.AvroSchema; -import io.pravega.schemaregistry.schemas.JSONSchema; -import io.pravega.schemaregistry.schemas.ProtobufSchema; -import io.pravega.schemaregistry.schemas.Schema; +import io.pravega.schemaregistry.serializer.json.schemas.JSONSchema; +import io.pravega.schemaregistry.serializer.json.impl.JsonDeserializer; +import io.pravega.schemaregistry.serializer.json.impl.JsonSerializerFactory; +import io.pravega.schemaregistry.serializer.protobuf.schemas.ProtobufSchema; +import io.pravega.schemaregistry.serializer.protobuf.impl.ProtobufSerializerFactory; +import io.pravega.schemaregistry.serializer.shared.schemas.Schema; +import io.pravega.schemaregistry.serializer.shared.impl.AbstractDeserializer; +import io.pravega.schemaregistry.serializer.shared.impl.CustomDeserializer; +import io.pravega.schemaregistry.serializer.shared.impl.CustomSerializer; +import io.pravega.schemaregistry.serializer.shared.impl.CustomSerializerFactory; +import io.pravega.schemaregistry.serializer.shared.impl.EncodingCache; +import io.pravega.schemaregistry.serializer.shared.impl.MultiplexedAndGenericDeserializer; +import io.pravega.schemaregistry.serializer.shared.impl.SerializerConfig; +import lombok.NonNull; import lombok.extern.slf4j.Slf4j; import org.apache.avro.generic.GenericRecord; @@ -29,8 +43,10 @@ import java.nio.ByteBuffer; import java.util.Map; import java.util.function.BiFunction; +import java.util.stream.Collectors; import static com.google.protobuf.DescriptorProtos.FileDescriptorSet; +import static io.pravega.schemaregistry.serializer.shared.impl.SerializerFactoryHelper.initForDeserializer; import static io.pravega.schemaregistry.serializers.WithSchema.JSON_TRANSFORM; import static io.pravega.schemaregistry.serializers.WithSchema.NO_TRANSFORM; @@ -45,13 +61,13 @@ public class SerializerFactory { * It does not implement {@link Serializer#deserialize(ByteBuffer)}. * * @param config Serializer Config used for instantiating a new serializer. - * @param schemaContainer Schema container that encapsulates an AvroSchema + * @param schema Schema container that encapsulates an AvroSchema * @param Type of event. It accepts either POJO or Avro generated classes and serializes them. * @return A Serializer Implementation that can be used in {@link io.pravega.client.stream.EventStreamWriter} or * {@link io.pravega.client.stream.TransactionalEventStreamWriter}. */ - public static Serializer avroSerializer(SerializerConfig config, AvroSchema schemaContainer) { - return AvroSerializerFactory.serializer(config, schemaContainer); + public static Serializer avroSerializer(SerializerConfig config, AvroSchema schema) { + return AvroSerializerFactory.serializer(config, schema); } /** @@ -62,12 +78,12 @@ public static Serializer avroSerializer(SerializerConfig config, AvroSche * It does not implement {@link Serializer#serialize(Object)}. * * @param config Serializer Config used for instantiating a new serializer. - * @param schemaContainer Schema container that encapsulates an AvroSchema + * @param schema Schema container that encapsulates an AvroSchema * @param Type of event. The typed event should be an avro generated class. 
For generic type use {@link #avroGenericDeserializer} * @return A deserializer Implementation that can be used in {@link io.pravega.client.stream.EventStreamReader}. */ - public static Serializer avroDeserializer(SerializerConfig config, AvroSchema schemaContainer) { - return AvroSerializerFactory.deserializer(config, schemaContainer); + public static Serializer avroDeserializer(SerializerConfig config, AvroSchema schema) { + return AvroSerializerFactory.deserializer(config, schema); } /** @@ -78,12 +94,12 @@ public static Serializer avroDeserializer(SerializerConfig config, AvroSc * It does not implement {@link Serializer#serialize(Object)}. * * @param config Serializer Config used for instantiating a new serializer. - * @param schemaContainer Schema container that encapsulates an AvroSchema. It can be null to indicate that writer schema should + * @param schema Schema container that encapsulates an AvroSchema. It can be null to indicate that writer schema should * be used for deserialization. * @return A deserializer Implementation that can be used in {@link io.pravega.client.stream.EventStreamReader}. */ - public static Serializer avroGenericDeserializer(SerializerConfig config, @Nullable AvroSchema schemaContainer) { - return AvroSerializerFactory.genericDeserializer(config, schemaContainer); + public static Serializer avroGenericDeserializer(SerializerConfig config, @Nullable AvroSchema schema) { + return AvroSerializerFactory.genericDeserializer(config, schema); } /** @@ -138,14 +154,14 @@ public static Serializer> avroTypedOrGenericDeserializer( * It does not implement {@link Serializer#deserialize(ByteBuffer)}. * * @param config Serializer Config used for instantiating a new serializer. - * @param schemaContainer Schema container that encapsulates an Protobuf Schema. + * @param schema Schema container that encapsulates an Protobuf Schema. * @param Type of event. * @return A Serializer Implementation that can be used in {@link io.pravega.client.stream.EventStreamWriter} or * {@link io.pravega.client.stream.TransactionalEventStreamWriter}. */ public static Serializer protobufSerializer(SerializerConfig config, - ProtobufSchema schemaContainer) { - return ProtobufSerializerFactory.serializer(config, schemaContainer); + ProtobufSchema schema) { + return ProtobufSerializerFactory.serializer(config, schema); } /** @@ -156,13 +172,13 @@ public static Serializer protobufSerializer(SerializerCon * It does not implement {@link Serializer#serialize(Object)}. * * @param config Serializer Config used for instantiating a new serializer. - * @param schemaContainer Schema container that encapsulates an ProtobufSchema + * @param schema Schema container that encapsulates an ProtobufSchema * @param Type of event. The typed event should be an avro generated class. For generic type use {@link #protobufGenericDeserializer} * @return A deserializer Implementation that can be used in {@link io.pravega.client.stream.EventStreamReader}. */ public static Serializer protobufDeserializer(SerializerConfig config, - ProtobufSchema schemaContainer) { - return ProtobufSerializerFactory.deserializer(config, schemaContainer); + ProtobufSchema schema) { + return ProtobufSerializerFactory.deserializer(config, schema); } /** @@ -233,13 +249,13 @@ public static Serializer Type of event. * @return A Serializer Implementation that can be used in {@link io.pravega.client.stream.EventStreamWriter} or * {@link io.pravega.client.stream.TransactionalEventStreamWriter}. 
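A hedged round-trip sketch for the protobuf entry points in this part of the patch, mirroring the generated ProtobufTest messages used by the tests further down; the group id is a placeholder and the client is assumed to exist.

import io.pravega.client.stream.Serializer;
import io.pravega.schemaregistry.client.SchemaRegistryClient;
import io.pravega.schemaregistry.serializer.protobuf.generated.ProtobufTest;
import io.pravega.schemaregistry.serializer.protobuf.schemas.ProtobufSchema;
import io.pravega.schemaregistry.serializer.shared.impl.SerializerConfig;
import io.pravega.schemaregistry.serializers.SerializerFactory;

import java.nio.ByteBuffer;

public class ProtobufRoundTripSketch {
    static ProtobufTest.Message2 roundTrip(SchemaRegistryClient client) {
        SerializerConfig config = SerializerConfig.builder()
                                                  .registryClient(client)
                                                  .groupId("groupId")
                                                  .build();
        ProtobufSchema<ProtobufTest.Message2> schema = ProtobufSchema.of(ProtobufTest.Message2.class);

        // Write a generated protobuf message with its registered schema, then read it back typed.
        Serializer<ProtobufTest.Message2> serializer = SerializerFactory.protobufSerializer(config, schema);
        ByteBuffer serialized = serializer.serialize(
                ProtobufTest.Message2.newBuilder().setName("name").setField1(1).build());

        Serializer<ProtobufTest.Message2> deserializer = SerializerFactory.protobufDeserializer(config, schema);
        return deserializer.deserialize(serialized);
    }
}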
*/ - public static Serializer jsonSerializer(SerializerConfig config, JSONSchema schemaContainer) { - return JsonSerializerFactory.serializer(config, schemaContainer); + public static Serializer jsonSerializer(SerializerConfig config, JSONSchema schema) { + return JsonSerializerFactory.serializer(config, schema); } /** @@ -250,12 +266,12 @@ public static Serializer jsonSerializer(SerializerConfig config, JSONSche * It does not implement {@link Serializer#serialize(Object)}. * * @param config Serializer Config used for instantiating a new serializer. - * @param schemaContainer Schema container that encapsulates an JSONSchema + * @param schema Schema container that encapsulates an JSONSchema * @param Type of event. The typed event should be an avro generated class. For generic type use {@link #jsonGenericDeserializer} * @return A deserializer Implementation that can be used in {@link io.pravega.client.stream.EventStreamReader}. */ - public static Serializer jsonDeserializer(SerializerConfig config, JSONSchema schemaContainer) { - return JsonSerializerFactory.deserializer(config, schemaContainer); + public static Serializer jsonDeserializer(@NonNull SerializerConfig config, @Nullable JSONSchema schema) { + return JsonSerializerFactory.deserializer(config, schema); } /** @@ -267,8 +283,15 @@ public static Serializer jsonDeserializer(SerializerConfig config, JSONSc * @param config Serializer Config used for instantiating a new serializer. * @return A deserializer Implementation that can be used in {@link io.pravega.client.stream.EventStreamReader}. */ - public static Serializer> jsonGenericDeserializer(SerializerConfig config) { - return JsonSerializerFactory.genericDeserializer(config); + public static Serializer> jsonGenericDeserializer(@NonNull SerializerConfig config) { + SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); + + String groupId = config.getGroupId(); + + EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); + + return new JsonWithSchemaDeserializer(groupId, schemaRegistryClient, config.getDecoders(), + encodingCache, config.isWriteEncodingHeader()); } /** @@ -280,8 +303,8 @@ public static Serializer> jsonGenericDeserializer(Serialize * @param config Serializer Config used for instantiating a new serializer. * @return A deserializer Implementation that can be used in {@link io.pravega.client.stream.EventStreamReader}. */ - public static Serializer jsonStringDeserializer(SerializerConfig config) { - return JsonSerializerFactory.jsonStringDeserializer(config); + public static Serializer jsonStringDeserializer(@NonNull SerializerConfig config) { + return JsonSerializerFactory.deserializeAsString(config); } /** @@ -293,7 +316,7 @@ public static Serializer jsonStringDeserializer(SerializerConfig config) * @return a Serializer which can serialize events of different types for which schemas are supplied. */ public static Serializer jsonMultiTypeSerializer( - SerializerConfig config, Map, JSONSchema> schemas) { + @NonNull SerializerConfig config, @NonNull Map, JSONSchema> schemas) { return JsonSerializerFactory.multiTypeSerializer(config, schemas); } @@ -307,7 +330,7 @@ public static Serializer jsonMultiTypeSerializer( * @return a Deserializer which can deserialize events of different types in the stream into typed objects. 
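A hedged sketch of the multi-type JSON path referenced here, built on the shared DerivedUser1/DerivedUser2 test objects registered against their User base type; the group id and the map contents are placeholders.

import io.pravega.client.stream.Serializer;
import io.pravega.schemaregistry.client.SchemaRegistryClient;
import io.pravega.schemaregistry.serializer.json.schemas.JSONSchema;
import io.pravega.schemaregistry.serializer.shared.impl.SerializerConfig;
import io.pravega.schemaregistry.serializer.shared.testobjs.DerivedUser1;
import io.pravega.schemaregistry.serializer.shared.testobjs.DerivedUser2;
import io.pravega.schemaregistry.serializer.shared.testobjs.User;
import io.pravega.schemaregistry.serializers.SerializerFactory;

import java.util.HashMap;
import java.util.Map;

public class JsonMultiTypeSketch {
    static Serializer<User> writerFor(SchemaRegistryClient client) {
        SerializerConfig config = SerializerConfig.builder()
                                                  .registryClient(client)
                                                  .groupId("groupId")
                                                  .build();
        // One schema per concrete type, both declared against the shared User base type.
        Map<Class<? extends User>, JSONSchema<User>> schemas = new HashMap<>();
        schemas.put(DerivedUser1.class, JSONSchema.ofBaseType(DerivedUser1.class, User.class));
        schemas.put(DerivedUser2.class, JSONSchema.ofBaseType(DerivedUser2.class, User.class));

        // Writes either subtype; the matching reader is jsonMultiTypeDeserializer(config, schemas).
        return SerializerFactory.jsonMultiTypeSerializer(config, schemas);
    }
}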
*/ public static Serializer jsonMultiTypeDeserializer( - SerializerConfig config, Map, JSONSchema> schemas) { + @NonNull SerializerConfig config, @NonNull Map, JSONSchema> schemas) { return JsonSerializerFactory.multiTypeDeserializer(config, schemas); } @@ -321,8 +344,22 @@ public static Serializer jsonMultiTypeDeserializer( * @return a Deserializer which can deserialize events of different types in the stream into typed objects. */ public static Serializer>> jsonTypedOrGenericDeserializer( - SerializerConfig config, Map, JSONSchema> schemas) { - return JsonSerializerFactory.typedOrGenericDeserializer(config, schemas); + @NonNull SerializerConfig config, @NonNull Map, JSONSchema> schemas) { + Preconditions.checkArgument(config.isWriteEncodingHeader(), "Events should be tagged with encoding ids."); + String groupId = config.getGroupId(); + SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); + + EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); + + Map> deserializerMap = schemas + .values().stream().collect(Collectors.toMap(x -> x.getSchemaInfo().getType(), + x -> new JsonDeserializer<>(groupId, schemaRegistryClient, x, config.getDecoders(), encodingCache, + config.isWriteEncodingHeader()))); + JsonWithSchemaDeserializer genericDeserializer = new JsonWithSchemaDeserializer(groupId, schemaRegistryClient, config.getDecoders(), + encodingCache, config.isWriteEncodingHeader()); + + return new MultiplexedAndGenericDeserializer<>(groupId, schemaRegistryClient, + deserializerMap, genericDeserializer, config.getDecoders(), encodingCache); } //endregion diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/WithSchema.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/WithSchema.java index 3787da2f1..a95564ee9 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/WithSchema.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/WithSchema.java @@ -17,12 +17,12 @@ import com.google.protobuf.GeneratedMessageV3; import com.google.protobuf.InvalidProtocolBufferException; import com.google.protobuf.util.JsonFormat; +import io.pravega.schemaregistry.serializer.avro.schemas.AvroSchema; import io.pravega.schemaregistry.contract.data.SchemaInfo; import io.pravega.schemaregistry.contract.data.SerializationFormat; -import io.pravega.schemaregistry.schemas.AvroSchema; -import io.pravega.schemaregistry.schemas.JSONSchema; -import io.pravega.schemaregistry.schemas.ProtobufSchema; -import io.pravega.schemaregistry.schemas.Schema; +import io.pravega.schemaregistry.serializer.json.schemas.JSONSchema; +import io.pravega.schemaregistry.serializer.protobuf.schemas.ProtobufSchema; +import io.pravega.schemaregistry.serializer.shared.schemas.Schema; import lombok.AccessLevel; import lombok.Getter; import org.apache.avro.generic.IndexedRecord; diff --git a/serializers/src/test/java/io/pravega/schemaregistry/schemas/SchemasTest.java b/serializers/src/test/java/io/pravega/schemaregistry/schemas/SchemasTest.java deleted file mode 100644 index 57bc2e7c8..000000000 --- a/serializers/src/test/java/io/pravega/schemaregistry/schemas/SchemasTest.java +++ /dev/null @@ -1,128 +0,0 @@ -/** - * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - */ -package io.pravega.schemaregistry.schemas; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.protobuf.DescriptorProtos; -import com.google.protobuf.DynamicMessage; -import com.google.protobuf.GeneratedMessageV3; -import io.pravega.schemaregistry.contract.data.SerializationFormat; -import io.pravega.schemaregistry.testobjs.DerivedUser1; -import io.pravega.schemaregistry.testobjs.DerivedUser2; -import io.pravega.schemaregistry.testobjs.SchemaDefinitions; -import io.pravega.schemaregistry.testobjs.User; -import io.pravega.schemaregistry.testobjs.generated.ProtobufTest; -import io.pravega.schemaregistry.testobjs.generated.Test1; -import io.pravega.schemaregistry.testobjs.generated.Test2; -import org.apache.avro.specific.SpecificRecordBase; -import org.junit.Test; - -import java.io.IOException; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; - -import static io.pravega.schemaregistry.testobjs.SchemaDefinitions.JSON_SCHEMA_STRING; -import static io.pravega.schemaregistry.testobjs.SchemaDefinitions.JSON_SCHEMA_STRING_DRAFT_4; -import static io.pravega.schemaregistry.testobjs.SchemaDefinitions.JSON_SCHEMA_STRING_DRAFT_7; -import static org.junit.Assert.*; - -public class SchemasTest { - @Test - public void testAvroSchema() { - AvroSchema schema = AvroSchema.of(SchemaDefinitions.SCHEMA1); - assertNotNull(schema.getSchema()); - assertEquals(schema.getSchemaInfo().getSerializationFormat(), SerializationFormat.Avro); - - AvroSchema schema2 = AvroSchema.of(User.class); - assertNotNull(schema2.getSchema()); - assertEquals(schema2.getSchemaInfo().getSerializationFormat(), SerializationFormat.Avro); - - AvroSchema schema3 = AvroSchema.of(Test1.class); - assertNotNull(schema3.getSchema()); - assertEquals(schema3.getSchemaInfo().getSerializationFormat(), SerializationFormat.Avro); - - AvroSchema schemabase1 = AvroSchema.ofSpecificRecord(Test1.class); - assertNotNull(schemabase1.getSchema()); - assertEquals(schemabase1.getSchemaInfo().getSerializationFormat(), SerializationFormat.Avro); - - AvroSchema schemabase2 = AvroSchema.ofSpecificRecord(Test2.class); - assertNotNull(schemabase2.getSchema()); - assertEquals(schemabase2.getSchemaInfo().getSerializationFormat(), SerializationFormat.Avro); - } - - @Test - public void testProtobufSchema() throws IOException { - ProtobufSchema sm1 = ProtobufSchema.of(ProtobufTest.Message1.class); - assertNotNull(sm1.getParser()); - assertNotNull(sm1.getFileDescriptorSet()); - assertEquals(sm1.getSchemaInfo().getSerializationFormat(), SerializationFormat.Protobuf); - - ProtobufSchema bm1 = ProtobufSchema.ofGeneratedMessageV3(ProtobufTest.Message1.class); - assertNotNull(bm1.getParser()); - assertNotNull(bm1.getFileDescriptorSet()); - assertEquals(bm1.getSchemaInfo().getSerializationFormat(), SerializationFormat.Protobuf); - - ProtobufSchema bm2 = ProtobufSchema.ofGeneratedMessageV3(ProtobufTest.Message2.class); - assertNotNull(bm2.getParser()); - assertNotNull(bm2.getFileDescriptorSet()); - assertEquals(bm2.getSchemaInfo().getSerializationFormat(), SerializationFormat.Protobuf); - - Path path = Paths.get("src/test/resources/proto/protobufTest.pb"); - byte[] schemaBytes = Files.readAllBytes(path); - DescriptorProtos.FileDescriptorSet descriptorSet = DescriptorProtos.FileDescriptorSet.parseFrom(schemaBytes); - - ProtobufSchema schema = ProtobufSchema.of(ProtobufTest.Message1.class.getName(), descriptorSet); - 
assertNull(schema.getParser()); - assertNotNull(schema.getFileDescriptorSet()); - assertEquals(schema.getSchemaInfo().getSerializationFormat(), SerializationFormat.Protobuf); - - ProtobufSchema schema2 = ProtobufSchema.of(ProtobufTest.Message1.class, descriptorSet); - assertNotNull(schema2.getParser()); - assertNotNull(schema2.getFileDescriptorSet()); - assertEquals(schema2.getSchemaInfo().getSerializationFormat(), SerializationFormat.Protobuf); - - ProtobufSchema baseSchema1 = ProtobufSchema.ofGeneratedMessageV3(ProtobufTest.Message1.class, descriptorSet); - assertNotNull(baseSchema1.getParser()); - assertNotNull(baseSchema1.getFileDescriptorSet()); - assertEquals(baseSchema1.getSchemaInfo().getSerializationFormat(), SerializationFormat.Protobuf); - - ProtobufSchema baseSchema2 = ProtobufSchema.ofGeneratedMessageV3(ProtobufTest.Message2.class, descriptorSet); - assertNotNull(baseSchema2.getParser()); - assertNotNull(baseSchema2.getFileDescriptorSet()); - assertEquals(baseSchema2.getSchemaInfo().getSerializationFormat(), SerializationFormat.Protobuf); - } - - @Test - public void testJsonSchema() { - JSONSchema schema = JSONSchema.of(User.class); - assertNotNull(schema.getSchema()); - assertEquals(schema.getSchemaInfo().getSerializationFormat(), SerializationFormat.Json); - - JSONSchema schema2 = JSONSchema.of("Person", JSON_SCHEMA_STRING, String.class); - assertNotNull(schema2.getSchema()); - assertEquals(schema2.getSchemaInfo().getSerializationFormat(), SerializationFormat.Json); - - JSONSchema schema3 = JSONSchema.of("", JSON_SCHEMA_STRING_DRAFT_4, JsonNode.class); - assertNotNull(schema3.getSchema()); - assertEquals(schema3.getSchemaInfo().getSerializationFormat(), SerializationFormat.Json); - - JSONSchema schema4 = JSONSchema.of("", JSON_SCHEMA_STRING_DRAFT_7, JsonNode.class); - assertNotNull(schema4.getSchema()); - assertEquals(schema4.getSchemaInfo().getSerializationFormat(), SerializationFormat.Json); - - JSONSchema baseSchema1 = JSONSchema.ofBaseType(DerivedUser1.class, User.class); - assertNotNull(baseSchema1.getSchema()); - assertEquals(baseSchema1.getSchemaInfo().getSerializationFormat(), SerializationFormat.Json); - JSONSchema baseSchema2 = JSONSchema.ofBaseType(DerivedUser2.class, User.class); - assertNotNull(baseSchema2.getSchema()); - assertEquals(baseSchema2.getSchemaInfo().getSerializationFormat(), SerializationFormat.Json); - } -} diff --git a/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java b/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java index 8ee567058..160fc9729 100644 --- a/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java +++ b/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java @@ -9,351 +9,46 @@ */ package io.pravega.schemaregistry.serializers; -import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.jsonFormatVisitors.JsonFormatTypes; -import com.fasterxml.jackson.databind.node.TextNode; -import com.fasterxml.jackson.module.jsonSchema.JsonSchema; -import com.google.common.base.Strings; -import com.google.common.collect.ImmutableMap; -import com.google.protobuf.DescriptorProtos; import com.google.protobuf.DynamicMessage; -import com.google.protobuf.GeneratedMessageV3; import io.pravega.client.stream.Serializer; +import io.pravega.schemaregistry.serializer.avro.schemas.AvroSchema; +import 
io.pravega.schemaregistry.serializer.avro.testobjs.generated.Test1; import io.pravega.schemaregistry.client.SchemaRegistryClient; -import io.pravega.schemaregistry.codec.Codecs; -import io.pravega.schemaregistry.common.Either; import io.pravega.schemaregistry.contract.data.EncodingId; import io.pravega.schemaregistry.contract.data.EncodingInfo; import io.pravega.schemaregistry.contract.data.GroupProperties; -import io.pravega.schemaregistry.contract.data.SchemaInfo; -import io.pravega.schemaregistry.contract.data.SchemaWithVersion; import io.pravega.schemaregistry.contract.data.SerializationFormat; import io.pravega.schemaregistry.contract.data.VersionInfo; -import io.pravega.schemaregistry.schemas.AvroSchema; -import io.pravega.schemaregistry.schemas.JSONSchema; -import io.pravega.schemaregistry.schemas.ProtobufSchema; -import io.pravega.schemaregistry.testobjs.Address; -import io.pravega.schemaregistry.testobjs.DerivedUser1; -import io.pravega.schemaregistry.testobjs.DerivedUser2; -import io.pravega.schemaregistry.testobjs.SchemaDefinitions; -import io.pravega.schemaregistry.testobjs.generated.ProtobufTest; -import io.pravega.schemaregistry.testobjs.generated.Test1; -import io.pravega.schemaregistry.testobjs.generated.Test2; -import io.pravega.test.common.AssertExtensions; -import lombok.Data; -import lombok.NoArgsConstructor; -import lombok.SneakyThrows; -import org.apache.avro.generic.GenericData; +import io.pravega.schemaregistry.serializer.json.schemas.JSONSchema; +import io.pravega.schemaregistry.serializer.protobuf.schemas.ProtobufSchema; +import io.pravega.schemaregistry.serializer.protobuf.generated.ProtobufTest; +import io.pravega.schemaregistry.serializer.shared.codec.Codecs; +import io.pravega.schemaregistry.serializer.shared.impl.SerializerConfig; +import io.pravega.schemaregistry.serializer.shared.testobjs.Address; +import io.pravega.schemaregistry.serializer.shared.testobjs.DerivedUser1; import org.apache.avro.generic.GenericRecord; -import org.apache.avro.specific.SpecificRecordBase; import org.junit.Test; import java.io.IOException; import java.nio.ByteBuffer; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.util.HashMap; -import java.util.Map; -import static org.junit.Assert.*; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; +import static org.mockito.ArgumentMatchers.eq; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.*; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; public class SerializerTest { - @Test - public void testAvroSerializers() { - SchemaRegistryClient client = mock(SchemaRegistryClient.class); - - SerializerConfig config = SerializerConfig.builder().registryClient(client).groupId("groupId").build(); - AvroSchema schema1 = AvroSchema.of(Test1.class); - AvroSchema schema2 = AvroSchema.of(Test2.class); - VersionInfo versionInfo1 = new VersionInfo("name", 0, 0); - VersionInfo versionInfo2 = new VersionInfo("name", 1, 1); - doAnswer(x -> GroupProperties.builder().serializationFormat(SerializationFormat.Any).build()) - .when(client).getGroupProperties(anyString()); - doAnswer(x -> versionInfo1).when(client).getVersionForSchema(anyString(), eq(schema1.getSchemaInfo())); - doAnswer(x -> versionInfo2).when(client).getVersionForSchema(anyString(), eq(schema2.getSchemaInfo())); - doAnswer(x -> new 
EncodingId(0)).when(client).getEncodingId(anyString(), eq(versionInfo1), any()); - doAnswer(x -> new EncodingId(1)).when(client).getEncodingId(anyString(), eq(versionInfo2), any()); - doAnswer(x -> new EncodingInfo(versionInfo1, schema1.getSchemaInfo(), Codecs.None.getCodec().getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(0))); - doAnswer(x -> new EncodingInfo(versionInfo2, schema2.getSchemaInfo(), Codecs.None.getCodec().getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(1))); - doAnswer(x -> true).when(client).canReadUsing(anyString(), any()); - - AvroSchema of = AvroSchema.of(SchemaDefinitions.ENUM); - VersionInfo versionInfo3 = new VersionInfo(of.getSchema().getFullName(), 0, 2); - doAnswer(x -> versionInfo3).when(client).getVersionForSchema(anyString(), eq(of.getSchemaInfo())); - doAnswer(x -> new EncodingId(2)).when(client).getEncodingId(anyString(), eq(versionInfo3), any()); - doAnswer(x -> new EncodingInfo(versionInfo3, of.getSchemaInfo(), Codecs.None.getCodec().getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(2))); - - Serializer serializerStr = SerializerFactory.avroSerializer(config, of); - GenericData.EnumSymbol enumSymbol = new GenericData.EnumSymbol(of.getSchema(), "a"); - ByteBuffer serialized1 = serializerStr.serialize(enumSymbol); - - Serializer deserializer1 = SerializerFactory.avroDeserializer(config, of); - Object deserializedEnum = deserializer1.deserialize(serialized1); - assertEquals(deserializedEnum, enumSymbol); - - Serializer serializer = SerializerFactory.avroSerializer(config, schema1); - Test1 test1 = new Test1("name", 1); - ByteBuffer serialized = serializer.serialize(test1); - - Serializer deserializer = SerializerFactory.avroDeserializer(config, schema1); - Test1 deserialized = deserializer.deserialize(serialized); - assertEquals(deserialized, test1); - - serialized = serializer.serialize(test1); - Serializer genericDeserializer = SerializerFactory.avroGenericDeserializer(config, null); - Object genericDeserialized = genericDeserializer.deserialize(serialized); - assertTrue(genericDeserialized instanceof GenericRecord); - assertEquals(((GenericRecord) genericDeserialized).get("name").toString(), "name"); - assertEquals(((GenericRecord) genericDeserialized).get("field1"), 1); - - // multi type - Test2 test2 = new Test2("name", 1, "2"); - - AvroSchema schema1Base = AvroSchema.ofSpecificRecord(Test1.class); - AvroSchema schema2Base = AvroSchema.ofSpecificRecord(Test2.class); - Map, AvroSchema> map = new HashMap<>(); - map.put(Test1.class, schema1Base); - map.put(Test2.class, schema2Base); - Serializer multiSerializer = SerializerFactory.avroMultiTypeSerializer(config, map); - serialized = multiSerializer.serialize(test1); - Serializer multiDeserializer = SerializerFactory.avroMultiTypeDeserializer(config, map); - SpecificRecordBase deserialized2 = multiDeserializer.deserialize(serialized); - assertEquals(deserialized2, test1); - - serialized = multiSerializer.serialize(test2); - deserialized2 = multiDeserializer.deserialize(serialized); - assertEquals(deserialized2, test2); - - Map, AvroSchema> map2 = new HashMap<>(); - map2.put(Test1.class, schema1Base); - Serializer> fallbackDeserializer = SerializerFactory.avroTypedOrGenericDeserializer(config, map2); - - serialized = multiSerializer.serialize(test1); - Either fallback = fallbackDeserializer.deserialize(serialized); - assertTrue(fallback.isLeft()); - assertEquals(fallback.getLeft(), test1); - - serialized = 
multiSerializer.serialize(test2); - - fallback = fallbackDeserializer.deserialize(serialized); - assertTrue(fallback.isRight()); - } - - @Test - @SneakyThrows - public void testAvroSerializersReflect() { - TestClass test1 = new TestClass("name"); - AvroSchema schema1 = AvroSchema.of(TestClass.class); - - SchemaRegistryClient client = mock(SchemaRegistryClient.class); - - SerializerConfig config = SerializerConfig.builder().registryClient(client).groupId("groupId").build(); - - VersionInfo versionInfo1 = new VersionInfo("name", 0, 0); - doAnswer(x -> GroupProperties.builder().serializationFormat(SerializationFormat.Any).build()) - .when(client).getGroupProperties(anyString()); - doAnswer(x -> versionInfo1).when(client).getVersionForSchema(anyString(), eq(schema1.getSchemaInfo())); - doAnswer(x -> new EncodingId(0)).when(client).getEncodingId(anyString(), eq(versionInfo1), any()); - doAnswer(x -> new EncodingInfo(versionInfo1, schema1.getSchemaInfo(), Codecs.None.getCodec().getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(0))); - doAnswer(x -> true).when(client).canReadUsing(anyString(), any()); - - Serializer serializer = SerializerFactory.avroSerializer(config, schema1); - ByteBuffer serialized = serializer.serialize(test1); - - Serializer deserializer = SerializerFactory.avroDeserializer(config, schema1); - TestClass deserialized = deserializer.deserialize(serialized); - assertEquals(deserialized, test1); - } - - @Test - public void testProtobufSerializers() throws IOException { - SchemaRegistryClient client = mock(SchemaRegistryClient.class); - SerializerConfig config = SerializerConfig.builder().registryClient(client).groupId("groupId").build(); - Path path = Paths.get("src/test/resources/proto/protobufTest.pb"); - byte[] schemaBytes = Files.readAllBytes(path); - DescriptorProtos.FileDescriptorSet descriptorSet = DescriptorProtos.FileDescriptorSet.parseFrom(schemaBytes); - ProtobufSchema schema1 = ProtobufSchema.of(ProtobufTest.Message2.class, descriptorSet); - ProtobufSchema schema2 = ProtobufSchema.of(ProtobufTest.Message3.class, descriptorSet); - - VersionInfo versionInfo1 = new VersionInfo("name", 0, 0); - VersionInfo versionInfo2 = new VersionInfo("name", 1, 1); - doAnswer(x -> GroupProperties.builder().serializationFormat(SerializationFormat.Any).build()) - .when(client).getGroupProperties(anyString()); - doAnswer(x -> versionInfo1).when(client).getVersionForSchema(anyString(), eq(schema1.getSchemaInfo())); - doAnswer(x -> versionInfo2).when(client).getVersionForSchema(anyString(), eq(schema2.getSchemaInfo())); - doAnswer(x -> new EncodingId(0)).when(client).getEncodingId(anyString(), eq(versionInfo1), any()); - doAnswer(x -> new EncodingId(1)).when(client).getEncodingId(anyString(), eq(versionInfo2), any()); - doAnswer(x -> new EncodingInfo(versionInfo1, schema1.getSchemaInfo(), Codecs.None.getCodec().getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(0))); - doAnswer(x -> new EncodingInfo(versionInfo2, schema2.getSchemaInfo(), Codecs.None.getCodec().getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(1))); - doAnswer(x -> true).when(client).canReadUsing(anyString(), any()); - - Serializer serializer = SerializerFactory.protobufSerializer(config, schema1); - ProtobufTest.Message2 message = ProtobufTest.Message2.newBuilder().setName("name").setField1(1).build(); - ByteBuffer serialized = serializer.serialize(message); - - Serializer deserializer = SerializerFactory.protobufDeserializer(config, schema1); 
- ProtobufTest.Message2 deserialized = deserializer.deserialize(serialized); - assertEquals(deserialized, message); - - serialized = serializer.serialize(message); - Serializer genericDeserializer = SerializerFactory.protobufGenericDeserializer(config, null); - DynamicMessage generic = genericDeserializer.deserialize(serialized); - assertEquals(generic.getAllFields().size(), 2); - - // multi type - ProtobufTest.Message3 message2 = ProtobufTest.Message3.newBuilder().setName("name").setField1(1).setField2(2).build(); - - ProtobufSchema schema1Base = ProtobufSchema.ofGeneratedMessageV3(ProtobufTest.Message2.class, descriptorSet); - ProtobufSchema schema2Base = ProtobufSchema.ofGeneratedMessageV3(ProtobufTest.Message3.class, descriptorSet); - Map, ProtobufSchema> map = new HashMap<>(); - map.put(ProtobufTest.Message2.class, schema1Base); - map.put(ProtobufTest.Message3.class, schema2Base); - Serializer multiSerializer = SerializerFactory.protobufMultiTypeSerializer(config, map); - serialized = multiSerializer.serialize(message); - Serializer multiDeserializer = SerializerFactory.protobufMultiTypeDeserializer(config, map); - GeneratedMessageV3 deserialized2 = multiDeserializer.deserialize(serialized); - assertEquals(deserialized2, message); - - serialized = multiSerializer.serialize(message2); - deserialized2 = multiDeserializer.deserialize(serialized); - assertEquals(deserialized2, message2); - - Map, ProtobufSchema> map2 = new HashMap<>(); - map2.put(ProtobufTest.Message2.class, schema1Base); - Serializer> fallbackDeserializer = SerializerFactory.protobufTypedOrGenericDeserializer(config, map2); - serialized = multiSerializer.serialize(message); - Either fallback = fallbackDeserializer.deserialize(serialized); - assertTrue(fallback.isLeft()); - assertEquals(fallback.getLeft(), message); - - serialized = multiSerializer.serialize(message2); - - fallback = fallbackDeserializer.deserialize(serialized); - assertTrue(fallback.isRight()); - } - - @Test - public void testJsonSerializers() throws JsonProcessingException { - SchemaRegistryClient client = mock(SchemaRegistryClient.class); - SerializerConfig config = SerializerConfig.builder().registryClient(client).groupId("groupId").build(); - JSONSchema schema1 = JSONSchema.of(DerivedUser1.class); - JSONSchema schema2 = JSONSchema.of(DerivedUser2.class); - - VersionInfo versionInfo1 = new VersionInfo("name", 0, 0); - VersionInfo versionInfo2 = new VersionInfo("name", 1, 1); - doAnswer(x -> GroupProperties.builder().serializationFormat(SerializationFormat.Any).build()) - .when(client).getGroupProperties(anyString()); - doAnswer(x -> versionInfo1).when(client).getVersionForSchema(anyString(), eq(schema1.getSchemaInfo())); - doAnswer(x -> versionInfo2).when(client).getVersionForSchema(anyString(), eq(schema2.getSchemaInfo())); - doAnswer(x -> new EncodingId(0)).when(client).getEncodingId(anyString(), eq(versionInfo1), any()); - doAnswer(x -> new EncodingId(1)).when(client).getEncodingId(anyString(), eq(versionInfo2), any()); - doAnswer(x -> new EncodingInfo(versionInfo1, schema1.getSchemaInfo(), Codecs.None.getCodec().getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(0))); - doAnswer(x -> new EncodingInfo(versionInfo2, schema2.getSchemaInfo(), Codecs.None.getCodec().getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(1))); - doAnswer(x -> true).when(client).canReadUsing(anyString(), any()); - - Serializer serializer = SerializerFactory.jsonSerializer(config, schema1); - DerivedUser1 user1 = new 
DerivedUser1("user", new Address("street", "city"), 2, "user1"); - ByteBuffer serialized = serializer.serialize(user1); - - Serializer deserializer = SerializerFactory.jsonDeserializer(config, schema1); - DerivedUser1 deserialized = deserializer.deserialize(serialized); - assertEquals(deserialized, user1); - - serialized = serializer.serialize(user1); - Serializer> genericDeserializer = SerializerFactory.jsonGenericDeserializer(config); - WithSchema generic = genericDeserializer.deserialize(serialized); - assertEquals(((JSONSchema) generic.getSchema()).getSchema(), schema1.getSchema()); - assertEquals(((JsonNode) generic.getObject()).size(), 4); - - serialized = serializer.serialize(user1); - Serializer stringDeserializer = SerializerFactory.jsonStringDeserializer(config); - String str = stringDeserializer.deserialize(serialized); - assertFalse(Strings.isNullOrEmpty(str)); - - String schemaString = "{\"type\": \"object\",\"title\": \"The external data schema\",\"properties\": {\"content\": {\"type\": \"string\"}}}"; - - JSONSchema myData = JSONSchema.of("MyData", schemaString, HashMap.class); - VersionInfo versionInfo3 = new VersionInfo("myData", 0, 2); - doAnswer(x -> versionInfo3).when(client).getVersionForSchema(anyString(), eq(myData.getSchemaInfo())); - doAnswer(x -> new EncodingId(2)).when(client).getEncodingId(anyString(), eq(versionInfo3), any()); - doAnswer(x -> new EncodingInfo(versionInfo3, myData.getSchemaInfo(), Codecs.None.getCodec().getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(2))); - - Serializer serializer2 = SerializerFactory.jsonSerializer(config, myData); - HashMap jsonObject = new HashMap<>(); - jsonObject.put("content", "mxx"); - - ByteBuffer s = serializer2.serialize(jsonObject); - str = stringDeserializer.deserialize(s); - - String stringSchema = new ObjectMapper().writeValueAsString(JsonSchema.minimalForFormat(JsonFormatTypes.STRING)); - - JSONSchema strSchema = JSONSchema.of("string", stringSchema, String.class); - VersionInfo versionInfo4 = new VersionInfo("myData", 0, 3); - doAnswer(x -> versionInfo4).when(client).getVersionForSchema(anyString(), eq(strSchema.getSchemaInfo())); - doAnswer(x -> new EncodingId(3)).when(client).getEncodingId(anyString(), eq(versionInfo4), any()); - doAnswer(x -> new EncodingInfo(versionInfo4, strSchema.getSchemaInfo(), Codecs.None.getCodec().getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(3))); - - Serializer serializer3 = SerializerFactory.jsonSerializer(config, strSchema); - Serializer deserializer3 = SerializerFactory.jsonDeserializer(config, strSchema); - Serializer> generic3 = SerializerFactory.jsonGenericDeserializer(config); - String string = "a"; - s = serializer3.serialize(string); - Object x = deserializer3.deserialize(s); - assertNotNull(x); - assertEquals(x, string); - s = serializer3.serialize(string); - Object jsonNode = generic3.deserialize(s); - assertTrue(((WithSchema) jsonNode).getObject() instanceof TextNode); - assertEquals(((TextNode) ((WithSchema) jsonNode).getObject()).textValue(), string); - // multi type - DerivedUser2 user2 = new DerivedUser2("user", new Address("street", "city"), 2, "user2"); - - JSONSchema schema1Base = JSONSchema.ofBaseType(DerivedUser1.class, Object.class); - JSONSchema schema2Base = JSONSchema.ofBaseType(DerivedUser2.class, Object.class); - Map, JSONSchema> map = new HashMap<>(); - map.put(DerivedUser1.class, schema1Base); - map.put(DerivedUser2.class, schema2Base); - Serializer multiSerializer = 
SerializerFactory.jsonMultiTypeSerializer(config, map); - serialized = multiSerializer.serialize(user1); - Serializer multiDeserializer = SerializerFactory.jsonMultiTypeDeserializer(config, map); - Object deserialized2 = multiDeserializer.deserialize(serialized); - assertEquals(deserialized2, user1); - - serialized = multiSerializer.serialize(user2); - deserialized2 = multiDeserializer.deserialize(serialized); - assertEquals(deserialized2, user2); - - Map, JSONSchema> map2 = new HashMap<>(); - map2.put(DerivedUser1.class, schema1Base); - Serializer>> fallbackDeserializer = SerializerFactory.jsonTypedOrGenericDeserializer(config, map2); - serialized = multiSerializer.serialize(user1); - Either> fallback = fallbackDeserializer.deserialize(serialized); - assertTrue(fallback.isLeft()); - assertEquals(fallback.getLeft(), user1); - - serialized = multiSerializer.serialize(user2); - - fallback = fallbackDeserializer.deserialize(serialized); - assertTrue(fallback.isRight()); - } - @Test public void testMultiformatDeserializers() throws IOException { SchemaRegistryClient client = mock(SchemaRegistryClient.class); - Path path = Paths.get("src/test/resources/proto/protobufTest.pb"); - byte[] schemaBytes = Files.readAllBytes(path); - DescriptorProtos.FileDescriptorSet descriptorSet = DescriptorProtos.FileDescriptorSet.parseFrom(schemaBytes); SerializerConfig config = SerializerConfig.builder().registryClient(client).groupId("groupId").build(); AvroSchema schema1 = AvroSchema.of(Test1.class); - ProtobufSchema schema2 = ProtobufSchema.of(ProtobufTest.Message2.class, descriptorSet); + ProtobufSchema schema2 = ProtobufSchema.of(ProtobufTest.Message2.class); JSONSchema schema3 = JSONSchema.of(DerivedUser1.class); VersionInfo versionInfo1 = new VersionInfo("avro", 0, 0); @@ -391,7 +86,7 @@ public void testMultiformatDeserializers() throws IOException { deserialized = deserializer.deserialize(serializedProto); assertTrue(deserialized instanceof DynamicMessage); deserialized = deserializer.deserialize(serializedJson); - assertTrue(deserialized instanceof WithSchema); + assertTrue(deserialized instanceof JsonNode); Serializer jsonStringDeserializer = SerializerFactory.deserializeAsJsonString(config); serializedAvro.position(0); @@ -404,89 +99,4 @@ public void testMultiformatDeserializers() throws IOException { jsonString = jsonStringDeserializer.deserialize(serializedJson); assertNotNull(jsonString); } - - @Test - public void testNoEncodingProto() throws IOException { - SchemaRegistryClient client = mock(SchemaRegistryClient.class); - SerializerConfig config = SerializerConfig.builder().registryClient(client).groupId("groupId") - .writeEncodingHeader(false).build(); - Path path = Paths.get("src/test/resources/proto/protobufTest.pb"); - byte[] schemaBytes = Files.readAllBytes(path); - DescriptorProtos.FileDescriptorSet descriptorSet = DescriptorProtos.FileDescriptorSet.parseFrom(schemaBytes); - ProtobufSchema schema1 = ProtobufSchema.of(ProtobufTest.Message2.class, descriptorSet); - - VersionInfo versionInfo1 = new VersionInfo("name", 0, 0); - doAnswer(x -> GroupProperties.builder().serializationFormat(SerializationFormat.Any) - .properties(ImmutableMap.of()).build()) - .when(client).getGroupProperties(anyString()); - doAnswer(x -> versionInfo1).when(client).getVersionForSchema(anyString(), eq(schema1.getSchemaInfo())); - doAnswer(x -> new SchemaWithVersion(schema1.getSchemaInfo(), versionInfo1)).when(client).getLatestSchemaVersion(anyString(), any()); - doAnswer(x -> 
true).when(client).canReadUsing(anyString(), any()); - - Serializer serializer = SerializerFactory.protobufSerializer(config, schema1); - verify(client, never()).getEncodingId(anyString(), any(), any()); - - ProtobufTest.Message2 message = ProtobufTest.Message2.newBuilder().setName("name").setField1(1).build(); - ByteBuffer serialized = serializer.serialize(message); - - Serializer deserializer = SerializerFactory.protobufDeserializer(config, schema1); - verify(client, never()).getEncodingInfo(anyString(), any()); - - ProtobufTest.Message2 deserialized = deserializer.deserialize(serialized); - assertEquals(deserialized, message); - - serialized = serializer.serialize(message); - AssertExtensions.assertThrows(IllegalArgumentException.class, () -> SerializerFactory.protobufGenericDeserializer(config, null)); - - SchemaInfo latestSchema = client.getLatestSchemaVersion("groupId", null).getSchemaInfo(); - ProtobufSchema schemaDynamic = ProtobufSchema.of(latestSchema.getType(), descriptorSet); - Serializer genericDeserializer = SerializerFactory.protobufGenericDeserializer(config, schemaDynamic); - - DynamicMessage generic = genericDeserializer.deserialize(serialized); - assertEquals(generic.getAllFields().size(), 2); - } - - @Test - public void testNoEncodingJson() throws IOException { - SchemaRegistryClient client = mock(SchemaRegistryClient.class); - SerializerConfig config = SerializerConfig.builder().registryClient(client).groupId("groupId") - .writeEncodingHeader(false).build(); - JSONSchema schema1 = JSONSchema.of(DerivedUser1.class); - - VersionInfo versionInfo1 = new VersionInfo("name", 0, 0); - doAnswer(x -> GroupProperties.builder().serializationFormat(SerializationFormat.Any) - .properties(ImmutableMap.of()).build()) - .when(client).getGroupProperties(anyString()); - doAnswer(x -> versionInfo1).when(client).getVersionForSchema(anyString(), eq(schema1.getSchemaInfo())); - doAnswer(x -> new SchemaWithVersion(schema1.getSchemaInfo(), versionInfo1)).when(client).getLatestSchemaVersion(anyString(), any()); - doAnswer(x -> true).when(client).canReadUsing(anyString(), any()); - - Serializer serializer = SerializerFactory.jsonSerializer(config, schema1); - verify(client, never()).getEncodingId(anyString(), any(), any()); - DerivedUser1 user1 = new DerivedUser1("user", new Address("street", "city"), 2, "user1"); - ByteBuffer serialized = serializer.serialize(user1); - - Serializer deserializer = SerializerFactory.jsonDeserializer(config, schema1); - verify(client, never()).getEncodingInfo(anyString(), any()); - DerivedUser1 deserialized = deserializer.deserialize(serialized); - assertEquals(deserialized, user1); - - serialized = serializer.serialize(user1); - - Serializer> genericDeserializer = SerializerFactory.jsonGenericDeserializer(config); - - WithSchema generic = genericDeserializer.deserialize(serialized); - assertNotNull(generic.getObject()); - assertNull(generic.getSchema()); - } - - @Data - @NoArgsConstructor - public static class TestClass { - private String test; - - public TestClass(String test) { - this.test = test; - } - } } diff --git a/server/src/main/java/io/pravega/schemaregistry/server/rest/RestServer.java b/server/src/main/java/io/pravega/schemaregistry/server/rest/RestServer.java index af652690f..ffe5d4658 100644 --- a/server/src/main/java/io/pravega/schemaregistry/server/rest/RestServer.java +++ b/server/src/main/java/io/pravega/schemaregistry/server/rest/RestServer.java @@ -76,7 +76,7 @@ protected void startUp() { log.info("Starting REST server listening on port: {}", 
this.restServerConfig.getPort()); if (restServerConfig.isTlsEnabled()) { SSLContextConfigurator contextConfigurator = new SSLContextConfigurator(); - contextConfigurator.setKeyStoreFile(restServerConfig.getServerKeyStoreFilePath()); + contextConfigurator.setKeyStoreFile(restServerConfig.getTlsKeyStoreFilePath()); contextConfigurator.setKeyStorePass(JKSHelper.loadPasswordFrom(restServerConfig.getTlsKeyStorePasswordFilePath())); httpServer = GrizzlyHttpServerFactory.createHttpServer(baseUri, resourceConfig, true, new SSLEngineConfigurator(contextConfigurator, false, false, false)); diff --git a/server/src/main/java/io/pravega/schemaregistry/server/rest/ServiceConfig.java b/server/src/main/java/io/pravega/schemaregistry/server/rest/ServiceConfig.java index 38138b5f3..49ae01536 100644 --- a/server/src/main/java/io/pravega/schemaregistry/server/rest/ServiceConfig.java +++ b/server/src/main/java/io/pravega/schemaregistry/server/rest/ServiceConfig.java @@ -28,7 +28,7 @@ public class ServiceConfig implements ServerConfig { @ToString.Exclude private final String tlsCertFilePath; @ToString.Exclude - private final String serverKeyStoreFilePath; + private final String tlsKeyStoreFilePath; @ToString.Exclude private final String tlsKeyStorePasswordFilePath; private final boolean authEnabled; @@ -36,17 +36,22 @@ public class ServiceConfig implements ServerConfig { private final String userPasswordFilePath; private ServiceConfig(String host, int port, boolean tlsEnabled, String tlsCertFilePath, - String serverKeyStoreFilePath, String tlsKeyStorePasswordFilePath, boolean authEnabled, String userPasswordFilePath) { + String tlsKeyStoreFilePath, String tlsKeyStorePasswordFilePath, boolean authEnabled, String userPasswordFilePath) { Exceptions.checkNotNullOrEmpty(host, "host"); Exceptions.checkArgument(port > 0, "port", "Should be positive integer"); - Exceptions.checkArgument(!tlsEnabled || (!Strings.isNullOrEmpty(tlsCertFilePath) && - !Strings.isNullOrEmpty(serverKeyStoreFilePath)), "keyFilePath", - "If tls is enabled then key file path and key file password path should be non empty"); + if (tlsEnabled) { + Exceptions.checkArgument(!Strings.isNullOrEmpty(tlsCertFilePath), "keyCertPath", + "If tls is enabled then cert file path should be non empty"); + Exceptions.checkArgument(!Strings.isNullOrEmpty(tlsKeyStoreFilePath), "keyFilePath", + "If tls is enabled then key file path should be non empty"); + Exceptions.checkArgument(!Strings.isNullOrEmpty(tlsKeyStorePasswordFilePath), "keyPasswordFilePath", + "If tls is enabled then key file password path should be non empty"); + } this.host = host; this.port = port; this.tlsEnabled = tlsEnabled; this.tlsCertFilePath = tlsCertFilePath; - this.serverKeyStoreFilePath = serverKeyStoreFilePath; + this.tlsKeyStoreFilePath = tlsKeyStoreFilePath; this.tlsKeyStorePasswordFilePath = tlsKeyStorePasswordFilePath; this.authEnabled = authEnabled; this.userPasswordFilePath = userPasswordFilePath; diff --git a/server/src/main/java/io/pravega/schemaregistry/service/Config.java b/server/src/main/java/io/pravega/schemaregistry/service/Config.java index fa570736c..1f0af3e13 100644 --- a/server/src/main/java/io/pravega/schemaregistry/service/Config.java +++ b/server/src/main/java/io/pravega/schemaregistry/service/Config.java @@ -218,8 +218,8 @@ private static ServiceConfig createServiceConfig() { .userPasswordFilePath(Config.USER_PASSWORD_FILE) .tlsEnabled(Config.TLS_ENABLED) .tlsCertFilePath(Config.TLS_CERT_FILE) - .serverKeyStoreFilePath(Config.TLS_KEY_FILE) - 
.serverKeyStoreFilePath(Config.TLS_KEY_PASSWORD_FILE) + .tlsKeyStoreFilePath(Config.TLS_KEY_FILE) + .tlsKeyStorePasswordFilePath(Config.TLS_KEY_PASSWORD_FILE) .build(); } diff --git a/server/src/main/java/io/pravega/schemaregistry/service/SchemaRegistryService.java b/server/src/main/java/io/pravega/schemaregistry/service/SchemaRegistryService.java index 016c5fb69..a590ded53 100644 --- a/server/src/main/java/io/pravega/schemaregistry/service/SchemaRegistryService.java +++ b/server/src/main/java/io/pravega/schemaregistry/service/SchemaRegistryService.java @@ -662,7 +662,6 @@ private CompletableFuture> getSchemasForValidation(Strin GroupProperties groupProperties) { switch (groupProperties.getCompatibility().getType()) { case AllowAny: - return CompletableFuture.completedFuture(Collections.emptyList()); case DenyAll: // Deny all is applicable as long as there is at least one schema in the group. return store.listLatestSchemas(namespace, group); @@ -757,7 +756,7 @@ private boolean checkCompatibility(SchemaInfo schema, GroupProperties groupPrope CompatibilityChecker checker = CompatibilityCheckerFactory.getCompatibilityChecker(schema.getSerializationFormat()); // Verify that the type matches the type in schemas it will be validated against. - if (!schemasWithVersion.stream().allMatch(x -> x.getSchemaInfo().getType().equals(schema.getType()))) { + if (!groupProperties.isAllowMultipleTypes() && !schemasWithVersion.stream().allMatch(x -> x.getSchemaInfo().getType().equals(schema.getType()))) { return false; } switch (groupProperties.getCompatibility().getType()) { diff --git a/server/src/main/java/io/pravega/schemaregistry/storage/client/TableStore.java b/server/src/main/java/io/pravega/schemaregistry/storage/client/TableStore.java index 90ebe0aee..f6e547007 100644 --- a/server/src/main/java/io/pravega/schemaregistry/storage/client/TableStore.java +++ b/server/src/main/java/io/pravega/schemaregistry/storage/client/TableStore.java @@ -17,7 +17,9 @@ import io.netty.buffer.Unpooled; import io.netty.util.ReferenceCountUtil; import io.pravega.client.ClientConfig; -import io.pravega.client.netty.impl.ConnectionFactoryImpl; +import io.pravega.client.connection.impl.ConnectionPoolImpl; +import io.pravega.client.connection.impl.SocketConnectionFactoryImpl; +import io.pravega.client.tables.IteratorItem; import io.pravega.client.tables.impl.IteratorStateImpl; import io.pravega.client.tables.impl.TableSegmentEntry; import io.pravega.client.tables.impl.TableSegmentKey; @@ -90,9 +92,10 @@ public class TableStore extends AbstractService { private final Cache tokenCache; public TableStore(ClientConfig clientConfig, ScheduledExecutorService executor) { - ConnectionFactoryImpl connectionFactory = new ConnectionFactoryImpl(clientConfig); + SocketConnectionFactoryImpl connectionFactory = new SocketConnectionFactoryImpl(clientConfig); + ConnectionPoolImpl connectionPool = new ConnectionPoolImpl(clientConfig, connectionFactory); hostStore = new HostStoreImpl(clientConfig, executor); - segmentHelper = new SegmentHelper(connectionFactory, hostStore); + segmentHelper = new SegmentHelper(connectionPool, hostStore, executor); this.executor = executor; this.tokenSupplier = x -> { String[] splits = x.split("/"); @@ -321,9 +324,8 @@ public CompletableFuture> getKeysPaginated(String tab try { List items = result.getItems().stream().map(x -> fromByteKey.apply(getArray(x.getKey()))) .collect(Collectors.toList()); - ByteBuf byteBuf = Unpooled.wrappedBuffer(result.getState().toBytes()); log.trace("get keys paginated on table {} 
returned items {}", tableName, items); - return new ResultPage<>(items, byteBuf); + return new ResultPage<>(items, getNextToken(continuationToken, result)); } finally { releaseKeys(result.getItems()); } @@ -346,13 +348,18 @@ private CompletableFuture, ByteBuf>> getE }).collect(Collectors.toList()); log.trace("get keys paginated on table {} returned number of items {}", tableName, items.size()); - return new ResultPage<>(items, Unpooled.wrappedBuffer(result.getState().toBytes())); + return new ResultPage<>(items, getNextToken(continuationToken, result)); } finally { releaseEntries(result.getItems()); } }); } + private ByteBuf getNextToken(ByteBuf continuationToken, IteratorItem result) { + return result.getItems().isEmpty() && result.getState().isEmpty() ? + continuationToken : Unpooled.wrappedBuffer(result.getState().toBytes()); + } + private Supplier> exceptionalCallback(Supplier> future, Supplier errorMessageSupplier, String tableName, boolean throwOriginalOnCfe) { return () -> CompletableFuture.completedFuture(null).thenComposeAsync(v -> future.get(), executor).exceptionally(t -> { diff --git a/server/src/test/java/io/pravega/schemaregistry/server/rest/ConfigTest.java b/server/src/test/java/io/pravega/schemaregistry/server/rest/ConfigTest.java new file mode 100644 index 000000000..753826e64 --- /dev/null +++ b/server/src/test/java/io/pravega/schemaregistry/server/rest/ConfigTest.java @@ -0,0 +1,42 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.server.rest; + +import io.pravega.test.common.AssertExtensions; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +public class ConfigTest { + @Test + public void testValidConfig() { + ServiceConfig config = ServiceConfig.builder().build(); + assertEquals(config.getHost(), "0.0.0.0"); + assertEquals(config.getPort(), 9092); + assertFalse(config.isAuthEnabled()); + assertFalse(config.isTlsEnabled()); + } + + @Test + public void testTlsConfig() { + // invalid config + AssertExtensions.assertThrows(IllegalArgumentException.class, () -> ServiceConfig.builder().tlsEnabled(true).build()); + AssertExtensions.assertThrows(IllegalArgumentException.class, () -> ServiceConfig.builder().tlsEnabled(true) + .tlsCertFilePath("a").build()); + AssertExtensions.assertThrows(IllegalArgumentException.class, () -> + ServiceConfig.builder().tlsEnabled(true).tlsCertFilePath("a").tlsKeyStoreFilePath("a").build()); + + // valid config + ServiceConfig config = ServiceConfig.builder().tlsEnabled(true).tlsCertFilePath("a").tlsKeyStoreFilePath("a").tlsKeyStorePasswordFilePath("a").build(); + assertTrue(config.isTlsEnabled()); + } +} diff --git a/server/src/test/java/io/pravega/schemaregistry/service/SchemaRegistryServiceTest.java b/server/src/test/java/io/pravega/schemaregistry/service/SchemaRegistryServiceTest.java index be4c720f9..76a200c04 100644 --- a/server/src/test/java/io/pravega/schemaregistry/service/SchemaRegistryServiceTest.java +++ b/server/src/test/java/io/pravega/schemaregistry/service/SchemaRegistryServiceTest.java @@ -27,6 +27,7 @@ import io.pravega.schemaregistry.contract.data.SerializationFormat; import 
io.pravega.schemaregistry.contract.data.VersionInfo; import io.pravega.schemaregistry.exceptions.CodecTypeNotRegisteredException; +import io.pravega.schemaregistry.exceptions.IncompatibleSchemaException; import io.pravega.schemaregistry.exceptions.PreconditionFailedException; import io.pravega.schemaregistry.exceptions.SerializationFormatMismatchException; import io.pravega.schemaregistry.storage.ContinuationToken; @@ -209,7 +210,7 @@ public void testAddSchema() { Compatibility.forward()).build()); }).when(store).getGroupProperties(any(), anyString()); byte[] schemaData = new byte[0]; - SchemaInfo schemaInfo = new SchemaInfo("mygroup", SerializationFormat.custom("custom1"), + SchemaInfo schemaInfo = new SchemaInfo("type", SerializationFormat.custom("custom1"), ByteBuffer.wrap(schemaData), ImmutableMap.of()); VersionInfo versionInfo = new VersionInfo("objectType", 5, 7); @@ -238,18 +239,22 @@ public void testAddSchema() { GroupProperties.builder().allowMultipleTypes(Boolean.FALSE).properties( ImmutableMap.builder().build()).serializationFormat( SerializationFormat.custom("custom1")).compatibility( - Compatibility.forward()).build())).when(store).getGroupProperties( + Compatibility.allowAny()).build())).when(store).getGroupProperties( any(), anyString()); doAnswer(x -> Futures.failedFuture( StoreExceptions.create(StoreExceptions.Type.DATA_NOT_FOUND, "Group Not Found"))).when( store).getSchemaVersion(any(), anyString(), any(), any()); - SchemaWithVersion schemaWithVersion = new SchemaWithVersion(schemaInfo, versionInfo); + schemaData = new byte[1]; + SchemaInfo schemaInfo1 = new SchemaInfo("type1", SerializationFormat.custom("custom1"), + ByteBuffer.wrap(schemaData), + ImmutableMap.of()); + SchemaWithVersion schemaWithVersion = new SchemaWithVersion(schemaInfo1, versionInfo); List schemaWithVersionList = new ArrayList<>(); schemaWithVersionList.add(schemaWithVersion); - doAnswer(x -> CompletableFuture.completedFuture(schemaWithVersion)).when(store).getLatestSchemaVersion( + doAnswer(x -> CompletableFuture.completedFuture(schemaWithVersionList)).when(store).listLatestSchemas( any(), anyString()); - // get CheckCompatibility to fail - versionInfo1 = service.addSchema(null, "mygroup", schemaInfo).join(); + // CheckCompatibility will fail due to differing types. allowMultipleTypes is false. 
+ AssertExtensions.assertThrows("An exception should have been thrown", () -> service.addSchema(null, "mygroup", schemaInfo).join(), e -> e instanceof IncompatibleSchemaException); // Runtime Exception doAnswer(x -> Futures.failedFuture(new RuntimeException())).when(store).getSchemaVersion(any(), anyString(), any(), any()); diff --git a/settings.gradle b/settings.gradle index 39686cd59..d8f06f940 100644 --- a/settings.gradle +++ b/settings.gradle @@ -13,8 +13,11 @@ rootProject.name = 'schema-registry' include 'client', 'common', 'contract', - 'serializers', 'server', - 'auth' - - \ No newline at end of file + 'auth', + 'serializers:shared', + 'serializers:protobuf', + 'serializers:json', + 'serializers:avro', + 'serializers', + 'test' diff --git a/serializers/src/test/java/io/pravega/schemaregistry/testobjs/Address.java b/test/src/main/java/io/pravega/schemaregistry/integrationtest/Address.java similarity index 91% rename from serializers/src/test/java/io/pravega/schemaregistry/testobjs/Address.java rename to test/src/main/java/io/pravega/schemaregistry/integrationtest/Address.java index fb5113a4d..684badee2 100644 --- a/serializers/src/test/java/io/pravega/schemaregistry/testobjs/Address.java +++ b/test/src/main/java/io/pravega/schemaregistry/integrationtest/Address.java @@ -7,7 +7,7 @@ * * http://www.apache.org/licenses/LICENSE-2.0 */ -package io.pravega.schemaregistry.testobjs; +package io.pravega.schemaregistry.integrationtest; import lombok.AllArgsConstructor; import lombok.Data; diff --git a/serializers/src/test/java/io/pravega/schemaregistry/testobjs/DerivedUser1.java b/test/src/main/java/io/pravega/schemaregistry/integrationtest/DerivedUser1.java similarity index 93% rename from serializers/src/test/java/io/pravega/schemaregistry/testobjs/DerivedUser1.java rename to test/src/main/java/io/pravega/schemaregistry/integrationtest/DerivedUser1.java index bbf8651e2..3452631f0 100644 --- a/serializers/src/test/java/io/pravega/schemaregistry/testobjs/DerivedUser1.java +++ b/test/src/main/java/io/pravega/schemaregistry/integrationtest/DerivedUser1.java @@ -7,7 +7,7 @@ * * http://www.apache.org/licenses/LICENSE-2.0 */ -package io.pravega.schemaregistry.testobjs; +package io.pravega.schemaregistry.integrationtest; import lombok.Data; import lombok.EqualsAndHashCode; diff --git a/serializers/src/test/java/io/pravega/schemaregistry/testobjs/DerivedUser2.java b/test/src/main/java/io/pravega/schemaregistry/integrationtest/DerivedUser2.java similarity index 93% rename from serializers/src/test/java/io/pravega/schemaregistry/testobjs/DerivedUser2.java rename to test/src/main/java/io/pravega/schemaregistry/integrationtest/DerivedUser2.java index 111caef30..95d0d1f61 100644 --- a/serializers/src/test/java/io/pravega/schemaregistry/testobjs/DerivedUser2.java +++ b/test/src/main/java/io/pravega/schemaregistry/integrationtest/DerivedUser2.java @@ -7,7 +7,7 @@ * * http://www.apache.org/licenses/LICENSE-2.0 */ -package io.pravega.schemaregistry.testobjs; +package io.pravega.schemaregistry.integrationtest; import lombok.Data; import lombok.EqualsAndHashCode; diff --git a/serializers/src/test/java/io/pravega/schemaregistry/testobjs/User.java b/test/src/main/java/io/pravega/schemaregistry/integrationtest/User.java similarity index 92% rename from serializers/src/test/java/io/pravega/schemaregistry/testobjs/User.java rename to test/src/main/java/io/pravega/schemaregistry/integrationtest/User.java index fb7129bf3..200ed1594 100644 --- a/serializers/src/test/java/io/pravega/schemaregistry/testobjs/User.java +++ 
b/test/src/main/java/io/pravega/schemaregistry/integrationtest/User.java @@ -7,7 +7,7 @@ * * http://www.apache.org/licenses/LICENSE-2.0 */ -package io.pravega.schemaregistry.testobjs; +package io.pravega.schemaregistry.integrationtest; import lombok.AllArgsConstructor; import lombok.Data; diff --git a/test/src/test/java/io/pravega/schemaregistry/integrationtest/TestEndToEnd.java b/test/src/test/java/io/pravega/schemaregistry/integrationtest/TestEndToEnd.java new file mode 100644 index 000000000..4824dde3d --- /dev/null +++ b/test/src/test/java/io/pravega/schemaregistry/integrationtest/TestEndToEnd.java @@ -0,0 +1,268 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.integrationtest; + +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Lists; +import io.pravega.common.Exceptions; +import io.pravega.schemaregistry.client.SchemaRegistryClient; +import io.pravega.schemaregistry.client.SchemaRegistryClientConfig; +import io.pravega.schemaregistry.client.SchemaRegistryClientFactory; +import io.pravega.schemaregistry.client.exceptions.RegistryExceptions; +import io.pravega.schemaregistry.contract.data.Compatibility; +import io.pravega.schemaregistry.contract.data.EncodingId; +import io.pravega.schemaregistry.contract.data.GroupHistoryRecord; +import io.pravega.schemaregistry.contract.data.GroupProperties; +import io.pravega.schemaregistry.contract.data.SchemaInfo; +import io.pravega.schemaregistry.contract.data.SchemaWithVersion; +import io.pravega.schemaregistry.contract.data.SerializationFormat; +import io.pravega.schemaregistry.contract.data.VersionInfo; +import io.pravega.schemaregistry.server.rest.RestServer; +import io.pravega.schemaregistry.server.rest.ServiceConfig; +import io.pravega.schemaregistry.service.Config; +import io.pravega.schemaregistry.service.SchemaRegistryService; +import io.pravega.schemaregistry.serializer.shared.codec.Codecs; +import io.pravega.schemaregistry.storage.SchemaStore; +import io.pravega.test.common.AssertExtensions; +import io.pravega.test.common.TestUtils; +import lombok.extern.slf4j.Slf4j; +import org.apache.avro.Schema; +import org.apache.avro.SchemaBuilder; +import org.apache.curator.shaded.com.google.common.base.Charsets; +import org.junit.After; +import org.junit.Before; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.Timeout; + +import java.net.URI; +import java.nio.ByteBuffer; +import java.util.List; +import java.util.Random; +import java.util.concurrent.Executors; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; + +import static org.junit.Assert.*; + +@Slf4j +public abstract class TestEndToEnd { + public static final Random RANDOM = new Random(); + @Rule + public Timeout globalTimeout = new Timeout(3, TimeUnit.MINUTES); + + ScheduledExecutorService executor; + + private final Schema schema1 = SchemaBuilder + .record("MyTest") + .fields() + .name("a") + .type(Schema.create(Schema.Type.STRING)) + .noDefault() + .endRecord(); + + private final Schema schema2 = SchemaBuilder + .record("MyTest") + .fields() + .name("a") + .type(Schema.create(Schema.Type.STRING)) + .noDefault() + .name("b") + 
.type(Schema.create(Schema.Type.STRING)) + .withDefault("backwardPolicy compatible with schema1") + .endRecord(); + + private final Schema schema3 = SchemaBuilder + .record("MyTest") + .fields() + .name("a") + .type(Schema.create(Schema.Type.STRING)) + .noDefault() + .name("b") + .type(Schema.create(Schema.Type.STRING)) + .noDefault() + .endRecord(); + + private final Schema schemaTest2 = SchemaBuilder + .record("MyTest2") + .fields() + .name("a") + .type(Schema.create(Schema.Type.STRING)) + .noDefault() + .name("b") + .type(Schema.create(Schema.Type.STRING)) + .noDefault() + .endRecord(); + + private int port; + private RestServer restServer; + + @Before + public void setUp() { + executor = Executors.newScheduledThreadPool(10); + ScheduledExecutorService executor = Executors.newScheduledThreadPool(Config.THREAD_POOL_SIZE); + + port = TestUtils.getAvailableListenPort(); + ServiceConfig serviceConfig = ServiceConfig.builder().port(port).build(); + SchemaStore store = getStore(); + + SchemaRegistryService service = new SchemaRegistryService(store, executor); + + restServer = new RestServer(service, serviceConfig); + restServer.startAsync(); + restServer.awaitRunning(); + } + + @After + public void tearDown() { + restServer.stopAsync(); + restServer.awaitTerminated(); + executor.shutdownNow(); + } + + @Test + public void testEndToEnd() { + SchemaRegistryClient client = SchemaRegistryClientFactory.withDefaultNamespace( + SchemaRegistryClientConfig.builder().schemaRegistryUri(URI.create("http://localhost:" + port)).build()); + + String group = "group"; + + int groupsCount = Lists.newArrayList(client.listGroups()).size(); + + client.addGroup(group, new GroupProperties(SerializationFormat.Avro, + Compatibility.backward(), + true)); + assertEquals(Lists.newArrayList(client.listGroups()).size(), groupsCount + 1); + + String myTest = "MyTest"; + SchemaInfo schemaInfo = new SchemaInfo(myTest, SerializationFormat.Avro, + ByteBuffer.wrap(schema1.toString().getBytes(Charsets.UTF_8)), ImmutableMap.of()); + + VersionInfo version1 = client.addSchema(group, schemaInfo); + assertEquals(version1.getVersion(), 0); + assertEquals(version1.getId(), 0); + assertEquals(version1.getType(), myTest); + // attempt to add an existing schema + version1 = client.addSchema(group, schemaInfo); + assertEquals(version1.getVersion(), 0); + assertEquals(version1.getId(), 0); + assertEquals(version1.getType(), myTest); + + SchemaInfo schemaInfo2 = new SchemaInfo(myTest, SerializationFormat.Avro, + ByteBuffer.wrap(schema2.toString().getBytes(Charsets.UTF_8)), ImmutableMap.of()); + VersionInfo version2 = client.addSchema(group, schemaInfo2); + assertEquals(version2.getVersion(), 1); + assertEquals(version2.getId(), 1); + assertEquals(version2.getType(), myTest); + + assertTrue(client.updateCompatibility(group, Compatibility.backwardTillAndForwardOne(version1), null)); + + assertFalse(client.updateCompatibility(group, Compatibility.fullTransitive(), Compatibility.forward())); + + assertTrue(client.updateCompatibility(group, Compatibility.fullTransitive(), null)); + + assertTrue(client.updateCompatibility(group, Compatibility.backward(), Compatibility.fullTransitive())); + + assertFalse(client.updateCompatibility(group, Compatibility.backward(), Compatibility.fullTransitive())); + + assertTrue(client.updateCompatibility(group, Compatibility.fullTransitive(), Compatibility.backward())); + + SchemaInfo schemaInfo3 = new SchemaInfo(myTest, SerializationFormat.Avro, + ByteBuffer.wrap(schema3.toString().getBytes(Charsets.UTF_8)), 
ImmutableMap.of()); + + AssertExtensions.assertThrows("", () -> client.addSchema(group, schemaInfo3), + e -> Exceptions.unwrap(e) instanceof RegistryExceptions.SchemaValidationFailedException); + + String myTest2 = "MyTest2"; + SchemaInfo schemaInfo4 = new SchemaInfo(myTest2, SerializationFormat.Avro, + ByteBuffer.wrap(schemaTest2.toString().getBytes(Charsets.UTF_8)), ImmutableMap.of()); + VersionInfo version3 = client.addSchema(group, schemaInfo4); + assertEquals(version3.getVersion(), 0); + assertEquals(version3.getId(), 2); + assertEquals(version3.getType(), myTest2); + + List types = client.getSchemas(group).stream().map(x -> x.getSchemaInfo().getType()).collect(Collectors.toList()); + assertEquals(types.size(), 2); + assertTrue(types.contains(myTest)); + assertTrue(types.contains(myTest2)); + List groupEvolutionHistory = client.getGroupHistory(group); + assertEquals(groupEvolutionHistory.size(), 3); + List myTestHistory = client.getSchemaVersions(group, myTest); + assertEquals(myTestHistory.size(), 2); + List myTest2History = client.getSchemaVersions(group, myTest2); + assertEquals(myTest2History.size(), 1); + + // delete schemainfo2 + EncodingId encodingId = client.getEncodingId(group, version2, Codecs.None.getCodec().getName()); + assertEquals(encodingId.getId(), 0); + client.deleteSchemaVersion(group, version2); + SchemaInfo schema = client.getSchemaForVersion(group, version2); + assertEquals(schema, schemaInfo2); + AssertExtensions.assertThrows("", () -> client.getVersionForSchema(group, schemaInfo2), + e -> Exceptions.unwrap(e) instanceof RegistryExceptions.ResourceNotFoundException); + encodingId = client.getEncodingId(group, version2, Codecs.None.getCodec().getName()); + assertEquals(encodingId.getId(), 0); + AssertExtensions.assertThrows("", () -> client.getEncodingId(group, version2, Codecs.GzipCompressor.getCodec().getName()), + e -> Exceptions.unwrap(e) instanceof RegistryExceptions.ResourceNotFoundException); + + groupEvolutionHistory = client.getGroupHistory(group); + assertEquals(groupEvolutionHistory.size(), 2); + + myTestHistory = client.getSchemaVersions(group, myTest); + assertEquals(myTestHistory.size(), 1); + SchemaWithVersion schemaWithVersion = client.getLatestSchemaVersion(group, myTest); + assertEquals(schemaWithVersion.getVersionInfo(), version1); + + schemaWithVersion = client.getLatestSchemaVersion(group, null); + assertEquals(schemaWithVersion.getVersionInfo(), version3); + + // add the schema again. 
it should get a new version + VersionInfo version4 = client.addSchema(group, schemaInfo2); + assertEquals(version4.getId(), 3); + assertEquals(version4.getVersion(), 2); + + client.removeGroup(group); + } + + @Test + public void testLargeSchemas() { + SchemaRegistryClient client = SchemaRegistryClientFactory.withDefaultNamespace( + SchemaRegistryClientConfig.builder().schemaRegistryUri(URI.create("http://localhost:" + port)).build()); + + String group = "group"; + + // recreate group with different properties + SerializationFormat custom = SerializationFormat.custom("a"); + client.addGroup(group, new GroupProperties(custom, + Compatibility.allowAny(), + true)); + + // generate a schema of 8 mb in size + byte[] array = new byte[8 * 1024 * 1024]; + RANDOM.nextBytes(array); + + SchemaInfo s = new SchemaInfo("x", custom, + ByteBuffer.wrap(array), ImmutableMap.of()); + + VersionInfo v = client.addSchema(group, s); + assertEquals(v.getId(), 0); + assertEquals(v.getVersion(), 0); + assertEquals(v.getType(), "x"); + + assertEquals(client.getSchemaReferences(s).size(), 1); + assertEquals(client.getSchemas(group).get(0).getSchemaInfo(), s); + assertEquals(client.getGroupHistory(group).get(0).getSchemaInfo(), s); + assertEquals(v, client.getVersionForSchema(group, s)); + } + + abstract SchemaStore getStore(); +} + diff --git a/test/src/test/java/io/pravega/schemaregistry/integrationtest/TestInMemoryEndToEnd.java b/test/src/test/java/io/pravega/schemaregistry/integrationtest/TestInMemoryEndToEnd.java new file mode 100644 index 000000000..4ce903104 --- /dev/null +++ b/test/src/test/java/io/pravega/schemaregistry/integrationtest/TestInMemoryEndToEnd.java @@ -0,0 +1,19 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.integrationtest; + +import io.pravega.schemaregistry.storage.SchemaStore; +import io.pravega.schemaregistry.storage.SchemaStoreFactory; + +public class TestInMemoryEndToEnd extends TestEndToEnd { + SchemaStore getStore() { + return SchemaStoreFactory.createInMemoryStore(executor); + } +} diff --git a/test/src/test/java/io/pravega/schemaregistry/integrationtest/TestPravegaClientEndToEnd.java b/test/src/test/java/io/pravega/schemaregistry/integrationtest/TestPravegaClientEndToEnd.java new file mode 100644 index 000000000..35b8336a4 --- /dev/null +++ b/test/src/test/java/io/pravega/schemaregistry/integrationtest/TestPravegaClientEndToEnd.java @@ -0,0 +1,1375 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + *
+ * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + *
+ * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.integrationtest; + +import com.fasterxml.jackson.databind.JsonNode; +import com.google.common.base.Strings; +import com.google.common.collect.ImmutableMap; +import com.google.protobuf.DescriptorProtos; +import com.google.protobuf.DynamicMessage; +import com.google.protobuf.GeneratedMessageV3; +import io.pravega.client.ClientConfig; +import io.pravega.client.EventStreamClientFactory; +import io.pravega.client.admin.ReaderGroupManager; +import io.pravega.client.admin.StreamManager; +import io.pravega.client.admin.impl.ReaderGroupManagerImpl; +import io.pravega.client.admin.impl.StreamManagerImpl; +import io.pravega.client.connection.impl.SocketConnectionFactoryImpl; +import io.pravega.client.stream.EventRead; +import io.pravega.client.stream.EventStreamReader; +import io.pravega.client.stream.EventStreamWriter; +import io.pravega.client.stream.EventWriterConfig; +import io.pravega.client.stream.ReaderConfig; +import io.pravega.client.stream.ReaderGroupConfig; +import io.pravega.client.stream.ScalingPolicy; +import io.pravega.client.stream.Serializer; +import io.pravega.client.stream.StreamConfiguration; +import io.pravega.common.Exceptions; +import io.pravega.schemaregistry.client.SchemaRegistryClient; +import io.pravega.schemaregistry.client.SchemaRegistryClientConfig; +import io.pravega.schemaregistry.client.SchemaRegistryClientFactory; +import io.pravega.schemaregistry.client.exceptions.RegistryExceptions; +import io.pravega.schemaregistry.common.Either; +import io.pravega.schemaregistry.contract.data.CodecType; +import io.pravega.schemaregistry.contract.data.Compatibility; +import io.pravega.schemaregistry.contract.data.GroupProperties; +import io.pravega.schemaregistry.contract.data.SchemaInfo; +import io.pravega.schemaregistry.contract.data.SerializationFormat; +import io.pravega.schemaregistry.contract.data.VersionInfo; +import io.pravega.schemaregistry.pravegastandalone.PravegaStandaloneUtils; +import io.pravega.schemaregistry.serializer.avro.impl.AvroSerializerFactory; +import io.pravega.schemaregistry.serializer.avro.schemas.AvroSchema; +import io.pravega.schemaregistry.serializer.avro.testobjs.generated.Test1; +import io.pravega.schemaregistry.serializer.avro.testobjs.generated.Test2; +import io.pravega.schemaregistry.serializer.avro.testobjs.generated.Test3; +import io.pravega.schemaregistry.serializer.json.impl.JsonSerializerFactory; +import io.pravega.schemaregistry.serializer.json.schemas.JSONSchema; +import io.pravega.schemaregistry.serializer.protobuf.generated.ProtobufTest; +import io.pravega.schemaregistry.serializer.protobuf.impl.ProtobufSerializerFactory; +import io.pravega.schemaregistry.serializer.protobuf.schemas.ProtobufSchema; +import io.pravega.schemaregistry.serializer.shared.codec.Codec; +import io.pravega.schemaregistry.serializer.shared.codec.Codecs; +import io.pravega.schemaregistry.serializer.shared.impl.SerializerConfig; +import io.pravega.schemaregistry.serializers.SerializerFactory; +import io.pravega.schemaregistry.serializers.WithSchema; +import io.pravega.schemaregistry.server.rest.RestServer; +import io.pravega.schemaregistry.server.rest.ServiceConfig; +import io.pravega.schemaregistry.service.SchemaRegistryService; +import io.pravega.schemaregistry.storage.SchemaStore; +import io.pravega.schemaregistry.storage.SchemaStoreFactory; +import io.pravega.shared.NameUtils; +import io.pravega.test.common.AssertExtensions; +import io.pravega.test.common.TestUtils; +import 
lombok.AllArgsConstructor; +import lombok.Data; +import lombok.NoArgsConstructor; +import lombok.SneakyThrows; +import lombok.extern.slf4j.Slf4j; +import org.apache.avro.Schema; +import org.apache.avro.SchemaBuilder; +import org.apache.avro.generic.GenericRecord; +import org.apache.avro.generic.GenericRecordBuilder; +import org.apache.avro.reflect.ReflectData; +import org.apache.avro.specific.SpecificRecordBase; +import org.junit.After; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.Timeout; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.net.URI; +import java.nio.ByteBuffer; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.Base64; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Random; +import java.util.concurrent.Executors; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.TimeUnit; +import java.util.function.Function; + +import static org.junit.Assert.*; + +@Slf4j +public class TestPravegaClientEndToEnd implements AutoCloseable { + private static final Schema SCHEMA1 = SchemaBuilder + .record("MyTest") + .namespace("a.b.c") + .fields() + .name("a") + .type(Schema.create(Schema.Type.STRING)) + .noDefault() + .endRecord(); + + private static final Schema SCHEMA2 = SchemaBuilder + .record("MyTest") + .namespace("a.b.c") + .fields() + .name("a") + .type(Schema.create(Schema.Type.STRING)) + .noDefault() + .name("b") + .type(Schema.create(Schema.Type.STRING)) + .withDefault("backwardPolicy compatible with schema1") + .endRecord(); + + private static final Schema SCHEMA3 = SchemaBuilder + .record("MyTest") + .namespace("a.b.c") + .fields() + .name("a") + .type(Schema.create(Schema.Type.STRING)) + .noDefault() + .name("b") + .type(Schema.create(Schema.Type.STRING)) + .noDefault() + .name("c") + .type(Schema.create(Schema.Type.STRING)) + .noDefault() + .endRecord(); + + @Rule + public Timeout globalTimeout = new Timeout(3, TimeUnit.MINUTES); + + private final ClientConfig clientConfig; + + private final SchemaStore schemaStore; + private final ScheduledExecutorService executor; + private final SchemaRegistryService service; + private final SchemaRegistryClient client; + private final PravegaStandaloneUtils pravegaStandaloneUtils; + private Random random; + private final int port; + private final RestServer restServer; + + public TestPravegaClientEndToEnd() throws Exception { + pravegaStandaloneUtils = PravegaStandaloneUtils.startPravega(); + executor = Executors.newScheduledThreadPool(10); + + clientConfig = ClientConfig.builder().controllerURI(URI.create(pravegaStandaloneUtils.getControllerURI())).build(); + + schemaStore = SchemaStoreFactory.createPravegaStore(clientConfig, executor); + + service = new SchemaRegistryService(schemaStore, executor); + port = TestUtils.getAvailableListenPort(); + ServiceConfig serviceConfig = ServiceConfig.builder().port(port).build(); + + restServer = new RestServer(service, serviceConfig); + restServer.startAsync(); + restServer.awaitRunning(); + client = SchemaRegistryClientFactory.withDefaultNamespace( + SchemaRegistryClientConfig.builder().schemaRegistryUri(URI.create("http://localhost:" + port)).build()); + random = new Random(); + } + + @Override + @After + public void close() throws Exception { + restServer.stopAsync(); + restServer.awaitTerminated(); + executor.shutdownNow(); + } + + @Test + public void testRestApis() { + // create stream + String groupId = 
"myGroup"; + SchemaInfo schemaInfo = AvroSchema.of(SCHEMA1).getSchemaInfo(); + SchemaInfo schemaInfo1 = AvroSchema.of(SCHEMA2).getSchemaInfo(); + SchemaInfo schemaInfo2 = AvroSchema.of(SCHEMA3).getSchemaInfo(); + + Map references = client.getSchemaReferences(schemaInfo); + int preTestCount = references.size(); + + client.addGroup(groupId, new GroupProperties(SerializationFormat.Avro, Compatibility.allowAny(), true)); + client.addSchema(groupId, schemaInfo); + client.addSchema(groupId, schemaInfo1); + client.addSchema(groupId, schemaInfo2); + + VersionInfo version1 = client.getVersionForSchema(groupId, schemaInfo); + assertEquals(version1.getId(), 0); + VersionInfo version2 = client.getVersionForSchema(groupId, schemaInfo1); + assertEquals(version2.getId(), 1); + VersionInfo version3 = client.getVersionForSchema(groupId, schemaInfo2); + assertEquals(version3.getId(), 2); + + references = client.getSchemaReferences(schemaInfo); + assertEquals(references.size(), preTestCount + 1); + + String groupId2 = "mygrp2"; + client.addGroup(groupId2, new GroupProperties(SerializationFormat.Avro, Compatibility.allowAny(), true)); + client.addSchema(groupId2, schemaInfo); + + references = client.getSchemaReferences(schemaInfo); + assertEquals(references.size(), preTestCount + 2); + } + + @Test + public void testAvroSchemaEvolution() { + // create stream + String scope = "scope"; + String stream = "avroevolution"; + String groupId = NameUtils.getScopedStreamName(scope, stream); + + try (StreamManager streamManager = new StreamManagerImpl(clientConfig)) { + streamManager.createScope(scope); + streamManager.createStream(scope, stream, StreamConfiguration.builder().scalingPolicy(ScalingPolicy.fixed(1)).build()); + SerializationFormat serializationFormat = SerializationFormat.Avro; + + AvroSchema schema1 = AvroSchema.of(SCHEMA1); + AvroSchema schema2 = AvroSchema.of(SCHEMA2); + AvroSchema schema3 = AvroSchema.of(SCHEMA3); + + SerializerConfig serializerConfig = SerializerConfig.builder() + .groupId(groupId) + .createGroup(serializationFormat, + Compatibility.backward(), + true) + .registerSchema(true) + .registryClient(client) + .build(); + + try (EventStreamClientFactory clientFactory = EventStreamClientFactory.withScope(scope, clientConfig)) { + + // region writer with schema1 + Serializer serializer = AvroSerializerFactory.serializer(serializerConfig, schema1); + + EventStreamWriter writer = clientFactory.createEventWriter(stream, serializer, EventWriterConfig.builder().build()); + GenericRecord record = new GenericRecordBuilder(SCHEMA1).set("a", "test").build(); + writer.writeEvent(record).join(); + // endregion + + // region writer with schema2 + serializer = AvroSerializerFactory.serializer(serializerConfig, schema2); + + writer = clientFactory.createEventWriter(stream, serializer, EventWriterConfig.builder().build()); + record = new GenericRecordBuilder(SCHEMA2).set("a", "test").set("b", "value").build(); + writer.writeEvent(record).join(); + // endregion + + // region writer with schema3 + // this should throw exception as schema change is not backwardPolicy compatible. 
+ AssertExtensions.assertThrows("", () -> AvroSerializerFactory.serializer(serializerConfig, schema3), + ex -> Exceptions.unwrap(ex) instanceof RegistryExceptions.SchemaValidationFailedException); + // endregion + + // region read into specific schema + ReaderGroupManager readerGroupManager = new ReaderGroupManagerImpl(scope, clientConfig, new SocketConnectionFactoryImpl(clientConfig)); + String rg = "rg" + stream; + readerGroupManager.createReaderGroup(rg, + ReaderGroupConfig.builder().stream(NameUtils.getScopedStreamName(scope, stream)).disableAutomaticCheckpoints().build()); + + AvroSchema readSchema = AvroSchema.of(SCHEMA2); + + Serializer deserializer = AvroSerializerFactory.genericDeserializer(serializerConfig, readSchema); + + EventStreamReader reader = clientFactory.createReader("r1", rg, deserializer, ReaderConfig.builder().build()); + + // read two events successfully + EventRead event = reader.readNextEvent(10000); + assertNotNull(event.getEvent()); + event = reader.readNextEvent(10000); + assertNotNull(event.getEvent()); + + reader.close(); + // create new reader, this time with incompatible schema3 + + String rg1 = "rg1" + stream; + readerGroupManager.createReaderGroup(rg1, + ReaderGroupConfig.builder().stream(NameUtils.getScopedStreamName(scope, stream)).disableAutomaticCheckpoints().build()); + + AvroSchema readSchemaEx = AvroSchema.of(SCHEMA3); + + AssertExtensions.assertThrows("", () -> AvroSerializerFactory.genericDeserializer(serializerConfig, readSchemaEx), + ex -> Exceptions.unwrap(ex) instanceof IllegalArgumentException); + reader.close(); + // endregion + + // region read into writer schema + String rg2 = "rg2" + stream; + readerGroupManager.createReaderGroup(rg2, + ReaderGroupConfig.builder().stream(NameUtils.getScopedStreamName(scope, stream)).disableAutomaticCheckpoints().build()); + + deserializer = AvroSerializerFactory.genericDeserializer(serializerConfig, null); + + reader = clientFactory.createReader("r1", rg2, deserializer, ReaderConfig.builder().build()); + + event = reader.readNextEvent(10000); + assertNotNull(event.getEvent()); + + event = reader.readNextEvent(10000); + assertNotNull(event.getEvent()); + reader.close(); + readerGroupManager.close(); + // endregion + + client.removeGroup(groupId); + streamManager.sealStream(scope, stream); + streamManager.deleteStream(scope, stream); + } + } + } + + @Test + public void testCodec() { + // create stream + String scope = "scope"; + String stream = "avrocodec"; + String groupId = NameUtils.getScopedStreamName(scope, stream); + + try (StreamManager streamManager = new StreamManagerImpl(clientConfig)) { + streamManager.createScope(scope); + streamManager.createStream(scope, stream, StreamConfiguration.builder().scalingPolicy(ScalingPolicy.fixed(1)).build()); + SerializationFormat serializationFormat = SerializationFormat.Avro; + + AvroSchema schema1 = AvroSchema.of(SCHEMA1); + AvroSchema schema2 = AvroSchema.of(SCHEMA2); + AvroSchema schema3 = AvroSchema.of(Test1.class); + + SerializerConfig serializerConfig = SerializerConfig.builder() + .groupId(groupId) + .createGroup(serializationFormat, + Compatibility.backward(), + true) + .registerSchema(true) + .registerCodec(true) + .registryClient(client) + .build(); + + try (EventStreamClientFactory clientFactory = EventStreamClientFactory.withScope(scope, clientConfig)) { + + // region writer with schema1 + Serializer serializer = AvroSerializerFactory.serializer(serializerConfig, schema1); + + EventStreamWriter writer = clientFactory.createEventWriter(stream, 
serializer, EventWriterConfig.builder().build()); + GenericRecord record = new GenericRecordBuilder(SCHEMA1).set("a", "test").build(); + writer.writeEvent(record).join(); + // endregion + + // region writer with schema2 + Serializer serializer2 = AvroSerializerFactory.serializer(serializerConfig, schema2); + + writer = clientFactory.createEventWriter(stream, serializer2, EventWriterConfig.builder().build()); + record = new GenericRecordBuilder(SCHEMA2).set("a", "test").set("b", "value").build(); + writer.writeEvent(record).join(); + // endregion + + // region writer with codec gzip + serializerConfig = SerializerConfig.builder() + .groupId(groupId) + .registerSchema(true) + .registerCodec(true) + .encoder(Codecs.GzipCompressor.getCodec()) + .registryClient(client) + .build(); + + Serializer serializer3 = AvroSerializerFactory.serializer(serializerConfig, schema3); + EventStreamWriter writer3 = clientFactory.createEventWriter(stream, serializer3, EventWriterConfig.builder().build()); + String bigString = generateBigString(1); + writer3.writeEvent(new Test1(bigString, 1)).join(); + + List list = client.getCodecTypes(groupId); + assertEquals(2, list.size()); + assertTrue(list.stream().anyMatch(x -> x.equals(Codecs.None.getCodec().getCodecType()))); + assertTrue(list.stream().anyMatch(x -> x.equals(Codecs.GzipCompressor.getCodec().getCodecType()))); + // endregion + + // region writer with codec snappy + serializerConfig = SerializerConfig.builder() + .groupId(groupId) + .registerSchema(true) + .registerCodec(true) + .encoder(Codecs.SnappyCompressor.getCodec()) + .registryClient(client) + .build(); + + Serializer serializer4 = AvroSerializerFactory.serializer(serializerConfig, schema3); + EventStreamWriter writer4 = clientFactory.createEventWriter(stream, serializer4, EventWriterConfig.builder().build()); + String bigString2 = generateBigString(200); + writer4.writeEvent(new Test1(bigString2, 1)).join(); + + list = client.getCodecTypes(groupId); + assertEquals(3, list.size()); + assertTrue(list.stream().anyMatch(x -> x.equals(Codecs.None.getCodec().getCodecType()))); + assertTrue(list.stream().anyMatch(x -> x.equals(Codecs.GzipCompressor.getCodec().getCodecType()))); + assertTrue(list.stream().anyMatch(x -> x.equals(Codecs.SnappyCompressor.getCodec().getCodecType()))); + // endregion + + // region reader + serializerConfig = SerializerConfig.builder() + .groupId(groupId) + .registryClient(client) + .build(); + ReaderGroupManager readerGroupManager = new ReaderGroupManagerImpl(scope, clientConfig, new SocketConnectionFactoryImpl(clientConfig)); + String rg = "rg" + stream + System.currentTimeMillis(); + readerGroupManager.createReaderGroup(rg, + ReaderGroupConfig.builder().stream(NameUtils.getScopedStreamName(scope, stream)).disableAutomaticCheckpoints().build()); + + Serializer deserializer = AvroSerializerFactory.genericDeserializer(serializerConfig, null); + + EventStreamReader reader = clientFactory.createReader("r1", rg, deserializer, ReaderConfig.builder().build()); + + EventRead event = reader.readNextEvent(10000); + while (event.isCheckpoint() || event.getEvent() != null) { + Object e = event.getEvent(); + event = reader.readNextEvent(10000); + } + // endregion + + // region writer with custom codec + CodecType mycodec = new CodecType("mycodec"); + Codec myCodec = new Codec() { + @Override + public String getName() { + return mycodec.getName(); + } + + @Override + public CodecType getCodecType() { + return mycodec; + } + + @SneakyThrows + @Override + public void encode(ByteBuffer data, 
ByteArrayOutputStream bos) { + bos.write(data.array(), data.arrayOffset() + data.position(), data.remaining()); + } + + @SneakyThrows + @Override + public ByteBuffer decode(ByteBuffer data, Map properties) { + return data; + } + }; + serializerConfig = SerializerConfig.builder() + .groupId(groupId) + .registerSchema(true) + .registerCodec(true) + .encoder(myCodec) + .registryClient(client) + .build(); + + Serializer serializer5 = AvroSerializerFactory.serializer(serializerConfig, schema3); + EventStreamWriter writer2 = clientFactory.createEventWriter(stream, serializer5, EventWriterConfig.builder().build()); + String bigString3 = generateBigString(300); + writer2.writeEvent(new Test1(bigString3, 1)).join(); + // endregion + + list = client.getCodecTypes(groupId); + assertEquals(4, list.size()); + assertTrue(list.stream().anyMatch(x -> x.equals(Codecs.None.getCodec().getCodecType()))); + assertTrue(list.stream().anyMatch(x -> x.equals(Codecs.GzipCompressor.getCodec().getCodecType()))); + assertTrue(list.stream().anyMatch(x -> x.equals(Codecs.SnappyCompressor.getCodec().getCodecType()))); + assertTrue(list.stream().anyMatch(x -> x.equals(mycodec))); + reader.close(); + + // region new reader with additional codec + // add new decoder for custom serialization + SerializerConfig serializerConfig2 = SerializerConfig.builder() + .groupId(groupId) + .decoder(myCodec.getName(), myCodec) + .registryClient(client) + .build(); + + Serializer deserializer2 = AvroSerializerFactory.genericDeserializer(serializerConfig2, null); + + EventStreamReader reader2 = clientFactory.createReader("r2", rg, deserializer2, ReaderConfig.builder().build()); + + event = reader2.readNextEvent(10000); + while (event.isCheckpoint() || event.getEvent() != null) { + Object e = event.getEvent(); + event = reader2.readNextEvent(10000); + } + // endregion + + writer.close(); + writer2.close(); + writer3.close(); + writer4.close(); + reader.close(); + reader2.close(); + readerGroupManager.close(); + + client.removeGroup(groupId); + streamManager.sealStream(scope, stream); + streamManager.deleteStream(scope, stream); + } + } + } + + private String generateBigString(int sizeInKb) { + byte[] array = new byte[1024 * sizeInKb]; + random.nextBytes(array); + return Base64.getEncoder().encodeToString(array); + } + + @Test + public void testAvroReflect() throws IOException { + // create stream + String scope = "scope"; + String stream = "avroreflect"; + String groupId = NameUtils.getScopedStreamName(scope, stream); + + try (StreamManager streamManager = new StreamManagerImpl(clientConfig)) { + streamManager.createScope(scope); + streamManager.createStream(scope, stream, StreamConfiguration.builder().scalingPolicy(ScalingPolicy.fixed(1)).build()); + SerializationFormat serializationFormat = SerializationFormat.Avro; + + AvroSchema schema = AvroSchema.of(TestClass.class); + + SerializerConfig serializerConfig = SerializerConfig.builder() + .groupId(groupId) + .createGroup(serializationFormat, + Compatibility.backward(), + true) + .registerSchema(true) + .registryClient(client) + .build(); + + // region writer + Serializer serializer = AvroSerializerFactory.serializer(serializerConfig, schema); + try (EventStreamClientFactory clientFactory = EventStreamClientFactory.withScope(scope, clientConfig)) { + + EventStreamWriter writer = clientFactory.createEventWriter(stream, serializer, EventWriterConfig.builder().build()); + writer.writeEvent(new TestClass("test")).join(); + writer.close(); + // endregion + + // region read into specific schema 
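+ // read the event back in three ways: with a typed reflect deserializer bound to TestClass,
+ // with a generic deserializer over ReflectData's schema for TestClass, and finally with a
+ // generic deserializer that falls back to the writer's schema.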
+ ReaderGroupManager readerGroupManager = new ReaderGroupManagerImpl(scope, clientConfig, new SocketConnectionFactoryImpl(clientConfig)); + String rg = "rgx" + stream; + readerGroupManager.createReaderGroup(rg, + ReaderGroupConfig.builder().stream(NameUtils.getScopedStreamName(scope, stream)).disableAutomaticCheckpoints().build()); + + AvroSchema readSchema1 = AvroSchema.of(TestClass.class); + + Serializer deserializer1 = AvroSerializerFactory.deserializer(serializerConfig, readSchema1); + + EventStreamReader reader1 = clientFactory.createReader("r1", rg, deserializer1, ReaderConfig.builder().build()); + + EventRead event1 = reader1.readNextEvent(10000); + assertNotNull(event1.getEvent()); + reader1.close(); + // endregion + // region read into specific schema + readerGroupManager = new ReaderGroupManagerImpl(scope, clientConfig, new SocketConnectionFactoryImpl(clientConfig)); + rg = "rg" + stream; + readerGroupManager.createReaderGroup(rg, + ReaderGroupConfig.builder().stream(NameUtils.getScopedStreamName(scope, stream)).disableAutomaticCheckpoints().build()); + + AvroSchema readSchema = AvroSchema.of(ReflectData.get().getSchema(TestClass.class)); + + Serializer deserializer = AvroSerializerFactory.genericDeserializer(serializerConfig, readSchema); + + EventStreamReader reader = clientFactory.createReader("r1", rg, deserializer, ReaderConfig.builder().build()); + + EventRead event = reader.readNextEvent(10000); + assertNotNull(event.getEvent()); + reader.close(); + // endregion + // region read into writer schema + String rg2 = "rg2" + stream; + readerGroupManager.createReaderGroup(rg2, + ReaderGroupConfig.builder().stream(NameUtils.getScopedStreamName(scope, stream)).disableAutomaticCheckpoints().build()); + + deserializer = AvroSerializerFactory.genericDeserializer(serializerConfig, null); + + reader = clientFactory.createReader("r1", rg2, deserializer, ReaderConfig.builder().build()); + + event = reader.readNextEvent(10000); + assertNotNull(event.getEvent()); + reader.close(); + readerGroupManager.close(); + // endregion + + client.removeGroup(groupId); + streamManager.sealStream(scope, stream); + streamManager.deleteStream(scope, stream); + } + } + } + + @Test + public void testAvroGenerated() throws IOException { + // create stream + String scope = "scope"; + String stream = "avrogenerated"; + String groupId = NameUtils.getScopedStreamName(scope, stream); + + try (StreamManager streamManager = new StreamManagerImpl(clientConfig)) { + streamManager.createScope(scope); + streamManager.createStream(scope, stream, StreamConfiguration.builder().scalingPolicy(ScalingPolicy.fixed(1)).build()); + SerializationFormat serializationFormat = SerializationFormat.Avro; + + AvroSchema schema = AvroSchema.of(Test1.class); + + SerializerConfig serializerConfig = SerializerConfig.builder() + .groupId(groupId) + .createGroup(serializationFormat, + Compatibility.backward(), + true) + .registerSchema(true) + .registryClient(client) + .build(); + // region writer + Serializer serializer = AvroSerializerFactory.serializer(serializerConfig, schema); + try (EventStreamClientFactory clientFactory = EventStreamClientFactory.withScope(scope, clientConfig)) { + + EventStreamWriter writer = clientFactory.createEventWriter(stream, serializer, EventWriterConfig.builder().build()); + writer.writeEvent(new Test1("test", 1000)).join(); + writer.close(); + // endregion + + // region read into specific schema + ReaderGroupManager readerGroupManager = new ReaderGroupManagerImpl(scope, clientConfig, new 
SocketConnectionFactoryImpl(clientConfig)); + String rg = "rg" + stream; + readerGroupManager.createReaderGroup(rg, + ReaderGroupConfig.builder().stream(NameUtils.getScopedStreamName(scope, stream)).disableAutomaticCheckpoints().build()); + + AvroSchema readSchema = AvroSchema.of(Test1.class); + + Serializer deserializer = AvroSerializerFactory.deserializer(serializerConfig, readSchema); + + EventStreamReader reader = clientFactory.createReader("r1", rg, deserializer, ReaderConfig.builder().build()); + + EventRead event = reader.readNextEvent(10000); + assertNotNull(event.getEvent()); + assertEquals("test", event.getEvent().getName().toString()); + assertEquals(1000, event.getEvent().getField1()); + reader.close(); + // endregion + // region read into writer schema + String rg2 = "rg2" + stream; + readerGroupManager.createReaderGroup(rg2, + ReaderGroupConfig.builder().stream(NameUtils.getScopedStreamName(scope, stream)).disableAutomaticCheckpoints().build()); + + Serializer genericDeserializer = AvroSerializerFactory.genericDeserializer(serializerConfig, null); + + EventStreamReader reader2 = clientFactory.createReader("r1", rg2, genericDeserializer, ReaderConfig.builder().build()); + + EventRead event2 = reader2.readNextEvent(10000); + assertNotNull(event2.getEvent()); + readerGroupManager.close(); + // endregion + + client.removeGroup(groupId); + streamManager.sealStream(scope, stream); + streamManager.deleteStream(scope, stream); + } + } + } + + @Test + public void testAvroMultiplexed() throws IOException { + // create stream + String scope = "scope"; + String stream = "avromultiplexed"; + String groupId = NameUtils.getScopedStreamName(scope, stream); + + try (StreamManager streamManager = new StreamManagerImpl(clientConfig)) { + streamManager.createScope(scope); + streamManager.createStream(scope, stream, StreamConfiguration.builder().scalingPolicy(ScalingPolicy.fixed(1)).build()); + SerializationFormat serializationFormat = SerializationFormat.Avro; + + AvroSchema schema1 = AvroSchema.ofSpecificRecord(Test1.class); + AvroSchema schema2 = AvroSchema.ofSpecificRecord(Test2.class); + AvroSchema schema3 = AvroSchema.ofSpecificRecord(Test3.class); + + SerializerConfig serializerConfig = SerializerConfig.builder() + .groupId(groupId) + .createGroup(serializationFormat, + Compatibility.backward(), + true) + .registerSchema(true) + .registryClient(client) + .build(); + // region writer + Map, AvroSchema> map = new HashMap<>(); + map.put(Test1.class, schema1); + map.put(Test2.class, schema2); + map.put(Test3.class, schema3); + Serializer serializer = SerializerFactory.avroMultiTypeSerializer(serializerConfig, map); + try (EventStreamClientFactory clientFactory = EventStreamClientFactory.withScope(scope, clientConfig)) { + + EventStreamWriter writer = clientFactory.createEventWriter(stream, serializer, EventWriterConfig.builder().build()); + writer.writeEvent(new Test1("test", 0)).join(); + writer.writeEvent(new Test2("test", 0, "test")).join(); + writer.writeEvent(new Test3("test", 0, "test", "test")).join(); + writer.close(); + // endregion + + // region read into specific schema + ReaderGroupManager readerGroupManager = new ReaderGroupManagerImpl(scope, clientConfig, new SocketConnectionFactoryImpl(clientConfig)); + String rg = "rg" + stream; + readerGroupManager.createReaderGroup(rg, + ReaderGroupConfig.builder().stream(NameUtils.getScopedStreamName(scope, stream)).disableAutomaticCheckpoints().build()); + + Serializer deserializer = 
SerializerFactory.avroMultiTypeDeserializer(serializerConfig, map); + + EventStreamReader reader = clientFactory.createReader("r1", rg, deserializer, ReaderConfig.builder().build()); + + EventRead event1 = reader.readNextEvent(10000); + assertNotNull(event1.getEvent()); + assertTrue(event1.getEvent() instanceof Test1); + EventRead event2 = reader.readNextEvent(10000); + assertNotNull(event2.getEvent()); + assertTrue(event2.getEvent() instanceof Test2); + EventRead event3 = reader.readNextEvent(10000); + assertNotNull(event3.getEvent()); + assertTrue(event3.getEvent() instanceof Test3); + + // endregion + // region read into writer schema + String rg2 = "rg2" + stream; + readerGroupManager.createReaderGroup(rg2, + ReaderGroupConfig.builder().stream(NameUtils.getScopedStreamName(scope, stream)).disableAutomaticCheckpoints().build()); + + Serializer genericDeserializer = AvroSerializerFactory.genericDeserializer(serializerConfig, null); + + EventStreamReader reader2 = clientFactory.createReader("r1", rg2, genericDeserializer, ReaderConfig.builder().build()); + + EventRead genEvent = reader2.readNextEvent(10000); + assertNotNull(genEvent.getEvent()); + genEvent = reader2.readNextEvent(10000); + assertNotNull(genEvent.getEvent()); + genEvent = reader2.readNextEvent(10000); + assertNotNull(genEvent.getEvent()); + // endregion + + // region read using multiplexed and generic record combination + String rg3 = "rg3" + stream; + readerGroupManager.createReaderGroup(rg3, + ReaderGroupConfig.builder().stream(NameUtils.getScopedStreamName(scope, stream)).disableAutomaticCheckpoints().build()); + + Map, AvroSchema> map2 = new HashMap<>(); + // add only two schemas + map2.put(Test1.class, schema1); + map2.put(Test2.class, schema2); + + Serializer> eitherDeserializer = + SerializerFactory.avroTypedOrGenericDeserializer(serializerConfig, map2); + + EventStreamReader> reader3 = clientFactory.createReader("r1", rg3, eitherDeserializer, ReaderConfig.builder().build()); + + EventRead> e1 = reader3.readNextEvent(10000); + assertNotNull(e1.getEvent()); + assertTrue(e1.getEvent().isLeft()); + assertTrue(e1.getEvent().getLeft() instanceof Test1); + e1 = reader3.readNextEvent(10000); + assertTrue(e1.getEvent().isLeft()); + assertTrue(e1.getEvent().getLeft() instanceof Test2); + e1 = reader3.readNextEvent(10000); + assertTrue(e1.getEvent().isRight()); + + reader.close(); + reader2.close(); + reader3.close(); + readerGroupManager.close(); + //endregion + + client.removeGroup(groupId); + streamManager.sealStream(scope, stream); + streamManager.deleteStream(scope, stream); + } + } + } + + @Test + public void testProtobuf() throws IOException { + testProtobuf(true); + testProtobuf(false); + } + + private void testProtobuf(boolean encodeHeaders) throws IOException { + // create stream + String scope = "scope"; + String stream = "protobuf" + encodeHeaders; + String groupId = NameUtils.getScopedStreamName(scope, stream); + + try (StreamManager streamManager = new StreamManagerImpl(clientConfig)) { + streamManager.createScope(scope); + streamManager.createStream(scope, stream, StreamConfiguration.builder().scalingPolicy(ScalingPolicy.fixed(1)).build()); + SerializationFormat serializationFormat = SerializationFormat.Protobuf; + client.addGroup(groupId, new GroupProperties(serializationFormat, + Compatibility.allowAny(), false, ImmutableMap.of())); + + ProtobufSchema schema = ProtobufSchema.of(ProtobufTest.Message1.class); + + SerializerConfig serializerConfig = SerializerConfig.builder() + .groupId(groupId) + 
.createGroup(serializationFormat, + Compatibility.allowAny(), + false) + .registerSchema(true) + .registryClient(client) + .writeEncodingHeader(encodeHeaders) + .build(); + // region writer + Serializer serializer = ProtobufSerializerFactory.serializer(serializerConfig, schema); + try (EventStreamClientFactory clientFactory = EventStreamClientFactory.withScope(scope, clientConfig)) { + + EventStreamWriter writer = clientFactory.createEventWriter(stream, serializer, EventWriterConfig.builder().build()); + writer.writeEvent(ProtobufTest.Message1.newBuilder().setName("test").setInternal(ProtobufTest.InternalMessage.newBuilder().setValue(ProtobufTest.InternalMessage.Values.val1).build()).build()).join(); + writer.close(); + // endregion + + // region read into specific schema + ReaderGroupManager readerGroupManager = new ReaderGroupManagerImpl(scope, clientConfig, new SocketConnectionFactoryImpl(clientConfig)); + String readerGroupName = "rg" + stream; + readerGroupManager.createReaderGroup(readerGroupName, + ReaderGroupConfig.builder().stream(NameUtils.getScopedStreamName(scope, stream)).disableAutomaticCheckpoints().build()); + + Serializer deserializer = ProtobufSerializerFactory.deserializer(serializerConfig, schema); + + EventStreamReader reader = clientFactory.createReader("r1", readerGroupName, deserializer, ReaderConfig.builder().build()); + + EventRead event = reader.readNextEvent(10000); + assertNotNull(event.getEvent()); + reader.close(); + // endregion + + // region generic read + // 1. try without passing the schema. writer schema will be used to read for encoding header and latest schema will be used for non encoded header + String rg2 = "rg2" + stream; + readerGroupManager.createReaderGroup(rg2, + ReaderGroupConfig.builder().stream(NameUtils.getScopedStreamName(scope, stream)).disableAutomaticCheckpoints().build()); + + ProtobufSchema readerSchema = encodeHeaders ? null : + ProtobufSchema.from(client.getLatestSchemaVersion(groupId, null).getSchemaInfo()); + Serializer genericDeserializer = ProtobufSerializerFactory.genericDeserializer(serializerConfig, readerSchema); + + EventStreamReader reader2 = clientFactory.createReader("r1", rg2, genericDeserializer, ReaderConfig.builder().build()); + + EventRead event2 = reader2.readNextEvent(10000); + assertNotNull(event2.getEvent()); + + reader2.close(); + + // 2. try with passing the schema. 
reader schema will be used to read + String rg3 = "rg3" + encodeHeaders; + readerGroupManager.createReaderGroup(rg3, + ReaderGroupConfig.builder().stream(NameUtils.getScopedStreamName(scope, stream)).disableAutomaticCheckpoints().build()); + + Path path = Paths.get("src/test/resources/proto/protobufTest.pb"); + byte[] schemaBytes = Files.readAllBytes(path); + DescriptorProtos.FileDescriptorSet descriptorSet = DescriptorProtos.FileDescriptorSet.parseFrom(schemaBytes); + + ProtobufSchema schema2 = ProtobufSchema.of(ProtobufTest.Message1.getDescriptor().getFullName(), descriptorSet); + genericDeserializer = ProtobufSerializerFactory.genericDeserializer(serializerConfig, schema2); + + reader2 = clientFactory.createReader("r1", rg3, genericDeserializer, ReaderConfig.builder().build()); + + event2 = reader2.readNextEvent(10000); + assertNotNull(event2.getEvent()); + + reader2.close(); + // endregion + + if (encodeHeaders) { + String rg4 = "rg4" + System.currentTimeMillis(); + readerGroupManager.createReaderGroup(rg4, + ReaderGroupConfig.builder().stream(NameUtils.getScopedStreamName(scope, stream)).disableAutomaticCheckpoints().build()); + + Serializer jsonDes = SerializerFactory.deserializeAsJsonString(serializerConfig); + + EventStreamReader jsonReader = clientFactory.createReader("r1", rg4, jsonDes, ReaderConfig.builder().build()); + + EventRead jsonEvent = jsonReader.readNextEvent(10000); + assertNotNull(jsonEvent.getEvent()); + + jsonReader.close(); + } + readerGroupManager.close(); + + client.removeGroup(groupId); + streamManager.sealStream(scope, stream); + streamManager.deleteStream(scope, stream); + } + } + } + + @Test + public void testProtobufMultiplexed() throws IOException { + // create stream + String scope = "scope"; + String stream = "protomultiplexed"; + String groupId = NameUtils.getScopedStreamName(scope, stream); + + try (StreamManager streamManager = new StreamManagerImpl(clientConfig)) { + streamManager.createScope(scope); + streamManager.createStream(scope, stream, StreamConfiguration.builder().scalingPolicy(ScalingPolicy.fixed(1)).build()); + SerializationFormat serializationFormat = SerializationFormat.Protobuf; + client.addGroup(groupId, new GroupProperties(serializationFormat, + Compatibility.allowAny(), true)); + + ProtobufSchema schema1 = ProtobufSchema.ofGeneratedMessageV3(ProtobufTest.Message1.class); + ProtobufSchema schema2 = ProtobufSchema.ofGeneratedMessageV3(ProtobufTest.Message2.class); + ProtobufSchema schema3 = ProtobufSchema.ofGeneratedMessageV3(ProtobufTest.Message3.class); + + SerializerConfig serializerConfig = SerializerConfig.builder() + .groupId(groupId) + .createGroup(serializationFormat, + Compatibility.allowAny(), + true) + .registerSchema(true) + .registryClient(client) + .build(); + // region writer + Map, ProtobufSchema> map = new HashMap<>(); + map.put(ProtobufTest.Message1.class, schema1); + map.put(ProtobufTest.Message2.class, schema2); + map.put(ProtobufTest.Message3.class, schema3); + Serializer serializer = SerializerFactory.protobufMultiTypeSerializer(serializerConfig, map); + try (EventStreamClientFactory clientFactory = EventStreamClientFactory.withScope(scope, clientConfig)) { + + EventStreamWriter writer = clientFactory.createEventWriter(stream, serializer, EventWriterConfig.builder().build()); + writer.writeEvent(ProtobufTest.Message1.newBuilder().setName("test").setInternal(ProtobufTest.InternalMessage.newBuilder().setValue(ProtobufTest.InternalMessage.Values.val1).build()).build()).join(); + 
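+ // Message2 and Message3 go through the same multiplexed serializer, which looks up the
+ // schema registered for each event's concrete class in the map built above.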
writer.writeEvent(ProtobufTest.Message2.newBuilder().setName("test").setField1(0).build()).join(); + writer.writeEvent(ProtobufTest.Message3.newBuilder().setName("test").setField1(0).setField2(1).build()).join(); + writer.close(); + // endregion + + // region read into specific schema + ReaderGroupManager readerGroupManager = new ReaderGroupManagerImpl(scope, clientConfig, new SocketConnectionFactoryImpl(clientConfig)); + String rg = "rg" + stream; + readerGroupManager.createReaderGroup(rg, + ReaderGroupConfig.builder().stream(NameUtils.getScopedStreamName(scope, stream)).disableAutomaticCheckpoints().build()); + + Serializer deserializer = SerializerFactory.protobufMultiTypeDeserializer(serializerConfig, map); + + EventStreamReader reader = clientFactory.createReader("r1", rg, deserializer, ReaderConfig.builder().build()); + + EventRead event = reader.readNextEvent(10000); + assertNotNull(event.getEvent()); + assertTrue(event.getEvent() instanceof ProtobufTest.Message1); + event = reader.readNextEvent(10000); + assertNotNull(event.getEvent()); + assertTrue(event.getEvent() instanceof ProtobufTest.Message2); + event = reader.readNextEvent(10000); + assertNotNull(event.getEvent()); + assertTrue(event.getEvent() instanceof ProtobufTest.Message3); + + reader.close(); + // endregion + // region read into writer schema + String rg2 = "rg2" + stream; + readerGroupManager.createReaderGroup(rg2, + ReaderGroupConfig.builder().stream(NameUtils.getScopedStreamName(scope, stream)).disableAutomaticCheckpoints().build()); + + Serializer genericDeserializer = ProtobufSerializerFactory.genericDeserializer(serializerConfig, null); + + EventStreamReader reader2 = clientFactory.createReader("r1", rg2, genericDeserializer, ReaderConfig.builder().build()); + + EventRead genEvent = reader2.readNextEvent(10000); + assertNotNull(genEvent.getEvent()); + genEvent = reader2.readNextEvent(10000); + assertNotNull(genEvent.getEvent()); + genEvent = reader2.readNextEvent(10000); + + reader2.close(); + // endregion + + // region read using multiplexed and generic record combination + String rg3 = "rg3" + stream; + readerGroupManager.createReaderGroup(rg3, + ReaderGroupConfig.builder().stream(NameUtils.getScopedStreamName(scope, stream)).disableAutomaticCheckpoints().build()); + + Map, ProtobufSchema> map2 = new HashMap<>(); + // add only two schemas + map2.put(ProtobufTest.Message1.class, schema1); + map2.put(ProtobufTest.Message2.class, schema2); + + Serializer> eitherDeserializer = + SerializerFactory.protobufTypedOrGenericDeserializer(serializerConfig, map2); + + EventStreamReader> reader3 = clientFactory.createReader("r1", rg3, eitherDeserializer, ReaderConfig.builder().build()); + + EventRead> e1 = reader3.readNextEvent(10000); + assertNotNull(e1.getEvent()); + assertTrue(e1.getEvent().isLeft()); + assertTrue(e1.getEvent().getLeft() instanceof ProtobufTest.Message1); + e1 = reader3.readNextEvent(10000); + assertTrue(e1.getEvent().isLeft()); + assertTrue(e1.getEvent().getLeft() instanceof ProtobufTest.Message2); + e1 = reader3.readNextEvent(10000); + assertTrue(e1.getEvent().isRight()); + + reader3.close(); + readerGroupManager.close(); + //endregion + + client.removeGroup(groupId); + streamManager.sealStream(scope, stream); + streamManager.deleteStream(scope, stream); + } + } + } + + @Test + public void testJson() throws IOException { + testJson(true); + testJson(false); + } + + private void testJson(boolean encodeHeaders) throws IOException { + // create stream + String scope = "scope"; + String stream = "json" + 
encodeHeaders; + String groupId = NameUtils.getScopedStreamName(scope, stream); + + try (StreamManager streamManager = new StreamManagerImpl(clientConfig)) { + streamManager.createScope(scope); + streamManager.createStream(scope, stream, StreamConfiguration.builder().scalingPolicy(ScalingPolicy.fixed(1)).build()); + SerializationFormat serializationFormat = SerializationFormat.Json; + client.addGroup(groupId, new GroupProperties(serializationFormat, + Compatibility.allowAny(), false, ImmutableMap.of())); + + JSONSchema schema = JSONSchema.of(DerivedUser2.class); + + SerializerConfig serializerConfig = SerializerConfig.builder() + .groupId(groupId) + .createGroup(serializationFormat, + Compatibility.allowAny(), + false) + .registerSchema(true) + .registryClient(client) + .writeEncodingHeader(encodeHeaders) + .build(); + // region writer + Serializer serializer = JsonSerializerFactory.serializer(serializerConfig, schema); + try (EventStreamClientFactory clientFactory = EventStreamClientFactory.withScope(scope, clientConfig)) { + + EventStreamWriter writer = clientFactory.createEventWriter(stream, serializer, EventWriterConfig.builder().build()); + writer.writeEvent(new DerivedUser2("name", new Address("street", "city"), 30, "user2")).join(); + writer.close(); + // endregion + + // region read into specific schema + ReaderGroupManager readerGroupManager = new ReaderGroupManagerImpl(scope, clientConfig, new SocketConnectionFactoryImpl(clientConfig)); + String readerGroupName = "rg" + stream; + readerGroupManager.createReaderGroup(readerGroupName, + ReaderGroupConfig.builder().stream(NameUtils.getScopedStreamName(scope, stream)).disableAutomaticCheckpoints().build()); + + Serializer deserializer = JsonSerializerFactory.deserializer(serializerConfig, schema); + + EventStreamReader reader = clientFactory.createReader("r1", readerGroupName, deserializer, ReaderConfig.builder().build()); + + EventRead event = reader.readNextEvent(10000); + assertNotNull(event.getEvent()); + reader.close(); + // endregion + + // region generic read + String rg2 = "rg2" + stream; + readerGroupManager.createReaderGroup(rg2, + ReaderGroupConfig.builder().stream(NameUtils.getScopedStreamName(scope, stream)).disableAutomaticCheckpoints().build()); + + Serializer genericDeserializer = JsonSerializerFactory.genericDeserializer(serializerConfig); + + EventStreamReader reader2 = clientFactory.createReader("r1", rg2, genericDeserializer, ReaderConfig.builder().build()); + + EventRead event2 = reader2.readNextEvent(10000); + assertNotNull(event2.getEvent()); + reader2.close(); + + String rg3 = "rg3" + stream; + readerGroupManager.createReaderGroup(rg3, + ReaderGroupConfig.builder().stream(NameUtils.getScopedStreamName(scope, stream)).disableAutomaticCheckpoints().build()); + + Serializer> genericDeserializer2 = SerializerFactory.jsonGenericDeserializer(serializerConfig); + + EventStreamReader> reader3 = clientFactory.createReader("r1", rg3, genericDeserializer2, ReaderConfig.builder().build()); + + EventRead> event3 = reader3.readNextEvent(10000); + assertNotNull(event3.getEvent()); + WithSchema obj = event3.getEvent(); + + assertEquals(obj.hasJsonSchema(), encodeHeaders); + if (encodeHeaders) { + assertNotNull(obj.getJsonSchema()); + } + reader3.close(); + readerGroupManager.close(); + // endregion + + client.removeGroup(groupId); + streamManager.sealStream(scope, stream); + streamManager.deleteStream(scope, stream); + } + } + } + + @Test + public void testJsonMultiplexed() throws IOException { + // create stream + String 
scope = "scope"; + String stream = "jsonmultiplexed"; + String groupId = NameUtils.getScopedStreamName(scope, stream); + + try (StreamManager streamManager = new StreamManagerImpl(clientConfig)) { + streamManager.createScope(scope); + streamManager.createStream(scope, stream, StreamConfiguration.builder().scalingPolicy(ScalingPolicy.fixed(1)).build()); + SerializationFormat serializationFormat = SerializationFormat.Json; + + JSONSchema schema1 = JSONSchema.ofBaseType(DerivedUser1.class, User.class); + JSONSchema schema2 = JSONSchema.ofBaseType(DerivedUser2.class, User.class); + + SerializerConfig serializerConfig = SerializerConfig.builder() + .groupId(groupId) + .createGroup(serializationFormat, + Compatibility.allowAny(), + true) + .registerSchema(true) + .registryClient(client) + .build(); + // region writer + Map, JSONSchema> map = new HashMap<>(); + map.put(DerivedUser1.class, schema1); + map.put(DerivedUser2.class, schema2); + Serializer serializer = SerializerFactory.jsonMultiTypeSerializer(serializerConfig, map); + try (EventStreamClientFactory clientFactory = EventStreamClientFactory.withScope(scope, clientConfig)) { + + EventStreamWriter writer = clientFactory.createEventWriter(stream, serializer, EventWriterConfig.builder().build()); + writer.writeEvent(new DerivedUser2()).join(); + writer.writeEvent(new DerivedUser1()).join(); + writer.close(); + // endregion + + // region read into specific schema + ReaderGroupManager readerGroupManager = new ReaderGroupManagerImpl(scope, clientConfig, new SocketConnectionFactoryImpl(clientConfig)); + String rg = "rg" + stream; + readerGroupManager.createReaderGroup(rg, + ReaderGroupConfig.builder().stream(NameUtils.getScopedStreamName(scope, stream)).disableAutomaticCheckpoints().build()); + + Serializer deserializer = SerializerFactory.jsonMultiTypeDeserializer(serializerConfig, map); + + EventStreamReader reader = clientFactory.createReader("r1", rg, deserializer, ReaderConfig.builder().build()); + + EventRead event = reader.readNextEvent(10000); + + assertNotNull(event.getEvent()); + assertTrue(event.getEvent() instanceof DerivedUser2); + event = reader.readNextEvent(10000); + assertNotNull(event.getEvent()); + assertTrue(event.getEvent() instanceof DerivedUser1); + reader.close(); + // endregion + + // region read into writer schema + String rg2 = "rg2" + stream; + readerGroupManager.createReaderGroup(rg2, + ReaderGroupConfig.builder().stream(NameUtils.getScopedStreamName(scope, stream)).disableAutomaticCheckpoints().build()); + + Serializer genericDeserializer = JsonSerializerFactory.genericDeserializer(serializerConfig); + + EventStreamReader reader2 = clientFactory.createReader("r1", rg2, genericDeserializer, ReaderConfig.builder().build()); + + EventRead genEvent = reader2.readNextEvent(10000); + assertNotNull(genEvent.getEvent()); + genEvent = reader2.readNextEvent(10000); + assertNotNull(genEvent.getEvent()); + reader2.close(); + // endregion + + // region read using multiplexed and generic record combination + String rg3 = "rg3" + stream; + readerGroupManager.createReaderGroup(rg3, + ReaderGroupConfig.builder().stream(NameUtils.getScopedStreamName(scope, stream)).disableAutomaticCheckpoints().build()); + + Map, JSONSchema> map2 = new HashMap<>(); + // add only one schema + map2.put(DerivedUser1.class, schema1); + + Serializer>> eitherDeserializer = + SerializerFactory.jsonTypedOrGenericDeserializer(serializerConfig, map2); + + EventStreamReader>> reader3 = clientFactory.createReader("r1", rg3, eitherDeserializer, 
ReaderConfig.builder().build()); + + EventRead>> e1 = reader3.readNextEvent(10000); + assertNotNull(e1.getEvent()); + assertTrue(e1.getEvent().isRight()); + e1 = reader3.readNextEvent(10000); + assertTrue(e1.getEvent().isLeft()); + assertTrue(e1.getEvent().getLeft() instanceof DerivedUser1); + reader3.close(); + readerGroupManager.close(); + //endregion + + client.removeGroup(groupId); + streamManager.sealStream(scope, stream); + streamManager.deleteStream(scope, stream); + } + } + } + + @Test + @SneakyThrows + public void testMultiFormatSerializerAndDeserializer() { + String scope = "multi"; + String stream = "multi"; + String groupId = "multi"; + try (StreamManager streamManager = new StreamManagerImpl(clientConfig)) { + streamManager.createScope(scope); + streamManager.createStream(scope, stream, StreamConfiguration.builder().scalingPolicy(ScalingPolicy.fixed(1)).build()); + + try (EventStreamClientFactory clientFactory = EventStreamClientFactory.withScope(scope, clientConfig)) { + // region avro + SerializerConfig serializerConfig = SerializerConfig.builder() + .groupId(groupId) + .createGroup(SerializationFormat.Any, Compatibility.allowAny(), true) + .registerSchema(true) + .registryClient(client) + .build(); + + AvroSchema avro = AvroSchema.of(Test1.class); + Serializer serializer = AvroSerializerFactory.serializer(serializerConfig, avro); + EventStreamWriter avroWriter = clientFactory.createEventWriter(stream, serializer, EventWriterConfig.builder().build()); + avroWriter.writeEvent(new Test1("a", 1)).join(); + // endregion + + // region proto + ProtobufSchema schema = ProtobufSchema.of(ProtobufTest.Message1.class); + Serializer serializer2 = ProtobufSerializerFactory.serializer(serializerConfig, schema); + + EventStreamWriter protoWriter = clientFactory + .createEventWriter(stream, serializer2, EventWriterConfig.builder().build()); + + ProtobufTest.Message1 type1 = ProtobufTest.Message1.newBuilder().setName("test") + .setInternal(ProtobufTest.InternalMessage.newBuilder().setValue(ProtobufTest.InternalMessage.Values.val3).build()) + .build(); + protoWriter.writeEvent(type1).join(); + // endregion + + // region write json + JSONSchema jsonSchema = JSONSchema.of(DerivedUser1.class); + Serializer serializer3 = JsonSerializerFactory.serializer(serializerConfig, jsonSchema); + // endregion + + EventStreamWriter jsonWriter = clientFactory.createEventWriter(stream, serializer3, EventWriterConfig.builder().build()); + jsonWriter.writeEvent(new DerivedUser1("json", new Address("a", "b"), 1, "users")).join(); + + // read using multiformat deserializer + Serializer> deserializer = SerializerFactory.deserializerWithSchema(serializerConfig); + // region read into specific schema + ReaderGroupManager readerGroupManager = new ReaderGroupManagerImpl(scope, clientConfig, new SocketConnectionFactoryImpl(clientConfig)); + String rg = "rg" + stream + System.currentTimeMillis(); + readerGroupManager.createReaderGroup(rg, + ReaderGroupConfig.builder().stream(NameUtils.getScopedStreamName(scope, stream)).disableAutomaticCheckpoints().build()); + + EventStreamReader> reader = clientFactory.createReader("r1", rg, deserializer, ReaderConfig.builder().build()); + + // read 3 events + EventRead> event1 = reader.readNextEvent(1000); + String jsonString = event1.getEvent().getJsonString(); + assertTrue(event1.getEvent().getObject() instanceof GenericRecord); + EventRead> event2 = reader.readNextEvent(1000); + assertTrue(event2.getEvent().getObject() instanceof DynamicMessage); + EventRead> event3 = 
reader.readNextEvent(1000); + assertTrue(event3.getEvent().getObject() instanceof JsonNode); + + // write using genericserializer + Serializer> genericSerializer = SerializerFactory.serializerWithSchema(serializerConfig); + + EventStreamWriter> genericWriter = clientFactory + .createEventWriter(stream, genericSerializer, EventWriterConfig.builder().build()); + + genericWriter.writeEvent(event1.getEvent()); + genericWriter.writeEvent(event2.getEvent()); + genericWriter.writeEvent(event3.getEvent()); + // endregion + + // read these events back + event1 = reader.readNextEvent(1000); + assertTrue(event1.getEvent().getObject() instanceof GenericRecord); + event2 = reader.readNextEvent(1000); + assertTrue(event2.getEvent().getObject() instanceof DynamicMessage); + event3 = reader.readNextEvent(1000); + assertTrue(event3.getEvent().getObject() instanceof JsonNode); + + client.removeGroup(groupId); + streamManager.sealStream(scope, stream); + streamManager.deleteStream(scope, stream); + } + } + } + + @Data + @AllArgsConstructor + @NoArgsConstructor + static class TestClass { + private String test; + } + + @Test + public void testWithSchema() { + String scope = "withSchema"; + String stream = "withSchema"; + + try (StreamManager streamManager = new StreamManagerImpl(clientConfig)) { + streamManager.createScope(scope); + streamManager.createStream(scope, stream, StreamConfiguration.builder().scalingPolicy(ScalingPolicy.fixed(1)).build()); + + SerializerConfig serializerConfig = SerializerConfig.builder() + .namespace(scope) + .groupId(stream) + .createGroup(SerializationFormat.Json, + Compatibility.allowAny(), + false) + .registerSchema(true) + .registryClient(client) + .build(); + + // region write + Serializer serializer = JsonSerializerFactory.serializer(serializerConfig, JSONSchema.of(TestClass.class)); + + try (EventStreamClientFactory clientFactory = EventStreamClientFactory.withScope(scope, clientConfig)) { + + EventStreamWriter streamWriter = clientFactory.createEventWriter(stream, serializer, EventWriterConfig.builder().build()); + streamWriter.writeEvent(new TestClass("a")).join(); + // endregion + + // region read + Serializer> deserializer = SerializerFactory.deserializerWithSchema(serializerConfig); + Function eventContentSupplier = x -> ((WithSchema) x.getEvent()).getJsonString(); + + ReaderGroupManager readerGroupManager = new ReaderGroupManagerImpl(scope, clientConfig, + new SocketConnectionFactoryImpl(clientConfig)); + String rg = "rg" + stream; + readerGroupManager.createReaderGroup(rg, + ReaderGroupConfig.builder().stream(NameUtils.getScopedStreamName(scope, stream)).disableAutomaticCheckpoints().build()); + + EventStreamReader> reader = clientFactory.createReader("r", rg, deserializer, ReaderConfig.builder().build()); + EventRead> eventRead = reader.readNextEvent(1000L); + String eventContent = eventContentSupplier.apply(eventRead); + assertFalse(Strings.isNullOrEmpty(eventContent)); + reader.close(); + + streamWriter.writeEvent(new TestClass("a")).join(); + Serializer deserializer2 = SerializerFactory.deserializeAsJsonString(serializerConfig); + EventStreamReader reader2 = clientFactory.createReader("r2", rg, deserializer2, ReaderConfig.builder().build()); + EventRead eventRead2 = reader2.readNextEvent(1000L); + assertFalse(Strings.isNullOrEmpty(eventRead2.getEvent())); + reader2.close(); + + streamWriter.writeEvent(new TestClass("a")).join(); + Serializer deserializer3 = SerializerFactory.deserializeAsT(serializerConfig, (x, y) -> y.toString()); + EventStreamReader reader3 = 
clientFactory.createReader("r3", rg, deserializer3, ReaderConfig.builder().build()); + EventRead eventRead3 = reader3.readNextEvent(1000L); + assertFalse(Strings.isNullOrEmpty(eventRead3.getEvent())); + } + } + } +} + diff --git a/test/src/test/java/io/pravega/schemaregistry/integrationtest/TestPravegaEndToEnd.java b/test/src/test/java/io/pravega/schemaregistry/integrationtest/TestPravegaEndToEnd.java new file mode 100644 index 000000000..82b293e0a --- /dev/null +++ b/test/src/test/java/io/pravega/schemaregistry/integrationtest/TestPravegaEndToEnd.java @@ -0,0 +1,35 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.integrationtest; + +import io.pravega.client.ClientConfig; +import io.pravega.schemaregistry.pravegastandalone.PravegaStandaloneUtils; +import io.pravega.schemaregistry.storage.SchemaStore; +import io.pravega.schemaregistry.storage.SchemaStoreFactory; +import org.junit.Before; + +import java.net.URI; + +public class TestPravegaEndToEnd extends TestEndToEnd { + private ClientConfig clientConfig; + + @Before + public void startPravega() { + PravegaStandaloneUtils pravegaStandaloneUtils = PravegaStandaloneUtils.startPravega(); + clientConfig = ClientConfig.builder().controllerURI(URI.create(pravegaStandaloneUtils.getControllerURI())).build(); + } + + SchemaStore getStore() { + if (clientConfig == null) { + startPravega(); + } + return SchemaStoreFactory.createPravegaStore(clientConfig, executor); + } +} diff --git a/test/src/test/resources/proto/protobufTest.pb b/test/src/test/resources/proto/protobufTest.pb new file mode 100644 index 000000000..b0226bf03 Binary files /dev/null and b/test/src/test/resources/proto/protobufTest.pb differ