diff --git a/build.gradle b/build.gradle
index 85fbef6c4..ab723b7dd 100644
--- a/build.gradle
+++ b/build.gradle
@@ -215,6 +215,40 @@ def getProjectVersion() {
     return ver
 }
 
+project('serializers') {
+    dependencies {
+        compile project(':common')
+        compile project(':client')
+        compile group: 'org.apache.avro', name: 'avro', version: avroVersion
+        compile group: 'com.google.protobuf', name: 'protobuf-java', version: protobufProtocVersion
+        compile group: 'com.google.protobuf', name: 'protobuf-java-util', version: protobufUtilVersion
+        compile group: 'io.pravega', name: 'pravega-client', version: pravegaVersion
+        compile group: 'org.xerial.snappy', name: 'snappy-java', version: snappyVersion
+        compile group: 'com.fasterxml.jackson.module', name: 'jackson-module-jsonSchema', version: jacksonVersion
+        compile group: 'com.github.everit-org.json-schema', name: 'org.everit.json.schema', version: everitVersion
+        testCompile group: 'org.slf4j', name: 'log4j-over-slf4j', version: slf4jApiVersion
+        testCompile group: 'ch.qos.logback', name: 'logback-classic', version: qosLogbackVersion
+        testCompile group: 'io.pravega', name: 'pravega-test-testcommon', version: pravegaVersion
+    }
+
+    javadoc {
+        title = "Serializers"
+        dependsOn delombok
+        source = delombok.outputDir
+        failOnError = true
+        exclude "**/impl/**"
+        options.addBooleanOption("Xdoclint:all,-reference", true)
+    }
+
+    jar {
+        manifest {}
+
+        from {
+            configurations.compile.collect { it.isDirectory() ? it : zipTree(it) }
+        }
+    }
+}
+
 project('server') {
     sourceSets {
         main.resources.srcDirs += "$projectDir/src/conf"
@@ -311,6 +345,15 @@ distributions {
             from 'NOTICE'
         }
     }
+    serializers {
+        baseName = "schema-registry-serializers"
+        contents {
+            from { project(":serializers").configurations.runtime }
+            from { project(":serializers").configurations.runtime.allArtifacts.files }
+            from 'LICENSE'
+            from 'NOTICE'
+        }
+    }
 }
 
 task sourceCopy(type: Copy) {
@@ -350,6 +393,7 @@ task publishAllJars() {
     dependsOn ':common:publish'
     dependsOn ':contract:publish'
     dependsOn ':server:publish'
+    dependsOn ':serializers:publish'
 }
 
 task prepareRegistryImage(type: Copy) {
diff --git a/checkstyle/spotbugs-exclude.xml b/checkstyle/spotbugs-exclude.xml
index 4500e3dec..31efb43d4 100644
--- a/checkstyle/spotbugs-exclude.xml
+++ b/checkstyle/spotbugs-exclude.xml
@@ -5,6 +5,9 @@
+
+
+
diff --git a/gradle.properties b/gradle.properties
index eb0c2841f..3493130ec 100644
--- a/gradle.properties
+++ b/gradle.properties
@@ -26,7 +26,7 @@ gradleLombokPluginVersion=3.2.0
 gradleSshPluginVersion=2.9.0
 guavaVersion=28.1-jre
 javaxServletApiVersion=4.0.0
-jacksonVersion=2.10.3
+jacksonVersion=2.11.1
 everitVersion=1.12.1
 javaxwsrsApiVersion=2.1
 jaxbVersion=2.3.0
@@ -50,7 +50,7 @@ pravegaVersion=0.8.0-2591.37c5082-SNAPSHOT
 pravegaKeyCloakVersion=0.7.0
 
 # Version and base tags can be overridden at build time
-schemaregistryVersion=0.0.1-SNAPSHOT
+schemaregistryVersion=0.1.0-SNAPSHOT
 schemaregistryBaseTag=pravega/schemaregistry
 
 # Pravega Signing Key
diff --git a/serializers/src/main/java/io/pravega/schemaregistry/codec/Codec.java b/serializers/src/main/java/io/pravega/schemaregistry/codec/Codec.java
new file mode 100644
index 000000000..b2e1f26ff
--- /dev/null
+++ b/serializers/src/main/java/io/pravega/schemaregistry/codec/Codec.java
@@ -0,0 +1,30 @@
+/**
+ * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ */
+package io.pravega.schemaregistry.codec;
+
+import io.pravega.schemaregistry.contract.data.CodecType;
+import io.pravega.schemaregistry.contract.data.EncodingInfo;
+
+/**
+ * Codec interface that extends the {@link Encoder} and {@link Decoder} interfaces and so defines methods to both
+ * encode and decode data. The Encoder interface supplies a codec type and an encoding function; the Decoder
+ * interface defines a decoding function.
+ */
+public interface Codec extends Encoder, Decoder {
+    /**
+     * Name identifying the Codec Type.
+     * This name should be the same as the {@link CodecType#getName()} that is registered for the group in the
+     * schema registry service.
+     * The deserializers find the decoder for encoded data by matching this name against
+     * {@link EncodingInfo#getCodecType()}.
+     *
+     * @return Name of the codec.
+     */
+    String getName();
+}
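The Codec contract is small. As an illustrative sketch only (not part of this change), a hypothetical base64 codec could look like the following; it is assumed to sit in the same `codec` package, the codec name is made up, and the `CodecType` constructor usage mirrors the built-in codecs in `Codecs.java` below:

```java
import io.pravega.schemaregistry.contract.data.CodecType;

import java.io.ByteArrayOutputStream;
import java.nio.ByteBuffer;
import java.util.Base64;
import java.util.Map;

// Hypothetical codec, shown only to illustrate the Codec contract.
class Base64Codec implements Codec {
    private static final CodecType CODEC_TYPE = new CodecType("base64"); // assumed name

    @Override
    public String getName() {
        return CODEC_TYPE.getName();
    }

    @Override
    public CodecType getCodecType() {
        return CODEC_TYPE;
    }

    @Override
    public void encode(ByteBuffer data, ByteArrayOutputStream bos) {
        // Drain the remaining bytes and write their base64 form to the stream.
        byte[] b = new byte[data.remaining()];
        data.get(b);
        byte[] encoded = Base64.getEncoder().encode(b);
        bos.write(encoded, 0, encoded.length);
    }

    @Override
    public ByteBuffer decode(ByteBuffer data, Map<String, Object> codecProperties) {
        byte[] b = new byte[data.remaining()];
        data.get(b);
        return ByteBuffer.wrap(Base64.getDecoder().decode(b));
    }
}
```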
diff --git a/serializers/src/main/java/io/pravega/schemaregistry/codec/Codecs.java b/serializers/src/main/java/io/pravega/schemaregistry/codec/Codecs.java
new file mode 100644
index 000000000..5f5155c05
--- /dev/null
+++ b/serializers/src/main/java/io/pravega/schemaregistry/codec/Codecs.java
@@ -0,0 +1,146 @@
+/**
+ * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ */
+package io.pravega.schemaregistry.codec;
+
+import com.fasterxml.jackson.databind.util.ByteBufferBackedInputStream;
+import io.pravega.schemaregistry.contract.data.CodecType;
+import lombok.Getter;
+import org.apache.commons.io.IOUtils;
+import org.xerial.snappy.Snappy;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.ByteBuffer;
+import java.util.Map;
+import java.util.zip.GZIPInputStream;
+import java.util.zip.GZIPOutputStream;
+
+/**
+ * Utility enum providing ready-made codecs for none, gzip and snappy.
+ */
+public enum Codecs {
+    None(Constants.NOOP),
+    GzipCompressor(Constants.GZIP_CODEC),
+    SnappyCompressor(Constants.SNAPPY_CODEC);
+
+    @Getter
+    private final Codec codec;
+
+    Codecs(Codec codec) {
+        this.codec = codec;
+    }
+
+    private static class Noop implements Codec {
+        private static final CodecType CODEC_TYPE_NONE = new CodecType(Constants.NONE);
+
+        @Override
+        public String getName() {
+            return CODEC_TYPE_NONE.getName();
+        }
+
+        @Override
+        public CodecType getCodecType() {
+            return CODEC_TYPE_NONE;
+        }
+
+        @Override
+        public void encode(ByteBuffer data, ByteArrayOutputStream bos) {
+            if (data.hasArray()) {
+                bos.write(data.array(), data.arrayOffset() + data.position(), data.remaining());
+            } else {
+                byte[] b = getBytes(data);
+                bos.write(b, 0, b.length);
+            }
+        }
+
+        @Override
+        public ByteBuffer decode(ByteBuffer data, Map<String, Object> codecProperties) {
+            return data;
+        }
+    }
+
+    private static class GZipCodec implements Codec {
+        private static final CodecType CODEC_TYPE_GZIP = new CodecType(Constants.APPLICATION_X_GZIP);
+
+        @Override
+        public String getName() {
+            return CODEC_TYPE_GZIP.getName();
+        }
+
+        @Override
+        public CodecType getCodecType() {
+            return CODEC_TYPE_GZIP;
+        }
+
+        @Override
+        public void encode(ByteBuffer data, ByteArrayOutputStream bos) throws IOException {
+            byte[] b = data.hasArray() ? data.array() : getBytes(data);
+            int offset = data.hasArray() ? data.arrayOffset() + data.position() : 0;
+            try (GZIPOutputStream gzipOS = new GZIPOutputStream(bos)) {
+                gzipOS.write(b, offset, data.remaining());
+            }
+        }
+
+        @Override
+        public ByteBuffer decode(ByteBuffer data, Map<String, Object> codecProperties) throws IOException {
+            InputStream bis = new ByteBufferBackedInputStream(data);
+            return ByteBuffer.wrap(IOUtils.toByteArray(new GZIPInputStream(bis)));
+        }
+    }
+
+    private static byte[] getBytes(ByteBuffer data) {
+        byte[] b = new byte[data.remaining()];
+        data.get(b);
+        return b;
+    }
+
+    private static class SnappyCodec implements Codec {
+        private static final CodecType CODEC_TYPE_SNAPPY = new CodecType(Constants.APPLICATION_X_SNAPPY_FRAMED);
+
+        @Override
+        public String getName() {
+            return CODEC_TYPE_SNAPPY.getName();
+        }
+
+        @Override
+        public CodecType getCodecType() {
+            return CODEC_TYPE_SNAPPY;
+        }
+
+        @Override
+        public void encode(ByteBuffer data, ByteArrayOutputStream bos) throws IOException {
+            int capacity = Snappy.maxCompressedLength(data.remaining());
+            byte[] encoded = new byte[capacity];
+
+            byte[] b = data.hasArray() ? data.array() : getBytes(data);
+            int offset = data.hasArray() ? data.arrayOffset() + data.position() : 0;
+            int size = Snappy.compress(b, offset, data.remaining(), encoded, 0);
+            bos.write(encoded, 0, size);
+        }
+
+        @Override
+        public ByteBuffer decode(ByteBuffer data, Map<String, Object> codecProperties) throws IOException {
+            byte[] b = data.hasArray() ? data.array() : getBytes(data);
+            int offset = data.hasArray() ? data.arrayOffset() + data.position() : 0;
+
+            ByteBuffer decoded = ByteBuffer.allocate(Snappy.uncompressedLength(b, offset, data.remaining()));
+            Snappy.uncompress(b, offset, data.remaining(), decoded.array(), 0);
+            return decoded;
+        }
+    }
+
+    static class Constants {
+        static final Noop NOOP = new Noop();
+        static final GZipCodec GZIP_CODEC = new GZipCodec();
+        static final SnappyCodec SNAPPY_CODEC = new SnappyCodec();
+        static final String NONE = "";
+        static final String APPLICATION_X_GZIP = "application/x-gzip";
+        static final String APPLICATION_X_SNAPPY_FRAMED = "application/x-snappy-framed";
+    }
+}
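A quick round-trip through the built-in gzip codec, using only the API shown above:

```java
import io.pravega.schemaregistry.codec.Codec;
import io.pravega.schemaregistry.codec.Codecs;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.Collections;

public class CodecRoundTrip {
    public static void main(String[] args) throws IOException {
        Codec gzip = Codecs.GzipCompressor.getCodec();

        // Encode: the codec writes the compressed form of the buffer into the stream.
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        gzip.encode(ByteBuffer.wrap("hello".getBytes(StandardCharsets.UTF_8)), bos);

        // Decode: pass the encoded bytes back; codec properties are empty here.
        ByteBuffer decoded = gzip.decode(ByteBuffer.wrap(bos.toByteArray()), Collections.emptyMap());
        System.out.println(StandardCharsets.UTF_8.decode(decoded)); // prints "hello"
    }
}
```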
diff --git a/serializers/src/main/java/io/pravega/schemaregistry/codec/Decoder.java b/serializers/src/main/java/io/pravega/schemaregistry/codec/Decoder.java
new file mode 100644
index 000000000..5c0d7f3a8
--- /dev/null
+++ b/serializers/src/main/java/io/pravega/schemaregistry/codec/Decoder.java
@@ -0,0 +1,31 @@
+/**
+ * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ */
+package io.pravega.schemaregistry.codec;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.Map;
+
+/**
+ * Decoder interface that defines a method to decode data.
+ */
+@FunctionalInterface
+public interface Decoder {
+    /**
+     * Implementation should decode the remaining bytes in the buffer and return a new ByteBuffer that includes
+     * the decoded data at its current position.
+     *
+     * @param data            encoded ByteBuffer to decode.
+     * @param codecProperties codec properties.
+     * @return decoded ByteBuffer with position set to the start of decoded data.
+     * @throws IOException can be thrown while reading from or writing to byte buffers.
+     */
+    ByteBuffer decode(ByteBuffer data, Map<String, Object> codecProperties) throws IOException;
+}
diff --git a/serializers/src/main/java/io/pravega/schemaregistry/codec/Encoder.java b/serializers/src/main/java/io/pravega/schemaregistry/codec/Encoder.java
new file mode 100644
index 000000000..e6e9764e8
--- /dev/null
+++ b/serializers/src/main/java/io/pravega/schemaregistry/codec/Encoder.java
@@ -0,0 +1,40 @@
+/**
+ * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ */
+package io.pravega.schemaregistry.codec;
+
+import io.pravega.schemaregistry.contract.data.CodecType;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+/**
+ * Defines methods to encode data.
+ */
+public interface Encoder {
+    /**
+     * Codec type for the encoder.
+     *
+     * @return Codec Type for the encoder.
+     */
+    CodecType getCodecType();
+
+    /**
+     * Implementation should encode the remaining bytes in the buffer and write the encoded data into the
+     * supplied output stream.
+     *
+     * The implementation may optionally call flush or close on the output stream with no consequence.
+     *
+     * @param data         ByteBuffer to encode.
+     * @param outputStream ByteArrayOutputStream where the encoded data should be written.
+     * @throws IOException IOException can be thrown while reading from or writing to byte buffers.
+     */
+    void encode(ByteBuffer data, ByteArrayOutputStream outputStream) throws IOException;
+}
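Since Decoder is a `@FunctionalInterface`, an ad-hoc decoder can be written as a lambda. A hypothetical base64 decoder, for illustration only:

```java
import io.pravega.schemaregistry.codec.Decoder;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Base64;

public class LambdaDecoder {
    public static void main(String[] args) throws IOException {
        // A lambda satisfies Decoder's single abstract method; the properties map is ignored here.
        Decoder base64 = (data, codecProperties) -> {
            byte[] b = new byte[data.remaining()];
            data.get(b);
            return ByteBuffer.wrap(Base64.getDecoder().decode(b));
        };

        ByteBuffer encoded = ByteBuffer.wrap(Base64.getEncoder().encode("hi".getBytes()));
        System.out.println(new String(base64.decode(encoded, java.util.Collections.emptyMap()).array()));
    }
}
```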
diff --git a/serializers/src/main/java/io/pravega/schemaregistry/schemas/AvroSchema.java b/serializers/src/main/java/io/pravega/schemaregistry/schemas/AvroSchema.java
new file mode 100644
index 000000000..4fccf058b
--- /dev/null
+++ b/serializers/src/main/java/io/pravega/schemaregistry/schemas/AvroSchema.java
@@ -0,0 +1,141 @@
+/**
+ * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ */
+package io.pravega.schemaregistry.schemas;
+
+import com.google.common.base.Charsets;
+import com.google.common.base.Preconditions;
+import com.google.common.collect.ImmutableMap;
+import io.pravega.schemaregistry.contract.data.SchemaInfo;
+import io.pravega.schemaregistry.contract.data.SerializationFormat;
+import lombok.Getter;
+import org.apache.avro.generic.GenericRecord;
+import org.apache.avro.reflect.ReflectData;
+import org.apache.avro.specific.SpecificData;
+import org.apache.avro.specific.SpecificRecordBase;
+
+import java.nio.ByteBuffer;
+
+/**
+ * Container class for Avro Schema.
+ *
+ * @param <T> Type of element.
+ */
+public class AvroSchema<T> implements Schema<T> {
+    @Getter
+    private final org.apache.avro.Schema schema;
+    private final SchemaInfo schemaInfo;
+    @Getter
+    private final Class<T> tClass;
+
+    private AvroSchema(org.apache.avro.Schema schema, Class<T> tClass) {
+        this.schema = schema;
+        this.schemaInfo = new SchemaInfo(schema.getFullName(),
+                SerializationFormat.Avro, getSchemaBytes(), ImmutableMap.of());
+        this.tClass = tClass;
+    }
+
+    private AvroSchema(SchemaInfo schemaInfo) {
+        String schemaString = new String(schemaInfo.getSchemaData().array(), Charsets.UTF_8);
+        this.schema = new org.apache.avro.Schema.Parser().parse(schemaString);
+        this.schemaInfo = schemaInfo;
+        this.tClass = null;
+    }
+
+    /**
+     * Method to create a typed AvroSchema for the given class. It extracts the avro schema from the class.
+     * For Avro generated classes, the schema is retrieved from the class.
+     * For POJOs the schema is extracted using avro's {@link ReflectData}.
+     *
+     * @param tClass Class whose object's schema is used.
+     * @param <T>    Type of the Java class.
+     * @return {@link AvroSchema} with generic type T that extracts and captures the avro schema.
+     */
+    public static <T> AvroSchema<T> of(Class<T> tClass) {
+        org.apache.avro.Schema schema;
+        if (SpecificRecordBase.class.isAssignableFrom(tClass)) {
+            schema = SpecificData.get().getSchema(tClass);
+        } else {
+            schema = ReflectData.get().getSchema(tClass);
+        }
+        return new AvroSchema<>(schema, tClass);
+    }
+
+    /**
+     * Method to create a typed AvroSchema of type {@link Object} from the given schema.
+     * This schema can be used to express any non record schema.
+     *
+     * @param schema Schema to use.
+     * @return Returns an AvroSchema with {@link Object} type.
+     */
+    public static AvroSchema<Object> of(org.apache.avro.Schema schema) {
+        return new AvroSchema<>(schema, Object.class);
+    }
+
+    /**
+     * Method to create a typed AvroSchema of type {@link GenericRecord} from the given schema.
+     *
+     * @param schema Schema to use.
+     * @return Returns an AvroSchema with {@link GenericRecord} type.
+     */
+    public static AvroSchema<GenericRecord> ofRecord(org.apache.avro.Schema schema) {
+        Preconditions.checkArgument(schema.getType().equals(org.apache.avro.Schema.Type.RECORD));
+        return new AvroSchema<>(schema, GenericRecord.class);
+    }
+
+    /**
+     * It is the same as {@link #of(Class)} except that it generates an AvroSchema typed as T.
+     *
+     * This is useful for supplying a map of Avro schemas for multiplexed serializers and deserializers.
+     *
+     * @param tDerived Class whose schema should be used.
+     * @param tBase    Base class for the typed AvroSchema object.
+     * @param <T>      Type of base class.
+     * @return Returns an AvroSchema with T type.
+     */
+    public static <T> AvroSchema<T> ofBaseType(Class<? extends T> tDerived, Class<T> tBase) {
+        Preconditions.checkArgument(tBase.isAssignableFrom(tDerived));
+
+        return new AvroSchema<>(ReflectData.get().getSchema(tDerived), tBase);
+    }
+
+    /**
+     * It is the same as {@link #of(Class)} except that it generates an AvroSchema typed as {@link SpecificRecordBase}.
+     *
+     * This is useful for supplying a map of Avro schemas for multiplexed serializers and deserializers.
+     *
+     * @param tClass Class whose schema should be used.
+     * @param <T>    Type of class whose schema is to be used.
+     * @return Returns an AvroSchema with {@link SpecificRecordBase} type.
+     */
+    public static <T extends SpecificRecordBase> AvroSchema<SpecificRecordBase> ofSpecificRecord(Class<T> tClass) {
+        Preconditions.checkArgument(SpecificRecordBase.class.isAssignableFrom(tClass));
+
+        return new AvroSchema<>(SpecificData.get().getSchema(tClass), SpecificRecordBase.class);
+    }
+
+    /**
+     * Method to create a typed AvroSchema of type {@link Object} from schema info.
+     *
+     * @param schemaInfo Schema info object that has schema data in binary form.
+     * @return Returns an AvroSchema with {@link Object} type.
+     */
+    public static AvroSchema<Object> from(SchemaInfo schemaInfo) {
+        return new AvroSchema<>(schemaInfo);
+    }
+
+    private ByteBuffer getSchemaBytes() {
+        return ByteBuffer.wrap(schema.toString().getBytes(Charsets.UTF_8));
+    }
+
+    @Override
+    public SchemaInfo getSchemaInfo() {
+        return schemaInfo;
+    }
+}
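A sketch of the common construction paths. `MyEvent` is a hypothetical POJO used only for illustration, not part of this change:

```java
import io.pravega.schemaregistry.schemas.AvroSchema;
import org.apache.avro.SchemaBuilder;
import org.apache.avro.generic.GenericRecord;

public class AvroSchemaExamples {
    // Hypothetical POJO; ReflectData derives its schema from the public fields.
    static class MyEvent {
        public String id;
        public long timestamp;
    }

    public static void main(String[] args) {
        // Reflection-based schema for a POJO.
        AvroSchema<MyEvent> pojoSchema = AvroSchema.of(MyEvent.class);

        // Generic-record schema built from a hand-assembled avro schema.
        org.apache.avro.Schema record = SchemaBuilder.record("Line").fields()
                .requiredString("text").endRecord();
        AvroSchema<GenericRecord> genericSchema = AvroSchema.ofRecord(record);

        System.out.println(pojoSchema.getSchemaInfo().getType());
        System.out.println(genericSchema.getSchema().getFullName());
    }
}
```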
diff --git a/serializers/src/main/java/io/pravega/schemaregistry/schemas/JSONSchema.java b/serializers/src/main/java/io/pravega/schemaregistry/schemas/JSONSchema.java
new file mode 100644
index 000000000..c0206d7e2
--- /dev/null
+++ b/serializers/src/main/java/io/pravega/schemaregistry/schemas/JSONSchema.java
@@ -0,0 +1,193 @@
+/**
+ * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ */
+package io.pravega.schemaregistry.schemas;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.module.jsonSchema.JsonSchema;
+import com.fasterxml.jackson.module.jsonSchema.JsonSchemaGenerator;
+import com.google.common.base.Charsets;
+import com.google.common.base.Preconditions;
+import com.google.common.base.Strings;
+import com.google.common.collect.ImmutableMap;
+import io.pravega.schemaregistry.contract.data.SchemaInfo;
+import io.pravega.schemaregistry.contract.data.SerializationFormat;
+import lombok.Getter;
+import org.everit.json.schema.loader.SchemaLoader;
+import org.everit.json.schema.loader.SpecificationVersion;
+import org.json.JSONObject;
+import org.json.JSONTokener;
+
+import java.nio.ByteBuffer;
+
+/**
+ * Container class for Json Schema.
+ *
+ * @param <T> Type of element.
+ */
+public class JSONSchema<T> implements Schema<T> {
+    private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
+
+    @Getter
+    private final String schemaString;
+    private final Class<T> base;
+    @Getter
+    private final Class<? extends T> derived;
+
+    @Getter
+    private final org.everit.json.schema.Schema schema;
+
+    private final SchemaInfo schemaInfo;
+
+    private JSONSchema(String name, String schemaString, Class<T> derived) {
+        this(name, schemaString, derived, derived);
+    }
+
+    private JSONSchema(String name, String schemaString, Class<T> base, Class<? extends T> derived) {
+        this.schemaString = schemaString;
+        this.schemaInfo = new SchemaInfo(name, SerializationFormat.Json, getSchemaBytes(), ImmutableMap.of());
+        this.base = base;
+        this.derived = derived;
+        this.schema = getSchemaObj(schemaString);
+    }
+
+    private JSONSchema(SchemaInfo schemaInfo, String schemaString, Class<T> derived) {
+        this.schemaString = schemaString;
+        this.schemaInfo = schemaInfo;
+        this.base = derived;
+        this.derived = derived;
+        this.schema = getSchemaObj(schemaString);
+    }
+
+    /**
+     * Method to create a typed JSONSchema for the given class. It extracts the json schema from the class.
+     * For POJOs the schema is extracted using jackson's {@link JsonSchemaGenerator}.
+     *
+     * @param tClass Class whose object's schema is used.
+     * @param <T>    Type of the Java class.
+     * @return {@link JSONSchema} with generic type T that extracts and captures the json schema.
+     */
+    public static <T> JSONSchema<T> of(Class<T> tClass) {
+        Preconditions.checkNotNull(tClass);
+        try {
+            JsonSchemaGenerator schemaGen = new JsonSchemaGenerator(OBJECT_MAPPER);
+            JsonSchema schema = schemaGen.generateSchema(tClass);
+            String schemaString = OBJECT_MAPPER.writeValueAsString(schema);
+            return new JSONSchema<>(tClass.getName(), schemaString, tClass);
+        } catch (JsonProcessingException e) {
+            throw new IllegalArgumentException("Unable to get json schema from the class", e);
+        }
+    }
+
+    /**
+     * Method to create a typed JSONSchema of type T from the given schema.
+     * This method can be used to pass a json schema that represents primitive data types.
+     *
+     * @param type   type of object identified by {@link SchemaInfo#getType()}.
+     * @param schema Schema to use.
+     * @param tClass class for the type of object
+     * @param <T>    Type of object
+     * @return Returns a JSONSchema with T type.
+     */
+    public static <T> JSONSchema<T> of(String type, JsonSchema schema, Class<T> tClass) {
+        Preconditions.checkNotNull(type);
+        Preconditions.checkNotNull(schema);
+        try {
+            String schemaString = OBJECT_MAPPER.writeValueAsString(schema);
+
+            return new JSONSchema<>(type, schemaString, tClass);
+        } catch (JsonProcessingException e) {
+            throw new IllegalArgumentException("Unable to get json schema string from the JsonSchema object", e);
+        }
+    }
+
+    /**
+     * Method to create a typed JSONSchema of type T from the given schema string.
+     *
+     * @param type         type of object identified by {@link SchemaInfo#getType()}.
+     * @param schemaString Schema string to use.
+     * @param tClass       class for the type of object
+     * @param <T>          Type of object
+     * @return Returns a JSONSchema with T type.
+     */
+    public static <T> JSONSchema<T> of(String type, String schemaString, Class<T> tClass) {
+        Preconditions.checkNotNull(type, "Type cannot be null.");
+        Preconditions.checkArgument(!Strings.isNullOrEmpty(schemaString), "Schema String cannot be null or empty.");
+        return new JSONSchema<>(type, schemaString, tClass);
+    }
+
+    /**
+     * It is the same as {@link #of(Class)} except that it generates a JSONSchema typed as the supplied base type T.
+     *
+     * This is useful for supplying a map of POJO schemas for multiplexed serializers and deserializers.
+     *
+     * @param tDerived Class whose schema should be used.
+     * @param tBase    Base class whose type is used in the JSON schema object.
+     * @param <T>      Type of base class.
+     * @return Returns a JSONSchema with T type.
+     */
+    public static <T> JSONSchema<T> ofBaseType(Class<? extends T> tDerived, Class<T> tBase) {
+        Preconditions.checkNotNull(tDerived);
+        Preconditions.checkNotNull(tBase);
+        try {
+            JsonSchemaGenerator schemaGen = new JsonSchemaGenerator(OBJECT_MAPPER);
+            JsonSchema jsonSchema = schemaGen.generateSchema(tDerived);
+            String schemaString = OBJECT_MAPPER.writeValueAsString(jsonSchema);
+
+            return new JSONSchema<>(tDerived.getName(), schemaString, tBase, tDerived);
+        } catch (JsonProcessingException e) {
+            throw new IllegalArgumentException("Unable to get json schema from the class", e);
+        }
+    }
+
+    /**
+     * Method to create a typed JSONSchema of type {@link JsonNode} from the given schema info.
+     *
+     * @param schemaInfo Schema info to translate into json schema.
+     * @return Returns a JSONSchema with {@link JsonNode} type.
+     */
+    public static JSONSchema<JsonNode> from(SchemaInfo schemaInfo) {
+        Preconditions.checkNotNull(schemaInfo);
+        String schemaString = new String(schemaInfo.getSchemaData().array(), Charsets.UTF_8);
+
+        return new JSONSchema<>(schemaInfo, schemaString, JsonNode.class);
+    }
+
+    private static org.everit.json.schema.Schema getSchemaObj(String schemaString) {
+        JSONObject rawSchema = new JSONObject(new JSONTokener(schemaString));
+        // If the $schema draft is specified, the schema loader automatically uses the correct specification
+        // version. However, $schema is not mandatory, so additionally check for the draft 4 "id" keyword:
+        // if it is present, the schema is definitely draft 4 and is loaded as such; otherwise default to
+        // draft 7, where the newer keywords were added.
+        if (rawSchema.has(SpecificationVersion.DRAFT_4.idKeyword())) {
+            return SchemaLoader.builder().useDefaults(true).schemaJson(rawSchema)
+                               .build().load().build();
+        } else {
+            return SchemaLoader.builder().useDefaults(true).schemaJson(rawSchema).draftV7Support()
+                               .build().load().build();
+        }
+    }
+
+    private ByteBuffer getSchemaBytes() {
+        return ByteBuffer.wrap(schemaString.getBytes(Charsets.UTF_8));
+    }
+
+    @Override
+    public SchemaInfo getSchemaInfo() {
+        return schemaInfo;
+    }
+
+    @Override
+    public Class<T> getTClass() {
+        return base;
+    }
+}
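Construction mirrors the Avro container. A sketch using a hypothetical POJO and a hand-written schema string (both illustrative only):

```java
import com.fasterxml.jackson.databind.JsonNode;
import io.pravega.schemaregistry.schemas.JSONSchema;

public class JsonSchemaExamples {
    // Hypothetical POJO used only for illustration.
    static class MyEvent {
        public String id;
    }

    public static void main(String[] args) {
        // Schema generated from the class via jackson's JsonSchemaGenerator.
        JSONSchema<MyEvent> pojoSchema = JSONSchema.of(MyEvent.class);

        // Schema supplied as a raw schema string; no "id" keyword, so it loads as draft 7.
        String raw = "{\"type\": \"object\", \"properties\": {\"id\": {\"type\": \"string\"}}}";
        JSONSchema<JsonNode> rawSchema = JSONSchema.of("MyEvent", raw, JsonNode.class);

        System.out.println(pojoSchema.getSchemaInfo().getType());
        System.out.println(rawSchema.getSchema());
    }
}
```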
diff --git a/serializers/src/main/java/io/pravega/schemaregistry/schemas/ProtobufSchema.java b/serializers/src/main/java/io/pravega/schemaregistry/schemas/ProtobufSchema.java
new file mode 100644
index 000000000..990763c34
--- /dev/null
+++ b/serializers/src/main/java/io/pravega/schemaregistry/schemas/ProtobufSchema.java
@@ -0,0 +1,199 @@
+/**
+ * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ */
+package io.pravega.schemaregistry.schemas;
+
+import com.google.common.collect.ImmutableMap;
+import com.google.protobuf.DynamicMessage;
+import com.google.protobuf.GeneratedMessageV3;
+import com.google.protobuf.InvalidProtocolBufferException;
+import com.google.protobuf.Message;
+import com.google.protobuf.Parser;
+import io.pravega.schemaregistry.contract.data.SchemaInfo;
+import io.pravega.schemaregistry.contract.data.SerializationFormat;
+import lombok.AccessLevel;
+import lombok.Data;
+import lombok.Getter;
+
+import java.lang.reflect.InvocationTargetException;
+import java.nio.ByteBuffer;
+
+import static com.google.protobuf.DescriptorProtos.*;
+
+/**
+ * Container class for protobuf schema.
+ * Protobuf schemas are represented using {@link FileDescriptorSet}.
+ *
+ * @param <T> Type of element.
+ */
+@Data
+public class ProtobufSchema<T extends Message> implements Schema<T> {
+    @Getter
+    private final Parser<T> parser;
+    @Getter
+    private final Class<T> tClass;
+    @Getter
+    private final FileDescriptorSet fileDescriptorSet;
+
+    private final SchemaInfo schemaInfo;
+
+    private ProtobufSchema(String name, Parser<T> parser, Class<T> tClass, FileDescriptorSet fileDescriptorSet) {
+        this.parser = parser;
+        this.tClass = tClass;
+        this.fileDescriptorSet = fileDescriptorSet;
+        this.schemaInfo = new SchemaInfo(name, SerializationFormat.Protobuf, getSchemaBytes(), ImmutableMap.of());
+    }
+
+    private ProtobufSchema(FileDescriptorSet fileDescriptorSet, SchemaInfo schemaInfo, Class<T> tClass) {
+        this.parser = null;
+        this.tClass = tClass;
+        this.fileDescriptorSet = fileDescriptorSet;
+        this.schemaInfo = schemaInfo;
+    }
+
+    private ByteBuffer getSchemaBytes() {
+        return ByteBuffer.wrap(fileDescriptorSet.toByteArray());
+    }
+
+    @Override
+    public SchemaInfo getSchemaInfo() {
+        return schemaInfo;
+    }
+
+    /**
+     * Method to generate protobuf schema from the supplied protobuf generated class.
+     * If the description of the protobuf object is contained in a single .proto file, then this method creates the
+     * {@link FileDescriptorSet} from the generated class.
+     *
+     * @param tClass Class for code generated protobuf message.
+     * @param <T>    Type of protobuf message
+     * @return {@link ProtobufSchema} with generic type T that captures protobuf schema and parser.
+     */
+    public static <T extends GeneratedMessageV3> ProtobufSchema<T> of(Class<T> tClass) {
+        Extractor<T> extractor = new Extractor<>(tClass).invoke();
+
+        return new ProtobufSchema<>(extractor.getFullName(), extractor.getParser(), tClass,
+                extractor.getFileDescriptorSet());
+    }
+
+    /**
+     * Method to generate protobuf schema from the supplied protobuf generated class and {@link FileDescriptorSet}.
+     *
+     * @param tClass            Class for code generated protobuf message.
+     * @param fileDescriptorSet file descriptor set representing a protobuf schema.
+     * @param <T>               Type of protobuf message
+     * @return {@link ProtobufSchema} with generic type T that captures protobuf schema and parser.
+     */
+    public static <T extends GeneratedMessageV3> ProtobufSchema<T> of(Class<T> tClass, FileDescriptorSet fileDescriptorSet) {
+        Extractor<T> extractor = new Extractor<>(tClass).invoke();
+        return new ProtobufSchema<>(extractor.getFullName(), extractor.getParser(), tClass, fileDescriptorSet);
+    }
+
+    /**
+     * Method to generate protobuf schema of generic type {@link DynamicMessage} using the {@link FileDescriptorSet}.
+     * It is for representing protobuf schemas to be used for generic deserialization of protobuf serialized payload into
+     * {@link DynamicMessage}.
+     * Note: this does not have a protobuf parser and can only be used during deserialization.
+     *
+     * @param name              Name of protobuf message
+     * @param fileDescriptorSet file descriptor set representing a protobuf schema.
+     * @return {@link ProtobufSchema} with generic type {@link DynamicMessage} that captures protobuf schema.
+     */
+    public static ProtobufSchema<DynamicMessage> of(String name, FileDescriptorSet fileDescriptorSet) {
+        return new ProtobufSchema<>(name, null, DynamicMessage.class, fileDescriptorSet);
+    }
+
+    /**
+     * Method to generate protobuf schema from the supplied protobuf generated class and {@link FileDescriptorSet}.
+     * It is the same as {@link #of(Class, FileDescriptorSet)} except that it returns a Protobuf schema
+     * typed {@link GeneratedMessageV3}.
+     * It is useful in multiplexed deserializers to pass all objects to deserialize into as base {@link GeneratedMessageV3} objects.
+     *
+     * @param tClass            Class for code generated protobuf message.
+     * @param fileDescriptorSet file descriptor set representing a protobuf schema.
+     * @param <T>               Type of protobuf message
+     * @return {@link ProtobufSchema} with generic type {@link GeneratedMessageV3} that captures protobuf schema and parser of type T.
+     */
+    @SuppressWarnings("unchecked")
+    public static <T extends GeneratedMessageV3> ProtobufSchema<GeneratedMessageV3> ofGeneratedMessageV3(
+            Class<T> tClass, FileDescriptorSet fileDescriptorSet) {
+        Extractor<T> extractor = new Extractor<>(tClass).invoke();
+
+        return new ProtobufSchema<>(extractor.getFullName(), (Parser<GeneratedMessageV3>) extractor.getParser(),
+                GeneratedMessageV3.class, fileDescriptorSet);
+    }
+
+    /**
+     * Method to generate protobuf schema from the supplied protobuf generated class. It creates the {@link FileDescriptorSet}
+     * from the generated class.
+     * This method is the same as {@link #of(Class)} except that it returns a Protobuf schema
+     * typed {@link GeneratedMessageV3}.
+     * It is useful in multiplexed deserializers to pass all objects to deserialize into as base {@link GeneratedMessageV3} objects.
+     *
+     * @param tClass Class for code generated protobuf message.
+     * @param <T>    Type of protobuf message
+     * @return {@link ProtobufSchema} with generic type {@link GeneratedMessageV3} that captures protobuf schema and parser of type T.
+     */
+    @SuppressWarnings("unchecked")
+    public static <T extends GeneratedMessageV3> ProtobufSchema<GeneratedMessageV3> ofGeneratedMessageV3(Class<T> tClass) {
+        Extractor<T> extractor = new Extractor<>(tClass).invoke();
+
+        return new ProtobufSchema<>(extractor.getFullName(),
+                (Parser<GeneratedMessageV3>) extractor.getParser(), GeneratedMessageV3.class, extractor.getFileDescriptorSet());
+    }
+
+    /**
+     * Method to generate protobuf schema of generic type {@link DynamicMessage} from schemaInfo {@link SchemaInfo}.
+     *
+     * @param schemaInfo Schema Info
+     * @return {@link ProtobufSchema} with generic type {@link DynamicMessage} that captures protobuf schema.
+     */
+    public static ProtobufSchema<DynamicMessage> from(SchemaInfo schemaInfo) {
+        try {
+            FileDescriptorSet fileDescriptorSet = FileDescriptorSet.parseFrom(schemaInfo.getSchemaData());
+
+            return new ProtobufSchema<>(fileDescriptorSet, schemaInfo, DynamicMessage.class);
+        } catch (InvalidProtocolBufferException ex) {
+            throw new IllegalArgumentException("Unable to get protobuf schema from schemainfo", ex);
+        }
+    }
+
+    private static class Extractor<T extends GeneratedMessageV3> {
+        @Getter(AccessLevel.PRIVATE)
+        private Class<T> tClass;
+        @Getter(AccessLevel.PRIVATE)
+        private T defaultInstance;
+        @Getter(AccessLevel.PRIVATE)
+        private Parser<T> parser;
+
+        Extractor(Class<T> tClass) {
+            this.tClass = tClass;
+        }
+
+        String getFullName() {
+            return defaultInstance.getDescriptorForType().getFullName();
+        }
+
+        FileDescriptorSet getFileDescriptorSet() {
+            return FileDescriptorSet
+                    .newBuilder().addFile(defaultInstance.getDescriptorForType().getFile().toProto()).build();
+        }
+
+        @SuppressWarnings("unchecked")
+        Extractor<T> invoke() {
+            try {
+                defaultInstance = (T) tClass.getMethod("getDefaultInstance").invoke(null);
+            } catch (IllegalAccessException | InvocationTargetException | NoSuchMethodException e) {
+                throw new IllegalArgumentException(e);
+            }
+            parser = (Parser<T>) defaultInstance.getParserForType();
+            return this;
+        }
+    }
+}
diff --git a/serializers/src/main/java/io/pravega/schemaregistry/schemas/Schema.java b/serializers/src/main/java/io/pravega/schemaregistry/schemas/Schema.java
new file mode 100644
index 000000000..a498779eb
--- /dev/null
+++ b/serializers/src/main/java/io/pravega/schemaregistry/schemas/Schema.java
@@ -0,0 +1,34 @@
+/**
+ * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ */
+package io.pravega.schemaregistry.schemas;
+
+import io.pravega.schemaregistry.contract.data.SchemaInfo;
+
+/**
+ * Interface for container classes for schemas of the different serialization formats.
+ *
+ * @param <T> Type of object.
+ */
+public interface Schema<T> {
+    /**
+     * Returns the {@link SchemaInfo} object that is computed from the schema object. SchemaInfo is the object that encapsulates
+     * all schema metadata to be shared with the schema registry service.
+     *
+     * @return Schema Info object derived from the schema object.
+     */
+    SchemaInfo getSchemaInfo();
+
+    /**
+     * Class for the Type of object.
+     *
+     * @return Class of type T
+     */
+    Class<T> getTClass();
+}
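A sketch of the two ProtobufSchema construction modes. `MyProtoMessage` is a hypothetical protoc-generated class, and the descriptor set would normally come from `protoc --descriptor_set_out`:

```java
import com.google.protobuf.DescriptorProtos.FileDescriptorSet;
import com.google.protobuf.DynamicMessage;
import io.pravega.schemaregistry.schemas.ProtobufSchema;

public class ProtobufSchemaExamples {
    public static void main(String[] args) {
        // Typed schema extracted from a generated class (hypothetical class, shown commented):
        // ProtobufSchema<MyProtoMessage> typed = ProtobufSchema.of(MyProtoMessage.class);

        // Generic, deserialization-only schema built from a descriptor set; an empty
        // placeholder stands in here for one produced by protoc.
        FileDescriptorSet descriptorSet = FileDescriptorSet.getDefaultInstance();
        ProtobufSchema<DynamicMessage> generic = ProtobufSchema.of("MyProtoMessage", descriptorSet);
        System.out.println(generic.getSchemaInfo().getType());
    }
}
```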
diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractDeserializer.java
new file mode 100644
index 000000000..eb3e13091
--- /dev/null
+++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractDeserializer.java
@@ -0,0 +1,122 @@
+/**
+ * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ */
+package io.pravega.schemaregistry.serializers;
+
+import com.google.common.base.Preconditions;
+import io.pravega.schemaregistry.client.SchemaRegistryClient;
+import io.pravega.schemaregistry.contract.data.EncodingId;
+import io.pravega.schemaregistry.contract.data.EncodingInfo;
+import io.pravega.schemaregistry.contract.data.SchemaInfo;
+import io.pravega.schemaregistry.schemas.Schema;
+import lombok.SneakyThrows;
+import lombok.extern.slf4j.Slf4j;
+
+import javax.annotation.Nullable;
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.ByteBuffer;
+
+@Slf4j
+abstract class AbstractDeserializer<T> extends BaseDeserializer<T> {
+    private static final int HEADER_SIZE = 1 + Integer.BYTES;
+
+    private final String groupId;
+    private final SchemaRegistryClient client;
+    // This can be null. If no schema is supplied, the intent is to deserialize into the writer schema.
+    // If headers are not encoded, then this will be the latest schema from the registry.
+    private final SchemaInfo schemaInfo;
+    private final boolean encodeHeader;
+    private final SerializerConfig.Decoders decoders;
+    private final boolean skipHeaders;
+    private final EncodingCache encodingCache;
+
+    protected AbstractDeserializer(String groupId,
+                                   SchemaRegistryClient client,
+                                   @Nullable Schema<T> schema,
+                                   boolean skipHeaders,
+                                   SerializerConfig.Decoders decoders,
+                                   EncodingCache encodingCache,
+                                   boolean encodeHeader) {
+        Preconditions.checkNotNull(groupId);
+        Preconditions.checkNotNull(client);
+        Preconditions.checkNotNull(encodingCache);
+        this.groupId = groupId;
+        this.client = client;
+        this.encodingCache = encodingCache;
+        this.schemaInfo = schema == null ? null : schema.getSchemaInfo();
+        this.encodeHeader = encodeHeader;
+        this.skipHeaders = skipHeaders;
+        this.decoders = decoders;
+
+        initialize();
+    }
+
+    private void initialize() {
+        if (schemaInfo != null) {
+            log.info("Validate caller supplied schema.");
+            if (!client.canReadUsing(groupId, schemaInfo)) {
+                throw new IllegalArgumentException("Cannot read using schema " + schemaInfo.getType()
+                        + " as it is considered incompatible with current policy.");
+            }
+        } else {
+            if (!this.encodeHeader) {
+                log.warn("No reader schema is supplied and stream does not have encoding headers.");
+            }
+        }
+    }
+
+    @SneakyThrows(IOException.class)
+    @Override
+    public T deserialize(ByteBuffer data) {
+        int start = data.hasArray() ? data.arrayOffset() + data.position() : data.position();
+        ByteArrayInputStream inputStream;
+        SchemaInfo writerSchema;
+        SchemaInfo readerSchema;
+        if (this.encodeHeader) {
+            ByteBuffer decoded;
+            if (skipHeaders) {
+                data.position(start + HEADER_SIZE);
+                decoded = data;
+                writerSchema = null;
+            } else {
+                byte protocol = data.get();
+                EncodingId encodingId = new EncodingId(data.getInt());
+                EncodingInfo encodingInfo = encodingCache.getGroupEncodingInfo(encodingId);
+                writerSchema = encodingInfo.getSchemaInfo();
+                decoded = decoders.decode(encodingInfo.getCodecType(), data);
+            }
+
+            inputStream = new ByteArrayInputStream(decoded.array(),
+                    decoded.arrayOffset() + decoded.position(), decoded.remaining());
+            // pass writer schema for the schema to be read into
+            readerSchema = schemaInfo == null ? writerSchema : schemaInfo;
+        } else {
+            byte[] b;
+            if (data.hasArray()) {
+                b = data.array();
+            } else {
+                b = new byte[data.remaining()];
+                data.get(b);
+            }
+            writerSchema = null;
+            readerSchema = schemaInfo;
+            // pass reader schema for schema on read to the underlying implementation
+            inputStream = new ByteArrayInputStream(b, start, data.remaining());
+        }
+
+        return deserialize(inputStream, writerSchema, readerSchema);
+    }
+
+    protected abstract T deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema) throws IOException;
+
+    boolean isEncodeHeader() {
+        return encodeHeader;
+    }
+}
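For reference, the envelope this deserializer strips (and the serializer counterpart below writes when `encodeHeader` is true) is one protocol byte, then a four-byte encoding id, then the possibly-compressed payload. A standalone sketch of reading that header:

```java
import java.nio.ByteBuffer;

public class HeaderLayout {
    // Mirrors HEADER_SIZE = 1 + Integer.BYTES in AbstractDeserializer.
    public static void main(String[] args) {
        ByteBuffer event = ByteBuffer.allocate(16)
                .put((byte) 0x1)   // protocol version written by the serializer
                .putInt(42);       // encoding id resolved against the registry
        event.flip();

        byte protocol = event.get();
        int encodingId = event.getInt();
        // The remaining() bytes are the payload, possibly compressed per the codec
        // registered for this encoding id.
        System.out.println(protocol + " / " + encodingId + " / " + event.remaining());
    }
}
```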
diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractSerializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractSerializer.java
new file mode 100644
index 000000000..66d462d43
--- /dev/null
+++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractSerializer.java
@@ -0,0 +1,106 @@
+/**
+ * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ */
+package io.pravega.schemaregistry.serializers;
+
+import com.google.common.base.Preconditions;
+import io.pravega.common.io.EnhancedByteArrayOutputStream;
+import io.pravega.common.util.BitConverter;
+import io.pravega.schemaregistry.client.SchemaRegistryClient;
+import io.pravega.schemaregistry.codec.Codecs;
+import io.pravega.schemaregistry.codec.Encoder;
+import io.pravega.schemaregistry.contract.data.EncodingId;
+import io.pravega.schemaregistry.contract.data.SchemaInfo;
+import io.pravega.schemaregistry.contract.data.VersionInfo;
+import io.pravega.schemaregistry.schemas.Schema;
+import lombok.Getter;
+import lombok.SneakyThrows;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import java.nio.ByteBuffer;
+import java.util.concurrent.atomic.AtomicReference;
+
+abstract class AbstractSerializer<T> extends BaseSerializer<T> {
+    private static final byte PROTOCOL = 0x1;
+
+    private final String groupId;
+
+    private final SchemaInfo schemaInfo;
+    private final AtomicReference<EncodingId> encodingId;
+    private final boolean encodeHeader;
+    private final SchemaRegistryClient client;
+    @Getter
+    private final Encoder encoder;
+    private final boolean registerSchema;
+
+    protected AbstractSerializer(String groupId,
+                                 SchemaRegistryClient client,
+                                 Schema<T> schema,
+                                 Encoder encoder,
+                                 boolean registerSchema,
+                                 boolean encodeHeader) {
+        Preconditions.checkNotNull(groupId);
+        Preconditions.checkNotNull(client);
+        Preconditions.checkNotNull(encoder);
+        Preconditions.checkNotNull(schema);
+        Preconditions.checkArgument(encodeHeader || encoder.equals(Codecs.None.getCodec()),
+                "Cannot use an encoder if encodeHeader is false.");
+        this.groupId = groupId;
+        this.client = client;
+        this.schemaInfo = schema.getSchemaInfo();
+        this.registerSchema = registerSchema;
+        this.encodingId = new AtomicReference<>();
+        this.encoder = encoder;
+        this.encodeHeader = encodeHeader;
+        initialize();
+    }
+
+    private void initialize() {
+        VersionInfo version;
+        if (registerSchema) {
+            // register schema
+            version = client.addSchema(groupId, schemaInfo);
+        } else {
+            // get the already registered schema version. If the schema is not registered, this throws an exception.
+            version = client.getVersionForSchema(groupId, schemaInfo);
+        }
+        if (encodeHeader) {
+            encodingId.set(client.getEncodingId(groupId, version, encoder.getCodecType().getName()));
+        }
+    }
+
+    @SneakyThrows(IOException.class)
+    @Override
+    public ByteBuffer serialize(T obj) {
+        EnhancedByteArrayOutputStream outStream = new EnhancedByteArrayOutputStream();
+        ByteBuffer byteBuffer;
+        if (this.encodeHeader) {
+            outStream.write(PROTOCOL);
+            BitConverter.writeInt(outStream, encodingId.get().getId());
+        }
+
+        if (!this.encodeHeader || this.encoder.equals(Codecs.None.getCodec())) {
+            // write serialized data to the output stream
+            serialize(obj, schemaInfo, outStream);
+        } else {
+            // encode header is true and an encoder is supplied, so encode the data
+            EnhancedByteArrayOutputStream serializedStream = new EnhancedByteArrayOutputStream();
+
+            serialize(obj, schemaInfo, serializedStream);
+            encoder.encode(ByteBuffer.wrap(serializedStream.getData().array(),
+                    serializedStream.getData().arrayOffset(), serializedStream.getData().getLength()), outStream);
+        }
+
+        byteBuffer = ByteBuffer.wrap(outStream.getData().array(), 0, outStream.getData().getLength());
+
+        return byteBuffer;
+    }
+
+    protected abstract void serialize(T var, SchemaInfo schema, OutputStream outputStream) throws IOException;
+}
diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroDeserializer.java
new file mode 100644
index 000000000..a8db6971b
--- /dev/null
+++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroDeserializer.java
@@ -0,0 +1,63 @@
+/**
+ * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ */
+package io.pravega.schemaregistry.serializers;
+
+import com.google.common.base.Charsets;
+import com.google.common.base.Preconditions;
+import io.pravega.schemaregistry.client.SchemaRegistryClient;
+import io.pravega.schemaregistry.contract.data.SchemaInfo;
+import io.pravega.schemaregistry.schemas.AvroSchema;
+import org.apache.avro.Schema;
+import org.apache.avro.io.BinaryDecoder;
+import org.apache.avro.io.DecoderFactory;
+import org.apache.avro.reflect.ReflectDatumReader;
+import org.apache.avro.specific.SpecificDatumReader;
+import org.apache.avro.specific.SpecificRecordBase;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.concurrent.ConcurrentHashMap;
+
+class AvroDeserializer<T> extends AbstractDeserializer<T> {
+    private final AvroSchema<T> avroSchema;
+    private final ConcurrentHashMap<SchemaInfo, Schema> knownSchemas;
+
+    AvroDeserializer(String groupId, SchemaRegistryClient client,
+                     AvroSchema<T> schema,
+                     SerializerConfig.Decoders decoder, EncodingCache encodingCache) {
+        super(groupId, client, schema, false, decoder, encodingCache, true);
+        Preconditions.checkNotNull(schema);
+        this.avroSchema = schema;
+        this.knownSchemas = new ConcurrentHashMap<>();
+    }
+
+    @Override
+    protected T deserialize(InputStream inputStream, SchemaInfo writerSchemaInfo, SchemaInfo readerSchemaInfo) throws IOException {
+        Preconditions.checkNotNull(writerSchemaInfo);
+        Schema writerSchema;
+        if (knownSchemas.containsKey(writerSchemaInfo)) {
+            writerSchema = knownSchemas.get(writerSchemaInfo);
+        } else {
+            String schemaString = new String(writerSchemaInfo.getSchemaData().array(), Charsets.UTF_8);
+            writerSchema = new Schema.Parser().parse(schemaString);
+            knownSchemas.put(writerSchemaInfo, writerSchema);
+        }
+        Schema readerSchema = avroSchema.getSchema();
+        BinaryDecoder decoder = DecoderFactory.get().binaryDecoder(inputStream, null);
+
+        if (SpecificRecordBase.class.isAssignableFrom(avroSchema.getTClass())) {
+            SpecificDatumReader<T> datumReader = new SpecificDatumReader<>(writerSchema, readerSchema);
+            return datumReader.read(null, decoder);
+        } else {
+            ReflectDatumReader<T> datumReader = new ReflectDatumReader<>(writerSchema, readerSchema);
+            return datumReader.read(null, decoder);
+        }
+    }
+}
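The writer/reader schema pair handed to the datum reader above is what makes Avro schema evolution work. A self-contained sketch of that resolution, using plain Avro with no registry involved:

```java
import java.io.ByteArrayOutputStream;
import org.apache.avro.Schema;
import org.apache.avro.SchemaBuilder;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.BinaryDecoder;
import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.EncoderFactory;

public class SchemaEvolution {
    public static void main(String[] args) throws Exception {
        Schema writer = SchemaBuilder.record("E").fields().requiredString("id").endRecord();
        Schema reader = SchemaBuilder.record("E").fields().requiredString("id")
                .name("tag").type().stringType().stringDefault("none").endRecord();

        // Write with the writer schema...
        GenericRecord rec = new GenericData.Record(writer);
        rec.put("id", "1");
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        BinaryEncoder enc = EncoderFactory.get().binaryEncoder(bos, null);
        new GenericDatumWriter<GenericRecord>(writer).write(rec, enc);
        enc.flush();

        // ...and read with the reader schema: the missing "tag" field takes its default,
        // which is exactly the writer/reader resolution AvroDeserializer performs.
        BinaryDecoder dec = DecoderFactory.get().binaryDecoder(bos.toByteArray(), null);
        GenericRecord read = new GenericDatumReader<GenericRecord>(writer, reader).read(null, dec);
        System.out.println(read); // {"id": "1", "tag": "none"}
    }
}
```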
diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroGenericDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroGenericDeserializer.java
new file mode 100644
index 000000000..77af89edf
--- /dev/null
+++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroGenericDeserializer.java
@@ -0,0 +1,46 @@
+/**
+ * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ */
+package io.pravega.schemaregistry.serializers;
+
+import com.google.common.base.Preconditions;
+import io.pravega.schemaregistry.client.SchemaRegistryClient;
+import io.pravega.schemaregistry.contract.data.SchemaInfo;
+import io.pravega.schemaregistry.schemas.AvroSchema;
+import org.apache.avro.Schema;
+import org.apache.avro.generic.GenericDatumReader;
+import org.apache.avro.io.BinaryDecoder;
+import org.apache.avro.io.DecoderFactory;
+
+import javax.annotation.Nullable;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.concurrent.ConcurrentHashMap;
+
+class AvroGenericDeserializer extends AbstractDeserializer<Object> {
+    private final ConcurrentHashMap<SchemaInfo, Schema> knownSchemas;
+
+    AvroGenericDeserializer(String groupId, SchemaRegistryClient client, @Nullable AvroSchema<Object> schema,
+                            SerializerConfig.Decoders decoder, EncodingCache encodingCache) {
+        super(groupId, client, schema, false, decoder, encodingCache, true);
+        this.knownSchemas = new ConcurrentHashMap<>();
+    }
+
+    @Override
+    protected Object deserialize(InputStream inputStream, SchemaInfo writerSchemaInfo, SchemaInfo readerSchemaInfo) throws IOException {
+        Preconditions.checkNotNull(writerSchemaInfo);
+        Schema writerSchema = knownSchemas.computeIfAbsent(writerSchemaInfo, x -> AvroSchema.from(x).getSchema());
+        Schema readerSchema = knownSchemas.computeIfAbsent(readerSchemaInfo, x -> AvroSchema.from(x).getSchema());
+
+        GenericDatumReader<Object> genericDatumReader = new GenericDatumReader<>(writerSchema, readerSchema);
+
+        BinaryDecoder decoder = DecoderFactory.get().binaryDecoder(inputStream, null);
+        return genericDatumReader.read(null, decoder);
+    }
+}
diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroSerializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroSerializer.java
new file mode 100644
index 000000000..99231a11c
--- /dev/null
+++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroSerializer.java
@@ -0,0 +1,58 @@
+/**
+ * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ */
+package io.pravega.schemaregistry.serializers;
+
+import io.pravega.schemaregistry.client.SchemaRegistryClient;
+import io.pravega.schemaregistry.codec.Encoder;
+import io.pravega.schemaregistry.contract.data.SchemaInfo;
+import io.pravega.schemaregistry.schemas.AvroSchema;
+import org.apache.avro.Schema;
+import org.apache.avro.generic.GenericDatumWriter;
+import org.apache.avro.generic.IndexedRecord;
+import org.apache.avro.io.BinaryEncoder;
+import org.apache.avro.io.EncoderFactory;
+import org.apache.avro.reflect.ReflectDatumWriter;
+import org.apache.avro.specific.SpecificDatumWriter;
+import org.apache.avro.specific.SpecificRecord;
+
+import java.io.IOException;
+import java.io.OutputStream;
+
+class AvroSerializer<T> extends AbstractSerializer<T> {
+    private final AvroSchema<T> avroSchema;
+
+    AvroSerializer(String groupId, SchemaRegistryClient client, AvroSchema<T> schema,
+                   Encoder encoder, boolean registerSchema) {
+        super(groupId, client, schema, encoder, registerSchema, true);
+        this.avroSchema = schema;
+    }
+
+    @Override
+    protected void serialize(T var, SchemaInfo schemaInfo, OutputStream outputStream) throws IOException {
+        Schema schema = avroSchema.getSchema();
+
+        BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(outputStream, null);
+
+        if (IndexedRecord.class.isAssignableFrom(var.getClass())) {
+            if (SpecificRecord.class.isAssignableFrom(var.getClass())) {
+                SpecificDatumWriter<T> writer = new SpecificDatumWriter<>(schema);
+                writer.write(var, encoder);
+            } else {
+                GenericDatumWriter<T> writer = new GenericDatumWriter<>(schema);
+                writer.write(var, encoder);
+            }
+        } else {
+            ReflectDatumWriter<T> writer = new ReflectDatumWriter<>(schema);
+            writer.write(var, encoder);
+        }
+
+        encoder.flush();
+        outputStream.flush();
+    }
+}
diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroSerializerFactory.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroSerializerFactory.java
new file mode 100644
index 000000000..5e9cb8170
--- /dev/null
+++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroSerializerFactory.java
@@ -0,0 +1,113 @@
+/**
+ * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ */
+package io.pravega.schemaregistry.serializers;
+
+import com.google.common.base.Preconditions;
+import io.pravega.client.stream.Serializer;
+import io.pravega.schemaregistry.client.SchemaRegistryClient;
+import io.pravega.schemaregistry.common.Either;
+import io.pravega.schemaregistry.schemas.AvroSchema;
+import lombok.extern.slf4j.Slf4j;
+
+import javax.annotation.Nullable;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+import static io.pravega.schemaregistry.serializers.SerializerFactoryHelper.initForDeserializer;
+import static io.pravega.schemaregistry.serializers.SerializerFactoryHelper.initForSerializer;
+
+/**
+ * Internal Factory class for Avro serializers and deserializers.
+ */
+@Slf4j
+class AvroSerializerFactory {
+    static <T> Serializer<T> serializer(SerializerConfig config, AvroSchema<T> schema) {
+        Preconditions.checkNotNull(config);
+        Preconditions.checkNotNull(schema);
+        Preconditions.checkArgument(config.isWriteEncodingHeader(), "Events should be tagged with encoding ids.");
+        SchemaRegistryClient schemaRegistryClient = initForSerializer(config);
+        String groupId = config.getGroupId();
+        return new AvroSerializer<>(groupId, schemaRegistryClient, schema, config.getEncoder(), config.isRegisterSchema());
+    }
+
+    static <T> Serializer<T> deserializer(SerializerConfig config, AvroSchema<T> schema) {
+        Preconditions.checkNotNull(config);
+        Preconditions.checkNotNull(schema);
+        Preconditions.checkArgument(config.isWriteEncodingHeader(), "Events should be tagged with encoding ids.");
+        SchemaRegistryClient schemaRegistryClient = initForDeserializer(config);
+        String groupId = config.getGroupId();
+
+        EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient);
+
+        return new AvroDeserializer<>(groupId, schemaRegistryClient, schema, config.getDecoders(), encodingCache);
+    }
+
+    static Serializer<Object> genericDeserializer(SerializerConfig config, @Nullable AvroSchema<Object> schema) {
+        Preconditions.checkNotNull(config);
+        Preconditions.checkArgument(config.isWriteEncodingHeader(), "Events should be tagged with encoding ids.");
+        String groupId = config.getGroupId();
+        SchemaRegistryClient schemaRegistryClient = initForDeserializer(config);
+        EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient);
+
+        return new AvroGenericDeserializer(groupId, schemaRegistryClient, schema, config.getDecoders(), encodingCache);
+    }
+
+    static <T> Serializer<T> multiTypeSerializer(SerializerConfig config, Map<Class<? extends T>, AvroSchema<T>> schemas) {
+        Preconditions.checkNotNull(config);
+        Preconditions.checkNotNull(schemas);
+        Preconditions.checkArgument(config.isWriteEncodingHeader(), "Events should be tagged with encoding ids.");
+
+        String groupId = config.getGroupId();
+        SchemaRegistryClient schemaRegistryClient = initForSerializer(config);
+        Map<Class<? extends T>, AbstractSerializer<T>> serializerMap = schemas
+                .entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey,
+                        x -> new AvroSerializer<>(groupId, schemaRegistryClient, x.getValue(), config.getEncoder(),
+                                config.isRegisterSchema())));
+        return new MultiplexedSerializer<>(serializerMap);
+    }
+
+    static <T> Serializer<T> multiTypeDeserializer(
+            SerializerConfig config, Map<Class<? extends T>, AvroSchema<T>> schemas) {
+        Preconditions.checkNotNull(config);
+        Preconditions.checkNotNull(schemas);
+        Preconditions.checkArgument(config.isWriteEncodingHeader(), "Events should be tagged with encoding ids.");
+
+        String groupId = config.getGroupId();
+        SchemaRegistryClient schemaRegistryClient = initForDeserializer(config);
+
+        EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient);
+
+        Map<String, AbstractDeserializer<T>> deserializerMap = schemas
+                .values().stream().collect(Collectors.toMap(x -> x.getSchemaInfo().getType(),
+                        x -> new AvroDeserializer<>(groupId, schemaRegistryClient, x, config.getDecoders(), encodingCache)));
+        return new MultiplexedDeserializer<>(groupId, schemaRegistryClient, deserializerMap, config.getDecoders(),
+                encodingCache);
+    }
+
+    static <T> Serializer<Either<T, Object>> typedOrGenericDeserializer(
+            SerializerConfig config, Map<Class<? extends T>, AvroSchema<T>> schemas) {
+        Preconditions.checkNotNull(config);
+        Preconditions.checkNotNull(schemas);
+        Preconditions.checkArgument(config.isWriteEncodingHeader(), "Events should be tagged with encoding ids.");
+
+        String groupId = config.getGroupId();
+        SchemaRegistryClient schemaRegistryClient = initForDeserializer(config);
+
+        EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient);
+
+        Map<String, AbstractDeserializer<T>> deserializerMap = schemas
+                .values().stream().collect(Collectors.toMap(x -> x.getSchemaInfo().getType(),
+                        x -> new AvroDeserializer<>(groupId, schemaRegistryClient, x, config.getDecoders(), encodingCache)));
+        AbstractDeserializer<Object> genericDeserializer = new AvroGenericDeserializer(groupId, schemaRegistryClient,
+                null, config.getDecoders(), encodingCache);
+        return new MultiplexedAndGenericDeserializer<>(groupId, schemaRegistryClient, deserializerMap, genericDeserializer,
+                config.getDecoders(), encodingCache);
+    }
+}
diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/BaseDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/BaseDeserializer.java
new file mode 100644
index 000000000..015a2036f
--- /dev/null
+++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/BaseDeserializer.java
@@ -0,0 +1,21 @@
+/**
+ * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ */
+package io.pravega.schemaregistry.serializers;
+
+import io.pravega.client.stream.Serializer;
+
+import java.nio.ByteBuffer;
+
+abstract class BaseDeserializer<T> implements Serializer<T> {
+    @Override
+    public final ByteBuffer serialize(T value) {
+        throw new UnsupportedOperationException();
+    }
+}
diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/BaseSerializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/BaseSerializer.java
new file mode 100644
index 000000000..8b7844fc2
--- /dev/null
+++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/BaseSerializer.java
@@ -0,0 +1,21 @@
+/**
+ * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ */
+package io.pravega.schemaregistry.serializers;
+
+import io.pravega.client.stream.Serializer;
+
+import java.nio.ByteBuffer;
+
+abstract class BaseSerializer<T> implements Serializer<T> {
+    @Override
+    public final T deserialize(ByteBuffer serializedValue) {
+        throw new UnsupportedOperationException();
+    }
+}
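The multi-type entry points expect schemas keyed by concrete class and typed to a common base. A sketch of assembling that map; `Base`, `EventA` and `EventB` are hypothetical event classes used only for illustration:

```java
import io.pravega.schemaregistry.schemas.AvroSchema;

import java.util.HashMap;
import java.util.Map;

public class MultiTypeSetup {
    // Hypothetical event hierarchy.
    static class Base { }
    static class EventA extends Base { public String a; }
    static class EventB extends Base { public String b; }

    public static void main(String[] args) {
        // Map each concrete class to a schema typed as the common base, which is
        // the shape multiTypeSerializer/multiTypeDeserializer expect.
        Map<Class<? extends Base>, AvroSchema<Base>> schemas = new HashMap<>();
        schemas.put(EventA.class, AvroSchema.ofBaseType(EventA.class, Base.class));
        schemas.put(EventB.class, AvroSchema.ofBaseType(EventB.class, Base.class));
        System.out.println(schemas.keySet());
    }
}
```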
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.serializers; + +import io.pravega.schemaregistry.contract.data.SchemaInfo; + +import java.io.InputStream; + +public interface CustomDeserializer { + T deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema); +} diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/CustomSerializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/CustomSerializer.java new file mode 100644 index 000000000..7a508b734 --- /dev/null +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/CustomSerializer.java @@ -0,0 +1,18 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.serializers; + +import io.pravega.schemaregistry.contract.data.SchemaInfo; + +import java.io.OutputStream; + +public interface CustomSerializer { + void serialize(T var, SchemaInfo schema, OutputStream outputStream); +} diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/CustomSerializerFactory.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/CustomSerializerFactory.java new file mode 100644 index 000000000..380a067af --- /dev/null +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/CustomSerializerFactory.java @@ -0,0 +1,64 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.serializers; + +import com.google.common.base.Preconditions; +import io.pravega.client.stream.Serializer; +import io.pravega.schemaregistry.client.SchemaRegistryClient; +import io.pravega.schemaregistry.contract.data.SchemaInfo; +import io.pravega.schemaregistry.schemas.Schema; +import lombok.extern.slf4j.Slf4j; + +import javax.annotation.Nullable; +import java.io.InputStream; +import java.io.OutputStream; + +import static io.pravega.schemaregistry.serializers.SerializerFactoryHelper.initForDeserializer; +import static io.pravega.schemaregistry.serializers.SerializerFactoryHelper.initForSerializer; + +/** + * Internal Factory class for Custom serializers and deserializers. 
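For illustration, a minimal sketch of the two interfaces above implemented for plain UTF-8 strings (hypothetical class names; the SchemaInfo arguments are ignored here, and the generic parameter, which this rendering of the diff drops, is written out explicitly):

    import io.pravega.schemaregistry.contract.data.SchemaInfo;
    import org.apache.commons.io.IOUtils;

    import java.io.IOException;
    import java.io.InputStream;
    import java.io.OutputStream;
    import java.io.UncheckedIOException;
    import java.nio.charset.StandardCharsets;

    class Utf8StringSerializer implements CustomSerializer<String> {
        @Override
        public void serialize(String var, SchemaInfo schema, OutputStream outputStream) {
            try {
                // write the raw UTF-8 payload; the wrapping AbstractSerializer adds the registry header
                outputStream.write(var.getBytes(StandardCharsets.UTF_8));
            } catch (IOException e) {
                throw new UncheckedIOException(e); // the interface declares no checked exceptions
            }
        }
    }

    class Utf8StringDeserializer implements CustomDeserializer<String> {
        @Override
        public String deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema) {
            try {
                return new String(IOUtils.toByteArray(inputStream), StandardCharsets.UTF_8);
            } catch (IOException e) {
                throw new UncheckedIOException(e);
            }
        }
    }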
+ */ +@Slf4j +class CustomSerializerFactory { + static Serializer serializer(SerializerConfig config, Schema schema, CustomSerializer serializer) { + Preconditions.checkNotNull(config); + Preconditions.checkNotNull(schema); + Preconditions.checkNotNull(serializer); + String groupId = config.getGroupId(); + SchemaRegistryClient schemaRegistryClient = initForSerializer(config); + return new AbstractSerializer(groupId, schemaRegistryClient, + schema, config.getEncoder(), config.isRegisterSchema(), config.isWriteEncodingHeader()) { + @Override + protected void serialize(T var, SchemaInfo schema, OutputStream outputStream) { + serializer.serialize(var, schema, outputStream); + } + }; + } + + static Serializer deserializer(SerializerConfig config, @Nullable Schema schema, + CustomDeserializer deserializer) { + Preconditions.checkNotNull(config); + Preconditions.checkNotNull(deserializer); + + String groupId = config.getGroupId(); + SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); + + EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); + + return new AbstractDeserializer(groupId, schemaRegistryClient, schema, false, + config.getDecoders(), encodingCache, config.isWriteEncodingHeader()) { + @Override + protected T deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema) { + return deserializer.deserialize(inputStream, writerSchema, readerSchema); + } + }; + } +} diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/EncodingCache.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/EncodingCache.java new file mode 100644 index 000000000..bf1910767 --- /dev/null +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/EncodingCache.java @@ -0,0 +1,64 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.serializers; + +import com.google.common.annotations.VisibleForTesting; +import com.google.common.cache.CacheBuilder; +import com.google.common.cache.CacheLoader; +import com.google.common.cache.LoadingCache; +import io.pravega.common.Exceptions; +import io.pravega.schemaregistry.client.SchemaRegistryClient; +import io.pravega.schemaregistry.client.exceptions.RegistryExceptions; +import io.pravega.schemaregistry.contract.data.EncodingId; +import io.pravega.schemaregistry.contract.data.EncodingInfo; + +import java.util.concurrent.ConcurrentMap; +import java.util.concurrent.ExecutionException; + +/** + * Local cache for storing schemas that are retrieved from the registry service. 
+ */ +public class EncodingCache { + private static final int MAXIMUM_SIZE = 1000; + + private final LoadingCache encodingCache; + EncodingCache(String groupId, SchemaRegistryClient schemaRegistryClient) { + this(groupId, schemaRegistryClient, MAXIMUM_SIZE); + } + + @VisibleForTesting + EncodingCache(String groupId, SchemaRegistryClient schemaRegistryClient, int cacheSize) { + encodingCache = CacheBuilder.newBuilder() + .maximumSize(cacheSize) + .build(new CacheLoader() { + @Override + public EncodingInfo load(EncodingId key) { + return schemaRegistryClient.getEncodingInfo(groupId, key); + } + }); + } + + EncodingInfo getGroupEncodingInfo(EncodingId encodingId) { + try { + return encodingCache.get(encodingId); + } catch (ExecutionException e) { + if (e.getCause() != null && Exceptions.unwrap(e.getCause()) instanceof RegistryExceptions) { + throw (RegistryExceptions) e.getCause(); + } else { + throw new RuntimeException(e.getCause()); + } + } + } + + @VisibleForTesting + ConcurrentMap getMapForCache() { + return encodingCache.asMap(); + } +} diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonDeserializer.java new file mode 100644 index 000000000..0a8837df6 --- /dev/null +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonDeserializer.java @@ -0,0 +1,42 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.serializers; + +import com.fasterxml.jackson.annotation.PropertyAccessor; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.base.Preconditions; +import io.pravega.schemaregistry.client.SchemaRegistryClient; +import io.pravega.schemaregistry.contract.data.SchemaInfo; +import io.pravega.schemaregistry.schemas.JSONSchema; + +import java.io.IOException; +import java.io.InputStream; + +import static com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility; + +class JsonDeserializer extends AbstractDeserializer { + private final JSONSchema jsonSchema; + private final ObjectMapper objectMapper; + + JsonDeserializer(String groupId, SchemaRegistryClient client, + JSONSchema schema, + SerializerConfig.Decoders decoders, EncodingCache encodingCache, boolean encodeHeader) { + super(groupId, client, schema, true, decoders, encodingCache, encodeHeader); + Preconditions.checkNotNull(schema); + this.jsonSchema = schema; + this.objectMapper = new ObjectMapper(); + objectMapper.setVisibility(PropertyAccessor.ALL, Visibility.ANY); + } + + @Override + protected T deserialize(InputStream inputStream, SchemaInfo writerSchemaInfo, SchemaInfo readerSchemaInfo) throws IOException { + return objectMapper.readValue(inputStream, jsonSchema.getDerived()); + } +} diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonGenericDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonGenericDeserializer.java new file mode 100644 index 000000000..29daaa05d --- /dev/null +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonGenericDeserializer.java @@ -0,0 +1,37 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. 
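To make the cache behavior concrete, a hedged sketch of the lookup flow (package-internal API; the group name and id value are placeholders, and EncodingId is assumed to be constructible from the numeric id):

    EncodingCache cache = new EncodingCache("myGroup", registryClient);
    // the first lookup for an id goes to the registry via SchemaRegistryClient#getEncodingInfo...
    EncodingInfo info = cache.getGroupEncodingInfo(new EncodingId(0));
    // ...repeat lookups for the same id are served from the Guava LoadingCache
    EncodingInfo cached = cache.getGroupEncodingInfo(new EncodingId(0));
    String codecName = info.getCodecType().getName(); // codec the writer encoded the payload with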
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.serializers; + +import com.fasterxml.jackson.annotation.JsonAutoDetect; +import com.fasterxml.jackson.annotation.PropertyAccessor; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import io.pravega.schemaregistry.client.SchemaRegistryClient; +import io.pravega.schemaregistry.contract.data.SchemaInfo; + +import java.io.IOException; +import java.io.InputStream; + +class JsonGenericDeserializer extends AbstractDeserializer> { + private final ObjectMapper objectMapper; + + JsonGenericDeserializer(String groupId, SchemaRegistryClient client, + SerializerConfig.Decoders decoders, EncodingCache encodingCache, boolean encodeHeader) { + super(groupId, client, null, false, decoders, encodingCache, encodeHeader); + this.objectMapper = new ObjectMapper(); + objectMapper.setVisibility(PropertyAccessor.ALL, JsonAutoDetect.Visibility.ANY); + } + + @Override + protected WithSchema deserialize(InputStream inputStream, SchemaInfo writerSchemaInfo, SchemaInfo readerSchemaInfo) throws IOException { + JsonNode obj = objectMapper.readTree(inputStream); + return new WithSchema<>(writerSchemaInfo, obj, (x, y) -> (JsonNode) y); + } +} diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonSerializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonSerializer.java new file mode 100644 index 000000000..8c896a3ab --- /dev/null +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonSerializer.java @@ -0,0 +1,37 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.serializers; + +import com.fasterxml.jackson.annotation.JsonAutoDetect; +import com.fasterxml.jackson.annotation.PropertyAccessor; +import com.fasterxml.jackson.databind.ObjectMapper; +import io.pravega.schemaregistry.client.SchemaRegistryClient; +import io.pravega.schemaregistry.codec.Encoder; +import io.pravega.schemaregistry.contract.data.SchemaInfo; +import io.pravega.schemaregistry.schemas.JSONSchema; + +import java.io.IOException; +import java.io.OutputStream; + +class JsonSerializer extends AbstractSerializer { + private final ObjectMapper objectMapper; + JsonSerializer(String groupId, SchemaRegistryClient client, JSONSchema schema, + Encoder encoder, boolean registerSchema, boolean encodeHeader) { + super(groupId, client, schema, encoder, registerSchema, encodeHeader); + objectMapper = new ObjectMapper(); + objectMapper.setVisibility(PropertyAccessor.ALL, JsonAutoDetect.Visibility.ANY); + } + + @Override + protected void serialize(T var, SchemaInfo schemaInfo, OutputStream outputStream) throws IOException { + objectMapper.writeValue(outputStream, var); + outputStream.flush(); + } +} diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonSerializerFactory.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonSerializerFactory.java new file mode 100644 index 000000000..76dc8c983 --- /dev/null +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonSerializerFactory.java @@ -0,0 +1,128 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.serializers; + +import com.fasterxml.jackson.databind.JsonNode; +import com.google.common.base.Preconditions; +import io.pravega.client.stream.Serializer; +import io.pravega.schemaregistry.client.SchemaRegistryClient; +import io.pravega.schemaregistry.common.Either; +import io.pravega.schemaregistry.schemas.JSONSchema; +import lombok.extern.slf4j.Slf4j; + +import java.util.Map; +import java.util.stream.Collectors; + +import static io.pravega.schemaregistry.serializers.SerializerFactoryHelper.initForDeserializer; +import static io.pravega.schemaregistry.serializers.SerializerFactoryHelper.initForSerializer; + +/** + * Internal Factory class for json serializers and deserializers. 
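+ */

Before the factory internals, a hedged end-to-end sketch of the JSON path (assumes the public SerializerFactory facade referenced by SerializerConfig's javadoc, a JSONSchema.of factory as in the library's schemas package, and placeholder names throughout):

    SerializerConfig config = SerializerConfig.builder()
            .groupId("myGroup")
            .registryConfig(SchemaRegistryClientConfig.builder()
                    .schemaRegistryUri(URI.create("http://localhost:9092")) // placeholder registry endpoint
                    .build())
            .registerSchema(true) // auto-register the schema; keep false in production per SerializerConfig's javadoc
            .build();
    Serializer<MyEvent> serializer = SerializerFactory.jsonSerializer(config, JSONSchema.of(MyEvent.class));
    // the resulting Serializer plugs directly into an EventStreamWriter<MyEvent>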
+@Slf4j
+class JsonSerializerFactory {
+    static <T> Serializer<T> serializer(SerializerConfig config, JSONSchema<T> schema) {
+        Preconditions.checkNotNull(config);
+        Preconditions.checkNotNull(schema);
+        String groupId = config.getGroupId();
+        SchemaRegistryClient schemaRegistryClient = initForSerializer(config);
+        return new JsonSerializer<>(groupId, schemaRegistryClient, schema, config.getEncoder(),
+                config.isRegisterSchema(), config.isWriteEncodingHeader());
+    }
+
+    static <T> Serializer<T> deserializer(SerializerConfig config, JSONSchema<T> schema) {
+        Preconditions.checkNotNull(config);
+        Preconditions.checkNotNull(schema);
+        String groupId = config.getGroupId();
+        SchemaRegistryClient schemaRegistryClient = initForDeserializer(config);
+
+        EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient);
+
+        // a typed schema is required here; genericDeserializer handles the schema-less case into JsonNode
+        return new JsonDeserializer<>(groupId, schemaRegistryClient, schema, config.getDecoders(), encodingCache,
+                config.isWriteEncodingHeader());
+    }
+
+    static Serializer<WithSchema<JsonNode>> genericDeserializer(SerializerConfig config) {
+        Preconditions.checkNotNull(config);
+        SchemaRegistryClient schemaRegistryClient = initForDeserializer(config);
+
+        String groupId = config.getGroupId();
+
+        EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient);
+
+        return new JsonGenericDeserializer(groupId, schemaRegistryClient, config.getDecoders(),
+                encodingCache, config.isWriteEncodingHeader());
+    }
+
+    static Serializer<String> jsonStringDeserializer(SerializerConfig config) {
+        Preconditions.checkNotNull(config);
+        SchemaRegistryClient schemaRegistryClient = initForDeserializer(config);
+
+        String groupId = config.getGroupId();
+
+        EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient);
+
+        return new JsonStringDeserializer(groupId, schemaRegistryClient, config.getDecoders(), encodingCache, config.isWriteEncodingHeader());
+    }
+
+    static <T> Serializer<T> multiTypeSerializer(
+            SerializerConfig config, Map<Class<? extends T>, JSONSchema<T>> schemas) {
+        Preconditions.checkNotNull(config);
+        Preconditions.checkNotNull(schemas);
+        Preconditions.checkArgument(config.isWriteEncodingHeader(), "Events should be tagged with encoding ids.");
+        String groupId = config.getGroupId();
+        SchemaRegistryClient schemaRegistryClient = initForSerializer(config);
+        Map<Class<? extends T>, AbstractSerializer<T>> serializerMap = schemas
+                .entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey,
+                        x -> new JsonSerializer<>(groupId, schemaRegistryClient, x.getValue(), config.getEncoder(),
+                                config.isRegisterSchema(), config.isWriteEncodingHeader())));
+        return new MultiplexedSerializer<>(serializerMap);
+    }
+
+    static <T> Serializer<T> multiTypeDeserializer(
+            SerializerConfig config, Map<Class<? extends T>, JSONSchema<T>> schemas) {
+        Preconditions.checkNotNull(config);
+        Preconditions.checkNotNull(schemas);
+        Preconditions.checkArgument(config.isWriteEncodingHeader(), "Events should be tagged with encoding ids.");
+        String groupId = config.getGroupId();
+        SchemaRegistryClient schemaRegistryClient = initForDeserializer(config);
+
+        EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient);
+
+        Map<String, AbstractDeserializer<T>> deserializerMap = schemas
+                .values().stream().collect(Collectors.toMap(x -> x.getSchemaInfo().getType(),
+                        x -> new JsonDeserializer<>(groupId, schemaRegistryClient, x, config.getDecoders(),
+                                encodingCache, config.isWriteEncodingHeader())));
+        return new MultiplexedDeserializer<>(groupId, schemaRegistryClient,
+                deserializerMap, config.getDecoders(),
encodingCache); + } + + static Serializer>> typedOrGenericDeserializer( + SerializerConfig config, Map, JSONSchema> schemas) { + Preconditions.checkNotNull(config); + Preconditions.checkNotNull(schemas); + Preconditions.checkArgument(config.isWriteEncodingHeader(), "Events should be tagged with encoding ids."); + String groupId = config.getGroupId(); + SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); + + EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); + + Map> deserializerMap = schemas + .values().stream().collect(Collectors.toMap(x -> x.getSchemaInfo().getType(), + x -> new JsonDeserializer<>(groupId, schemaRegistryClient, x, config.getDecoders(), encodingCache, + config.isWriteEncodingHeader()))); + JsonGenericDeserializer genericDeserializer = new JsonGenericDeserializer(groupId, schemaRegistryClient, config.getDecoders(), + encodingCache, config.isWriteEncodingHeader()); + + return new MultiplexedAndGenericDeserializer<>(groupId, schemaRegistryClient, + deserializerMap, genericDeserializer, config.getDecoders(), encodingCache); + } +} diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonStringDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonStringDeserializer.java new file mode 100644 index 000000000..5add11d9b --- /dev/null +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonStringDeserializer.java @@ -0,0 +1,38 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.serializers; + +import com.fasterxml.jackson.annotation.JsonAutoDetect; +import com.fasterxml.jackson.annotation.PropertyAccessor; +import com.fasterxml.jackson.databind.ObjectMapper; +import io.pravega.schemaregistry.client.SchemaRegistryClient; +import io.pravega.schemaregistry.contract.data.SchemaInfo; + +import java.io.IOException; +import java.io.InputStream; + +class JsonStringDeserializer extends AbstractDeserializer { + private final ObjectMapper objectMapper; + + JsonStringDeserializer(String groupId, SchemaRegistryClient client, + SerializerConfig.Decoders decoders, EncodingCache encodingCache, boolean encodeHeader) { + super(groupId, client, null, false, decoders, encodingCache, encodeHeader); + this.objectMapper = new ObjectMapper(); + objectMapper.setVisibility(PropertyAccessor.ALL, JsonAutoDetect.Visibility.ANY); + objectMapper.setVisibility(PropertyAccessor.FIELD, JsonAutoDetect.Visibility.ANY); + objectMapper.setVisibility(PropertyAccessor.CREATOR, JsonAutoDetect.Visibility.ANY); + } + + @Override + protected String deserialize(InputStream inputStream, SchemaInfo writerSchemaInfo, SchemaInfo readerSchemaInfo) throws IOException { + Object obj = objectMapper.readValue(inputStream, Object.class); + return objectMapper.writeValueAsString(obj); + } +} diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatSerializerFactory.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatSerializerFactory.java new file mode 100644 index 000000000..bee87cb01 --- /dev/null +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatSerializerFactory.java @@ -0,0 +1,211 @@ +/** + * Copyright (c) Dell Inc., or 
its subsidiaries. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ */
+package io.pravega.schemaregistry.serializers;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.google.common.base.Preconditions;
+import com.google.protobuf.DynamicMessage;
+import io.pravega.client.stream.Serializer;
+import io.pravega.schemaregistry.client.SchemaRegistryClient;
+import io.pravega.schemaregistry.contract.data.SchemaInfo;
+import io.pravega.schemaregistry.contract.data.SerializationFormat;
+import io.pravega.schemaregistry.schemas.AvroSchema;
+import io.pravega.schemaregistry.schemas.JSONSchema;
+import io.pravega.schemaregistry.schemas.ProtobufSchema;
+import io.pravega.schemaregistry.schemas.Schema;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.avro.generic.GenericRecord;
+
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.function.BiFunction;
+import java.util.function.Function;
+
+import static io.pravega.schemaregistry.serializers.SerializerFactoryHelper.initForDeserializer;
+import static io.pravega.schemaregistry.serializers.SerializerFactoryHelper.initForSerializer;
+import static io.pravega.schemaregistry.serializers.WithSchema.NO_TRANSFORM;
+
+/**
+ * Internal Factory class for multi-format serializers and deserializers.
+ * These serializers can be used to work with streams when you either don't know the format beforehand or the stream
+ * allows for multiple formats.
+ */
+@Slf4j
+class MultiFormatSerializerFactory {
+    // region multi format
+    static Serializer<WithSchema<Object>> serializer(SerializerConfig config) {
+        Preconditions.checkNotNull(config);
+        Preconditions.checkArgument(config.isWriteEncodingHeader(), "Events should be tagged with encoding ids.");
+        return serializerInternal(config, Collections.emptyMap());
+    }
+
+    static Serializer<WithSchema<Object>> deserializerWithSchema(SerializerConfig config) {
+        Preconditions.checkNotNull(config);
+        Preconditions.checkArgument(config.isWriteEncodingHeader(), "Events should be tagged with encoding ids.");
+        return deserializerInternal(config, Collections.emptyMap(), NO_TRANSFORM);
+    }
+
+    /**
+     * A deserializer that can read data where each event could be written with different serialization formats.
+     * Formats supported are Protobuf, Avro and JSON.
+     * An event serialized with Avro is deserialized into {@link GenericRecord}.
+     * An event serialized with Protobuf is deserialized into {@link DynamicMessage}.
+     * An event serialized with JSON is deserialized into WithSchema containing {@link JsonNode} and {@link JSONSchema}.
+     *
+     * This also takes a transform function which is applied on the deserialized object and should transform the object
+     * into the type T.
+     *
+     * @param config serializer config
+     * @param transform a transform function that transforms the deserialized object based on the serialization format
+     *                  into an object of type T.
+     * @param <T> Type of object to get back from deserializer.
+     * @return a deserializer that can deserialize Protobuf, JSON or Avro events into Java objects.
+ */ + static Serializer deserializeAsT(SerializerConfig config, + BiFunction transform) { + Preconditions.checkNotNull(config); + Preconditions.checkNotNull(transform); + Preconditions.checkArgument(config.isWriteEncodingHeader(), "Events should be tagged with encoding ids."); + return deserializeAsTInternal(config, Collections.emptyMap(), transform); + } + // endregion + + private static Serializer> serializerInternal(SerializerConfig config, + Map> customSerializers) { + Preconditions.checkNotNull(config); + Preconditions.checkArgument(config.isWriteEncodingHeader(), "Events should be tagged with encoding ids."); + SchemaRegistryClient schemaRegistryClient = initForSerializer(config); + String groupId = config.getGroupId(); + + // if serializer is not already present, create a new serializer. + Function> serializerFunction = + x -> getPravegaSerializer(config, customSerializers, schemaRegistryClient, groupId, x); + return new MultipleFormatSerializer(serializerFunction); + } + + private static Serializer deserializeAsTInternal(SerializerConfig config, + Map> deserializers, + BiFunction transform) { + String groupId = config.getGroupId(); + SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); + EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); + + AbstractDeserializer json = new JsonGenericDeserializer(config.getGroupId(), schemaRegistryClient, + config.getDecoders(), encodingCache, config.isWriteEncodingHeader()); + AbstractDeserializer protobuf = new ProtobufGenericDeserializer(groupId, schemaRegistryClient, null, config.getDecoders(), + encodingCache, config.isWriteEncodingHeader()); + AbstractDeserializer avro = new AvroGenericDeserializer(groupId, schemaRegistryClient, null, config.getDecoders(), + encodingCache); + + Map map = new HashMap<>(); + map.put(SerializationFormat.Json, json); + map.put(SerializationFormat.Avro, avro); + map.put(SerializationFormat.Protobuf, protobuf); + + deserializers.forEach((key, value) -> { + map.put(key, new AbstractDeserializer(groupId, schemaRegistryClient, null, false, + config.getDecoders(), encodingCache, config.isWriteEncodingHeader()) { + @Override + protected Object deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema) { + return value.deserialize(inputStream, writerSchema, readerSchema); + } + }); + }); + + return new MultipleFormatDeserializer<>(groupId, schemaRegistryClient, map, config.getDecoders(), + encodingCache, transform); + } + + private static Serializer> deserializerInternal(SerializerConfig config, Map> deserializers, BiFunction transform) { + String groupId = config.getGroupId(); + SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); + EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); + + AbstractDeserializer json = new JsonGenericDeserializer(config.getGroupId(), schemaRegistryClient, + config.getDecoders(), encodingCache, config.isWriteEncodingHeader()); + AbstractDeserializer protobuf = new ProtobufGenericDeserializer(groupId, schemaRegistryClient, null, config.getDecoders(), + encodingCache, config.isWriteEncodingHeader()); + AbstractDeserializer avro = new AvroGenericDeserializer(groupId, schemaRegistryClient, null, config.getDecoders(), + encodingCache); + + Map map = new HashMap<>(); + map.put(SerializationFormat.Json, json); + map.put(SerializationFormat.Avro, avro); + map.put(SerializationFormat.Protobuf, protobuf); + + deserializers.forEach((key, value) -> { + map.put(key, new 
AbstractDeserializer(groupId, schemaRegistryClient, null, false, + config.getDecoders(), encodingCache, config.isWriteEncodingHeader()) { + @Override + protected Object deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema) { + return value.deserialize(inputStream, writerSchema, readerSchema); + } + }); + }); + + return new MultiFormatWithSchemaDeserializer<>(groupId, schemaRegistryClient, map, config.getDecoders(), + encodingCache, transform); + } + + @SuppressWarnings("unchecked") + private static AbstractSerializer getPravegaSerializer( + SerializerConfig config, Map> customSerializers, + SchemaRegistryClient schemaRegistryClient, String groupId, SchemaInfo schemaInfo) { + switch (schemaInfo.getSerializationFormat()) { + case Avro: + return new AvroSerializer<>(groupId, schemaRegistryClient, + AvroSchema.from(schemaInfo), config.getEncoder(), config.isRegisterSchema()); + case Protobuf: + ProtobufSerializer pSerializer = new ProtobufSerializer<>(groupId, schemaRegistryClient, + ProtobufSchema.from(schemaInfo), config.getEncoder(), config.isRegisterSchema(), config.isWriteEncodingHeader()); + return (AbstractSerializer) pSerializer; + case Json: + JsonSerializer jsonSerializer = new JsonSerializer<>(groupId, schemaRegistryClient, JSONSchema.from(schemaInfo), + config.getEncoder(), config.isRegisterSchema(), config.isWriteEncodingHeader()); + return (AbstractSerializer) jsonSerializer; + case Custom: + return getCustomSerializer(config, customSerializers, schemaRegistryClient, groupId, schemaInfo); + default: + throw new IllegalArgumentException("Serializer not provided"); + } + } + + private static AbstractSerializer getCustomSerializer( + SerializerConfig config, Map> customSerializers, + SchemaRegistryClient schemaRegistryClient, String groupId, SchemaInfo schemaInfo) { + if (customSerializers.containsKey(schemaInfo.getSerializationFormat())) { + CustomSerializer serializer = customSerializers.get(schemaInfo.getSerializationFormat()); + Schema schema = new Schema() { + @Override + public SchemaInfo getSchemaInfo() { + return schemaInfo; + } + + @Override + public Class getTClass() { + return Object.class; + } + }; + return new AbstractSerializer(groupId, schemaRegistryClient, + schema, config.getEncoder(), config.isRegisterSchema(), config.isWriteEncodingHeader()) { + @Override + protected void serialize(Object var, SchemaInfo schema, OutputStream outputStream) { + serializer.serialize(var, schema, outputStream); + } + }; + } else { + throw new IllegalArgumentException("Serializer for the format not supplied"); + } + } +} diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatWithSchemaDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatWithSchemaDeserializer.java new file mode 100644 index 000000000..93e2e7f4d --- /dev/null +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatWithSchemaDeserializer.java @@ -0,0 +1,44 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
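To illustrate the transform hook that deserializeAsT takes, a hedged sketch that collapses every format to a printable String (config is a placeholder SerializerConfig; per the javadoc above, Avro yields GenericRecord, Protobuf yields DynamicMessage, and JSON yields a WithSchema wrapper):

    BiFunction<SerializationFormat, Object, String> toPrintable = (format, obj) -> {
        switch (format) {
            case Protobuf:
                return ((DynamicMessage) obj).toString(); // protobuf text format
            case Avro:
                return obj.toString();                    // GenericRecord renders JSON-like text
            default:
                return obj.toString();                    // JSON case: the WithSchema wrapper
        }
    };
    Serializer<String> anyFormatReader = MultiFormatSerializerFactory.deserializeAsT(config, toPrintable);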
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.serializers; + +import com.google.common.base.Preconditions; +import io.pravega.schemaregistry.client.SchemaRegistryClient; +import io.pravega.schemaregistry.contract.data.SchemaInfo; +import io.pravega.schemaregistry.contract.data.SerializationFormat; + +import java.io.IOException; +import java.io.InputStream; +import java.util.Map; +import java.util.function.BiFunction; + +class MultiFormatWithSchemaDeserializer extends AbstractDeserializer> { + private final Map genericDeserializers; + private final BiFunction transform; + + MultiFormatWithSchemaDeserializer(String groupId, SchemaRegistryClient client, + Map genericDeserializers, + SerializerConfig.Decoders decoders, + EncodingCache encodingCache, BiFunction transform) { + super(groupId, client, null, false, decoders, encodingCache, true); + this.genericDeserializers = genericDeserializers; + this.transform = transform; + } + + @Override + protected WithSchema deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema) throws IOException { + Preconditions.checkNotNull(writerSchema); + Object obj = genericDeserializers.get(writerSchema.getSerializationFormat()).deserialize(inputStream, writerSchema, readerSchema); + if (obj instanceof WithSchema) { + obj = ((WithSchema) obj).getObject(); + } + return new WithSchema<>(writerSchema, obj, transform); + } +} \ No newline at end of file diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultipleFormatDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultipleFormatDeserializer.java new file mode 100644 index 000000000..b47e5c9a4 --- /dev/null +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultipleFormatDeserializer.java @@ -0,0 +1,41 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.serializers; + +import com.google.common.base.Preconditions; +import io.pravega.schemaregistry.client.SchemaRegistryClient; +import io.pravega.schemaregistry.contract.data.SchemaInfo; +import io.pravega.schemaregistry.contract.data.SerializationFormat; + +import java.io.IOException; +import java.io.InputStream; +import java.util.Map; +import java.util.function.BiFunction; + +class MultipleFormatDeserializer extends AbstractDeserializer { + private final Map genericDeserializers; + private final BiFunction transform; + + MultipleFormatDeserializer(String groupId, SchemaRegistryClient client, + Map genericDeserializers, + SerializerConfig.Decoders decoders, + EncodingCache encodingCache, BiFunction transform) { + super(groupId, client, null, false, decoders, encodingCache, true); + this.genericDeserializers = genericDeserializers; + this.transform = transform; + } + + @Override + protected T deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema) throws IOException { + Preconditions.checkNotNull(writerSchema); + return transform.apply(writerSchema.getSerializationFormat(), + genericDeserializers.get(writerSchema.getSerializationFormat()).deserialize(inputStream, writerSchema, readerSchema)); + } +} \ No newline at end of file diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultipleFormatSerializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultipleFormatSerializer.java new file mode 100644 index 000000000..f3e3d6bef --- /dev/null +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultipleFormatSerializer.java @@ -0,0 +1,41 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.serializers; + +import io.pravega.client.stream.Serializer; +import io.pravega.schemaregistry.contract.data.SchemaInfo; +import org.apache.commons.lang3.NotImplementedException; + +import java.nio.ByteBuffer; +import java.util.concurrent.ConcurrentHashMap; +import java.util.function.Function; + +class MultipleFormatSerializer implements Serializer> { + private final Function> serializerFunction; + + private final ConcurrentHashMap> serializersMap; + + MultipleFormatSerializer(Function> serializerFunction) { + this.serializerFunction = serializerFunction; + this.serializersMap = new ConcurrentHashMap<>(); + } + + @Override + public ByteBuffer serialize(WithSchema value) { + AbstractSerializer serializer = serializersMap.computeIfAbsent(value.getSchema().getSchemaInfo(), + x -> serializerFunction.apply(value.getSchema().getSchemaInfo())); + return serializer.serialize(value.getObject()); + } + + @Override + public WithSchema deserialize(ByteBuffer serializedValue) { + throw new NotImplementedException("Deserializer not implemented"); + } +} \ No newline at end of file diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedAndGenericDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedAndGenericDeserializer.java new file mode 100644 index 000000000..1baf69f21 --- /dev/null +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedAndGenericDeserializer.java @@ -0,0 +1,45 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
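+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ */

The serializer above creates one inner serializer per distinct SchemaInfo on first use; the same lazy, thread-safe pattern in isolation (createFor is a hypothetical factory function):

    ConcurrentHashMap<SchemaInfo, Serializer<Object>> bySchema = new ConcurrentHashMap<>();
    // computeIfAbsent invokes the factory at most once per schema, even under concurrent writers
    Serializer<Object> forEvent = bySchema.computeIfAbsent(eventSchemaInfo, si -> createFor(si));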
+package io.pravega.schemaregistry.serializers;
+
+import com.google.common.base.Preconditions;
+import io.pravega.schemaregistry.client.SchemaRegistryClient;
+import io.pravega.schemaregistry.common.Either;
+import io.pravega.schemaregistry.contract.data.SchemaInfo;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.Map;
+
+class MultiplexedAndGenericDeserializer<T> extends AbstractDeserializer<Either<T, Object>> {
+    private final Map<String, AbstractDeserializer<T>> deserializers;
+    private final AbstractDeserializer<Object> genericDeserializer;
+
+    MultiplexedAndGenericDeserializer(String groupId, SchemaRegistryClient client,
+                                      Map<String, AbstractDeserializer<T>> deserializers,
+                                      AbstractDeserializer<Object> genericDeserializer,
+                                      SerializerConfig.Decoders decoders,
+                                      EncodingCache encodingCache) {
+        super(groupId, client, null, false, decoders, encodingCache, true);
+        this.deserializers = deserializers;
+        this.genericDeserializer = genericDeserializer;
+    }
+
+    @Override
+    protected Either<T, Object> deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema) throws IOException {
+        Preconditions.checkNotNull(writerSchema);
+        AbstractDeserializer<T> deserializer = deserializers.get(writerSchema.getType());
+        if (deserializer == null) {
+            return Either.right(genericDeserializer.deserialize(inputStream, writerSchema, readerSchema));
+        } else {
+            return Either.left(deserializer.deserialize(inputStream, writerSchema, readerSchema));
+        }
+    }
+}
\ No newline at end of file
diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedDeserializer.java
new file mode 100644
index 000000000..d10c5af21
--- /dev/null
+++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedDeserializer.java
@@ -0,0 +1,47 @@
+/**
+ * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ */
+package io.pravega.schemaregistry.serializers;
+
+import com.google.common.base.Preconditions;
+import io.pravega.schemaregistry.client.SchemaRegistryClient;
+import io.pravega.schemaregistry.contract.data.SchemaInfo;
+import org.apache.commons.lang3.SerializationException;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.Map;
+
+/**
+ * Deserializer that multiplexes across multiple object types. Based on the type recorded in the writer schema of the
+ * serialized event, it invokes the deserializer registered for that type.
+ *
+ * @param <T> Type of object.
+ */
+class MultiplexedDeserializer<T> extends AbstractDeserializer<T> {
+    private final Map<String, AbstractDeserializer<T>> deserializers;
+
+    MultiplexedDeserializer(String groupId, SchemaRegistryClient client,
+                            Map<String, AbstractDeserializer<T>> deserializers,
+                            SerializerConfig.Decoders decoders,
+                            EncodingCache encodingCache) {
+        super(groupId, client, null, false, decoders, encodingCache, true);
+        this.deserializers = deserializers;
+    }
+
+    @Override
+    protected T deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema) throws IOException {
+        Preconditions.checkNotNull(writerSchema);
+        AbstractDeserializer<T> deserializer = deserializers.get(writerSchema.getType());
+        if (deserializer == null) {
+            throw new SerializationException("deserializer not supplied for type " + writerSchema.getType());
+        }
+        return deserializer.deserialize(inputStream, writerSchema, readerSchema);
+    }
+}
\ No newline at end of file
diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedSerializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedSerializer.java
new file mode 100644
index 000000000..34392b620
--- /dev/null
+++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedSerializer.java
@@ -0,0 +1,41 @@
+/**
+ * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ */
+package io.pravega.schemaregistry.serializers;
+
+import io.pravega.client.stream.Serializer;
+
+import java.nio.ByteBuffer;
+import java.util.Map;
+
+/**
+ * Serializer to multiplex serialization of multiple types of events with the same serialization format.
+ *
+ * @param <T> Type of object.
+ */
+class MultiplexedSerializer<T> implements Serializer<T> {
+    private final Map<Class<? extends T>, AbstractSerializer<T>> serializers;
+
+    MultiplexedSerializer(Map<Class<? extends T>, AbstractSerializer<T>> serializers) {
+        this.serializers = serializers;
+    }
+
+    @Override
+    @SuppressWarnings("unchecked")
+    public ByteBuffer serialize(T obj) {
+        Class<? extends T> tClass = (Class<? extends T>) obj.getClass();
+        AbstractSerializer<T> serializer = serializers.get(tClass);
+        return serializer.serialize(obj);
+    }
+
+    @Override
+    public T deserialize(ByteBuffer serializedValue) {
+        throw new IllegalStateException();
+    }
+}
\ No newline at end of file
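A hedged sketch of multi-type dispatch as wired by the JSON factory shown earlier (Order, OrderCreated and OrderShipped are hypothetical event classes; JSONSchema.ofBaseType is assumed to pair a subtype with its base type, as in the library's schemas package):

    Map<Class<? extends Order>, JSONSchema<Order>> schemas = new HashMap<>();
    schemas.put(OrderCreated.class, JSONSchema.ofBaseType(OrderCreated.class, Order.class));
    schemas.put(OrderShipped.class, JSONSchema.ofBaseType(OrderShipped.class, Order.class));
    // writer side: the runtime class of each event selects its serializer
    Serializer<Order> serializer = JsonSerializerFactory.multiTypeSerializer(config, schemas);
    // reader side: the type name in the writer schema selects the matching deserializer
    Serializer<Order> deserializer = JsonSerializerFactory.multiTypeDeserializer(config, schemas);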
diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufDeserializer.java
new file mode 100644
index 000000000..9cd4a460f
--- /dev/null
+++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufDeserializer.java
@@ -0,0 +1,40 @@
+/**
+ * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ */
+package io.pravega.schemaregistry.serializers;
+
+import com.google.common.base.Preconditions;
+import com.google.protobuf.GeneratedMessageV3;
+import com.google.protobuf.InvalidProtocolBufferException;
+import io.pravega.schemaregistry.client.SchemaRegistryClient;
+import io.pravega.schemaregistry.contract.data.SchemaInfo;
+import io.pravega.schemaregistry.schemas.ProtobufSchema;
+
+import java.io.IOException;
+import java.io.InputStream;
+
+public class ProtobufDeserializer<T extends GeneratedMessageV3> extends AbstractDeserializer<T> {
+    private final ProtobufSchema<T> protobufSchema;
+
+    ProtobufDeserializer(String groupId, SchemaRegistryClient client,
+                         ProtobufSchema<T> schema, SerializerConfig.Decoders decoder,
+                         EncodingCache encodingCache, boolean encodeHeader) {
+        super(groupId, client, schema, true, decoder, encodingCache, encodeHeader);
+        Preconditions.checkNotNull(schema);
+        this.protobufSchema = schema;
+    }
+
+    @Override
+    protected T deserialize(InputStream inputStream, SchemaInfo writerSchemaInfo, SchemaInfo readerSchemaInfo) throws IOException {
+        try {
+            return protobufSchema.getParser().parseFrom(inputStream);
+        } catch (InvalidProtocolBufferException e) {
+            throw new IOException("Invalid protobuf serialized bytes", e);
+        }
+    }
+}
diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufGenericDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufGenericDeserializer.java
new file mode 100644
index 000000000..878bdbfb1
--- /dev/null
+++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufGenericDeserializer.java
@@ -0,0 +1,89 @@
+/**
+ * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ */
+package io.pravega.schemaregistry.serializers;
+
+import com.google.common.base.Preconditions;
+import com.google.common.base.Strings;
+import com.google.protobuf.DescriptorProtos;
+import com.google.protobuf.Descriptors;
+import com.google.protobuf.DynamicMessage;
+import io.pravega.schemaregistry.client.SchemaRegistryClient;
+import io.pravega.schemaregistry.common.NameUtil;
+import io.pravega.schemaregistry.contract.data.SchemaInfo;
+import io.pravega.schemaregistry.schemas.ProtobufSchema;
+import org.apache.commons.lang3.SerializationException;
+
+import javax.annotation.Nullable;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.concurrent.ConcurrentHashMap;
+
+public class ProtobufGenericDeserializer extends AbstractDeserializer<DynamicMessage> {
+    private final ConcurrentHashMap<SchemaInfo, Descriptors.Descriptor> knownSchemas;
+
+    ProtobufGenericDeserializer(String groupId, SchemaRegistryClient client, @Nullable ProtobufSchema<DynamicMessage> schema,
+                                SerializerConfig.Decoders decoder, EncodingCache encodingCache, boolean encodeHeader) {
+        super(groupId, client, schema, false, decoder, encodingCache, encodeHeader);
+        Preconditions.checkArgument(isEncodeHeader() || schema != null);
+        knownSchemas = new ConcurrentHashMap<>();
+    }
+
+    @Override
+    protected DynamicMessage deserialize(InputStream inputStream, SchemaInfo writerSchemaInfo, SchemaInfo readerSchemaInfo) throws IOException {
+        Preconditions.checkArgument(writerSchemaInfo != null || readerSchemaInfo != null);
+
+        SchemaInfo schemaToUse = readerSchemaInfo == null ? writerSchemaInfo : readerSchemaInfo;
+        Descriptors.Descriptor messageType = knownSchemas.computeIfAbsent(schemaToUse, this::parseSchema);
+
+        return DynamicMessage.parseFrom(messageType, inputStream);
+    }
+
+    private Descriptors.Descriptor parseSchema(SchemaInfo schemaToUse) {
+        DescriptorProtos.FileDescriptorSet descriptorSet = ProtobufSchema.from(schemaToUse).getFileDescriptorSet();
+
+        int count = descriptorSet.getFileCount();
+        String[] tokens = NameUtil.extractNameAndQualifier(schemaToUse.getType());
+        String name = tokens[0];
+        String pckg = tokens[1];
+        DescriptorProtos.FileDescriptorProto mainDescriptor = null;
+        for (DescriptorProtos.FileDescriptorProto x : descriptorSet.getFileList()) {
+            boolean packageMatch;
+            if (x.getPackage() == null) {
+                packageMatch = Strings.isNullOrEmpty(pckg);
+            } else {
+                packageMatch = x.getPackage().equals(pckg);
+            }
+            if (packageMatch && x.getMessageTypeList().stream().anyMatch(y -> y.getName().equals(name))) {
+                mainDescriptor = x;
+                break;
+            }
+        }
+        if (mainDescriptor == null) {
+            throw new IllegalArgumentException("FileDescriptorSet doesn't contain the schema for the object type.");
+        }
+
+        Descriptors.FileDescriptor[] dependencyArray = new Descriptors.FileDescriptor[count];
+        Descriptors.FileDescriptor fd;
+        try {
+            for (int i = 0; i < count; i++) {
+                fd = Descriptors.FileDescriptor.buildFrom(
+                        descriptorSet.getFile(i),
+                        new Descriptors.FileDescriptor[]{});
+                dependencyArray[i] = fd;
+            }
+
+            fd = Descriptors.FileDescriptor.buildFrom(mainDescriptor, dependencyArray);
+        } catch (Descriptors.DescriptorValidationException e) {
+            throw new IllegalArgumentException("Invalid protobuf schema.", e);
+        }
+        return fd.getMessageTypes().stream().filter(x -> x.getName().equals(name))
+                 .findAny().orElseThrow(() -> new SerializationException(String.format("schema for %s not found", schemaToUse.getType())));
+    }
+}
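Events read through ProtobufGenericDeserializer come back as DynamicMessage, so fields are accessed through descriptors rather than generated classes; a small sketch (fieldName is caller-supplied):

    static Object readField(DynamicMessage msg, String fieldName) {
        // findFieldByName returns null for unknown fields; getField returns the proto default when unset
        Descriptors.FieldDescriptor fd = msg.getDescriptorForType().findFieldByName(fieldName);
        return fd == null ? null : msg.getField(fd);
    }

diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufSerializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufSerializer.java
new file mode 100644
index 000000000..e95ae6672
--- /dev/null
+++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufSerializer.java
@@ -0,0 +1,32 @@
+/**
+ * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.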
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.serializers; + +import com.google.protobuf.Message; +import io.pravega.schemaregistry.client.SchemaRegistryClient; +import io.pravega.schemaregistry.codec.Encoder; +import io.pravega.schemaregistry.contract.data.SchemaInfo; +import io.pravega.schemaregistry.schemas.ProtobufSchema; + +import java.io.IOException; +import java.io.OutputStream; + +class ProtobufSerializer extends AbstractSerializer { + ProtobufSerializer(String groupId, SchemaRegistryClient client, ProtobufSchema schema, + Encoder encoder, boolean registerSchema, boolean encodeHeader) { + super(groupId, client, schema, encoder, registerSchema, encodeHeader); + } + + @Override + protected void serialize(T var, SchemaInfo schemaInfo, OutputStream outputStream) throws IOException { + var.writeTo(outputStream); + outputStream.flush(); + } +} diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufSerializerFactory.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufSerializerFactory.java new file mode 100644 index 000000000..ee3975ddc --- /dev/null +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufSerializerFactory.java @@ -0,0 +1,122 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.serializers; + +import com.google.common.base.Preconditions; +import com.google.protobuf.DynamicMessage; +import com.google.protobuf.GeneratedMessageV3; +import com.google.protobuf.Message; +import io.pravega.client.stream.Serializer; +import io.pravega.schemaregistry.client.SchemaRegistryClient; +import io.pravega.schemaregistry.common.Either; +import io.pravega.schemaregistry.schemas.ProtobufSchema; +import lombok.extern.slf4j.Slf4j; + +import javax.annotation.Nullable; +import java.util.Map; +import java.util.stream.Collectors; + +import static io.pravega.schemaregistry.serializers.SerializerFactoryHelper.initForDeserializer; +import static io.pravega.schemaregistry.serializers.SerializerFactoryHelper.initForSerializer; + +/** + * Internal Factory class for protobuf serializers and deserializers. 
+ */ +@Slf4j +class ProtobufSerializerFactory { + static Serializer serializer(SerializerConfig config, + ProtobufSchema schema) { + Preconditions.checkNotNull(config); + Preconditions.checkNotNull(schema); + String groupId = config.getGroupId(); + SchemaRegistryClient schemaRegistryClient = initForSerializer(config); + return new ProtobufSerializer<>(groupId, schemaRegistryClient, schema, config.getEncoder(), + config.isRegisterSchema(), config.isWriteEncodingHeader()); + } + + static Serializer deserializer(SerializerConfig config, + ProtobufSchema schema) { + Preconditions.checkNotNull(config); + Preconditions.checkNotNull(schema); + String groupId = config.getGroupId(); + SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); + + EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); + + // schema can be null in which case deserialization will happen into dynamic message + return new ProtobufDeserializer<>(groupId, schemaRegistryClient, schema, config.getDecoders(), encodingCache, + config.isWriteEncodingHeader()); + } + + static Serializer genericDeserializer(SerializerConfig config, @Nullable ProtobufSchema schema) { + Preconditions.checkNotNull(config); + Preconditions.checkArgument(schema != null || config.isWriteEncodingHeader(), + "Either read schema should be supplied or events should be tagged with encoding ids."); + SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); + + String groupId = config.getGroupId(); + EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); + + return new ProtobufGenericDeserializer(groupId, schemaRegistryClient, schema, config.getDecoders(), encodingCache, + config.isWriteEncodingHeader()); + } + + static Serializer multiTypeSerializer( + SerializerConfig config, Map, ProtobufSchema> schemas) { + Preconditions.checkNotNull(config); + Preconditions.checkNotNull(schemas); + Preconditions.checkArgument(config.isWriteEncodingHeader(), "Events should be tagged with encoding ids."); + String groupId = config.getGroupId(); + SchemaRegistryClient schemaRegistryClient = initForSerializer(config); + + Map, AbstractSerializer> serializerMap = schemas + .entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, + x -> new ProtobufSerializer<>(groupId, schemaRegistryClient, x.getValue(), config.getEncoder(), + config.isRegisterSchema(), config.isWriteEncodingHeader()))); + return new MultiplexedSerializer<>(serializerMap); + } + + static Serializer multiTypeDeserializer( + SerializerConfig config, Map, ProtobufSchema> schemas) { + Preconditions.checkNotNull(config); + Preconditions.checkNotNull(schemas); + Preconditions.checkArgument(config.isWriteEncodingHeader(), "Events should be tagged with encoding ids."); + String groupId = config.getGroupId(); + SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); + + EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); + + Map> deserializerMap = schemas + .values().stream().collect(Collectors.toMap(x -> x.getSchemaInfo().getType(), + x -> new ProtobufDeserializer<>(groupId, schemaRegistryClient, x, config.getDecoders(), encodingCache, + config.isWriteEncodingHeader()))); + return new MultiplexedDeserializer<>(groupId, schemaRegistryClient, deserializerMap, config.getDecoders(), encodingCache); + } + + static Serializer> typedOrGenericDeserializer( + SerializerConfig config, Map, ProtobufSchema> schemas) { + Preconditions.checkNotNull(config); + Preconditions.checkNotNull(schemas); + 
Preconditions.checkArgument(config.isWriteEncodingHeader(), "Events should be tagged with encoding ids."); + String groupId = config.getGroupId(); + SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); + + EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); + + Map> deserializerMap = schemas + .values().stream().collect(Collectors.toMap(x -> x.getSchemaInfo().getType(), + x -> new ProtobufDeserializer<>(groupId, schemaRegistryClient, x, config.getDecoders(), encodingCache, + config.isWriteEncodingHeader()))); + ProtobufGenericDeserializer genericDeserializer = new ProtobufGenericDeserializer(groupId, schemaRegistryClient, null, + config.getDecoders(), encodingCache, config.isWriteEncodingHeader()); + return new MultiplexedAndGenericDeserializer<>(groupId, schemaRegistryClient, deserializerMap, genericDeserializer, + config.getDecoders(), encodingCache); + } +} diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java new file mode 100644 index 000000000..a9c648bdb --- /dev/null +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java @@ -0,0 +1,287 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.serializers; + +import com.google.common.base.Preconditions; +import com.google.common.base.Strings; +import io.pravega.schemaregistry.client.SchemaRegistryClient; +import io.pravega.schemaregistry.client.SchemaRegistryClientConfig; +import io.pravega.schemaregistry.codec.Codec; +import io.pravega.schemaregistry.codec.Codecs; +import io.pravega.schemaregistry.codec.Decoder; +import io.pravega.schemaregistry.codec.Encoder; +import io.pravega.schemaregistry.common.Either; +import io.pravega.schemaregistry.contract.data.CodecType; +import io.pravega.schemaregistry.contract.data.Compatibility; +import io.pravega.schemaregistry.contract.data.EncodingInfo; +import io.pravega.schemaregistry.contract.data.GroupProperties; +import io.pravega.schemaregistry.contract.data.SerializationFormat; +import lombok.AccessLevel; +import lombok.Builder; +import lombok.Data; +import lombok.Getter; +import lombok.NonNull; + +import java.io.IOException; +import java.nio.ByteBuffer; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; + +/** + * Serializer Config class that is passed to {@link SerializerFactory} for creating serializer. + */ +@Data +@Builder +public class SerializerConfig { + /** + * Name of the group. + */ + @NonNull + private final String groupId; + /** + * Namespace for the group. + */ + private final String namespace; + /** + * Either the registry client or the {@link SchemaRegistryClientConfig} that can be used for creating a new registry client. + * Exactly one of the two option has to be supplied. + */ + @Getter(AccessLevel.NONE) + private final SchemaRegistryClientConfig registryConfig; + /** + * Either the registry client or the {@link SchemaRegistryClientConfig} that can be used for creating a new registry client. + * Exactly one of the two option has to be supplied. 
+     */
+    @Getter(AccessLevel.NONE)
+    private final SchemaRegistryClient registryClient;
+    /**
+     * Flag to tell the serializer if the schema should be automatically registered before using it in {@link io.pravega.client.stream.EventStreamWriter}.
+     * It is recommended to keep this flag as false in production systems and manage schema evolution explicitly and
+     * in lockstep with upgrade of existing pravega client applications.
+     */
+    private final boolean registerSchema;
+    /**
+     * Flag to tell the serializer if the codec should be automatically registered before using the serializer in
+     * {@link io.pravega.client.stream.EventStreamWriter}.
+     * It is recommended to keep this flag as false in production systems and manage codecTypes used by writers explicitly
+     * so that readers are aware of encodings used.
+     */
+    private final boolean registerCodec;
+    /**
+     * Codec to use for encoding events after serializing them.
+     */
+    private final Encoder encoder;
+    /**
+     * Function that should be applied on serialized data read from stream. This is invoked after reading the codecType
+     * from {@link EncodingInfo} and using the codec type read from it.
+     * It should return the decoded data back to the deserializer.
+     * Use {@link SerializerConfigBuilder#decoder(String, Decoder)} to add decoders.
+     * Any number of decoders can be added.
+     */
+    private final Decoders decoders;
+    /**
+     * Tells the deserializer to fail and exit upfront if the supplied decoder codecTypes do not match the group's codecTypes.
+     * This is important when the writers have used a custom codec for which the reader should be instantiated with a
+     * corresponding decoder, otherwise it would fail to decode and read the data.
+     * As an example, if writer applications had implemented a custom encryption encoder which encrypted the data after
+     * serializing it, then the data will include an encoding id that will be resolved to the schema and the codec type name
+     * for the encryption codec. If the readers are not provided with a decoder for all data encoded with that codec type,
+     * they would fail to decode that data. This flag ensures that the readers retrieve all the codec types registered
+     * with the registry service and fail if they are not instantiated with decoders for all of them.
+     *
+     * The default value for this is true.
+     */
+    private final boolean failOnCodecMismatch;
+    /**
+     * Flag to tell the serializer/deserializer if the group should be created automatically.
+     * It is recommended to keep this flag as false in production systems and to create groups and add schemas explicitly.
+     */
+    @Getter(AccessLevel.NONE)
+    private final GroupProperties createGroup;
+    /**
+     * Flag to tell the serializer/deserializer if the encoding id should be added as a header with each event.
+     * By default this is set to true. If users choose to not add the header, they should do so in all their writer and
+     * reader applications for the given stream.
+     *
+     * Adding the event header is a requirement for the following cases:
+     * If {@link SerializationFormat#Avro} is chosen for a group, the event header cannot be false.
+     * If streams can have multiple types of events, this cannot be false.
+     * If streams can have multiple formats of events, this cannot be false.
+ */ + private final boolean writeEncodingHeader; + + private SerializerConfig(String groupId, String namespace, SchemaRegistryClientConfig config, SchemaRegistryClient client, + boolean registerSchema, boolean registerCodec, Encoder encoder, Decoders decoders, boolean failOnCodecMismatch, + GroupProperties createGroup, boolean writeEncodingHeader) { + Preconditions.checkArgument(!Strings.isNullOrEmpty(groupId), "Group id needs to be supplied"); + Preconditions.checkArgument(client != null || config != null, "Either registry client or config needs to be supplied"); + this.groupId = groupId; + this.namespace = namespace; + this.registryClient = client; + this.registryConfig = config; + this.registerSchema = registerSchema; + this.registerCodec = registerCodec; + this.encoder = encoder; + this.decoders = decoders; + this.failOnCodecMismatch = failOnCodecMismatch; + this.createGroup = createGroup; + this.writeEncodingHeader = writeEncodingHeader; + } + + Either getRegistryConfigOrClient() { + if (registryClient == null) { + return Either.left(registryConfig); + } else { + return Either.right(registryClient); + } + } + + public boolean isCreateGroup() { + return createGroup != null; + } + + GroupProperties getGroupProperties() { + return createGroup; + } + + public static final class SerializerConfigBuilder { + private Codec encoder = Codecs.None.getCodec(); + + private Decoders decoders = new Decoders(); + + private boolean registerSchema = false; + private boolean registerCodec = false; + private boolean failOnCodecMismatch = true; + private boolean writeEncodingHeader = true; + private SchemaRegistryClientConfig registryConfig = null; + private SchemaRegistryClient registryClient = null; + private String namespace = null; + + /** + * Add a decoder for decoding data encoded with the {@link Codec#getCodecType()}. + * + * @param name Name of codec from {@link CodecType#getName()}. + * @param decoder decoder implementation to use for decoding data encoded with the {@link Codec#getCodecType()}. + * @return Builder. + */ + public SerializerConfigBuilder decoder(String name, Decoder decoder) { + this.decoders.add(name, decoder); + return this; + } + + /** + * Add multiple decoders. + * + * @param decoders map of codec name to decoder for the codec. + * @return Builder. + */ + public SerializerConfigBuilder decoders(Map decoders) { + this.decoders.addAll(decoders); + return this; + } + + /** + * Automatically create group with provided group properties values, defaulting compatibility to Full Transitive + * and allowMultipleTypes to true. + * Group creation is idempotent. + * + * @param serializationFormat {@link GroupProperties#serializationFormat}. + * @return Builder + */ + public SerializerConfigBuilder createGroup(SerializationFormat serializationFormat) { + return createGroup(serializationFormat, true); + } + + /** + * Automatically create group with provided group properties values, defaulting compatibility to Full Transitive. + * Group creation is idempotent. + * + * @param serializationFormat {@link GroupProperties#serializationFormat}. + * @param allowMultipleTypes {@link GroupProperties#allowMultipleTypes} + * @return Builder + */ + public SerializerConfigBuilder createGroup(SerializationFormat serializationFormat, boolean allowMultipleTypes) { + Compatibility policy = serializationFormat.equals(SerializationFormat.Any) ? 
Compatibility.allowAny() : + Compatibility.fullTransitive(); + return createGroup(serializationFormat, policy, allowMultipleTypes); + } + + /** + * Automatically create group with provided group properties. Group creation is idempotent. + * + * @param serializationFormat {@link GroupProperties#serializationFormat}. + * @param policy {@link GroupProperties#compatibility} + * @param allowMultipleTypes {@link GroupProperties#allowMultipleTypes} + * @return Builder + */ + public SerializerConfigBuilder createGroup(SerializationFormat serializationFormat, Compatibility policy, boolean allowMultipleTypes) { + this.createGroup = new GroupProperties(serializationFormat, policy, allowMultipleTypes); + return this; + } + + /** + * Schema Registry client. Either of client or config should be supplied. + * + * @param client Schema Registry client + * @return Builder + */ + public SerializerConfigBuilder registryClient(SchemaRegistryClient client) { + Preconditions.checkArgument(client != null); + Preconditions.checkState(registryConfig == null, "Cannot specify both client and config"); + this.registryClient = client; + return this; + } + + /** + * Schema Registry client config which is used to initialize the schema registry client. + * Either config or client should be supplied. + * + * @param config Schema Registry client configuration. + * @return Builder + */ + public SerializerConfigBuilder registryConfig(SchemaRegistryClientConfig config) { + Preconditions.checkArgument(config != null); + Preconditions.checkState(registryClient == null, "Cannot specify both client and config"); + this.registryConfig = config; + return this; + } + } + + static class Decoders { + private final ConcurrentHashMap decoders; + + Decoders() { + this.decoders = new ConcurrentHashMap<>(); + this.decoders.put(Codecs.None.getCodec().getName(), Codecs.None.getCodec()); + this.decoders.put(Codecs.GzipCompressor.getCodec().getName(), Codecs.GzipCompressor.getCodec()); + this.decoders.put(Codecs.SnappyCompressor.getCodec().getName(), Codecs.SnappyCompressor.getCodec()); + } + + private void add(String codecName, Decoder decoder) { + Preconditions.checkNotNull(codecName); + Preconditions.checkNotNull(decoder); + decoders.put(codecName, decoder); + } + + private void addAll(Map decoders) { + Preconditions.checkNotNull(decoders); + this.decoders.putAll(decoders); + } + + ByteBuffer decode(CodecType codecType, ByteBuffer bytes) throws IOException { + return decoders.get(codecType.getName()).decode(bytes, codecType.getProperties()); + } + + Set getDecoderNames() { + return decoders.keySet(); + } + } +} diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactory.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactory.java new file mode 100644 index 000000000..e2467ddd0 --- /dev/null +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactory.java @@ -0,0 +1,439 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ */
+package io.pravega.schemaregistry.serializers;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.module.jsonSchema.JsonSchema;
+import com.google.protobuf.DynamicMessage;
+import com.google.protobuf.GeneratedMessageV3;
+import com.google.protobuf.Message;
+import io.pravega.client.stream.Serializer;
+import io.pravega.schemaregistry.common.Either;
+import io.pravega.schemaregistry.contract.data.EncodingInfo;
+import io.pravega.schemaregistry.contract.data.SerializationFormat;
+import io.pravega.schemaregistry.schemas.AvroSchema;
+import io.pravega.schemaregistry.schemas.JSONSchema;
+import io.pravega.schemaregistry.schemas.ProtobufSchema;
+import io.pravega.schemaregistry.schemas.Schema;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.avro.generic.GenericRecord;
+
+import javax.annotation.Nullable;
+import java.nio.ByteBuffer;
+import java.util.Map;
+import java.util.function.BiFunction;
+
+import static com.google.protobuf.DescriptorProtos.FileDescriptorSet;
+import static io.pravega.schemaregistry.serializers.WithSchema.JSON_TRANSFORM;
+import static io.pravega.schemaregistry.serializers.WithSchema.NO_TRANSFORM;
+
+@Slf4j
+public class SerializerFactory {
+    // region avro
+    /**
+     * Creates a typed avro serializer for the Schema. The serializer implementation returned from this method is
+     * responsible for interacting with schema registry service and ensures that only valid registered schemas can be used.
+     *
+     * Note: the returned serializer only implements {@link Serializer#serialize(Object)}.
+     * It does not implement {@link Serializer#deserialize(ByteBuffer)}.
+     *
+     * @param config Serializer Config used for instantiating a new serializer.
+     * @param schemaContainer Schema container that encapsulates an AvroSchema
+     * @param Type of event. It accepts either POJO or Avro generated classes and serializes them.
+     * @return A Serializer Implementation that can be used in {@link io.pravega.client.stream.EventStreamWriter} or
+     * {@link io.pravega.client.stream.TransactionalEventStreamWriter}.
+     */
+    public static Serializer avroSerializer(SerializerConfig config, AvroSchema schemaContainer) {
+        return AvroSerializerFactory.serializer(config, schemaContainer);
+    }
+
+    /**
+     * Creates a typed avro deserializer for the Schema. The deserializer implementation returned from this method is
+     * responsible for interacting with schema registry service and validates the writer schema before using it.
+     *
+     * Note: the returned serializer only implements {@link Serializer#deserialize(ByteBuffer)}.
+     * It does not implement {@link Serializer#serialize(Object)}.
+     *
+     * @param config Serializer Config used for instantiating a new serializer.
+     * @param schemaContainer Schema container that encapsulates an AvroSchema
+     * @param Type of event. The typed event should be an avro generated class. For generic type use {@link #avroGenericDeserializer}
+     * @return A deserializer Implementation that can be used in {@link io.pravega.client.stream.EventStreamReader}.
+     */
+    public static Serializer avroDeserializer(SerializerConfig config, AvroSchema schemaContainer) {
+        return AvroSerializerFactory.deserializer(config, schemaContainer);
+    }
+
+    /**
+     * Creates a generic avro deserializer. It has the optional parameter for schema.
+     * If the schema is not supplied, the writer schema is used for deserialization into {@link GenericRecord}.
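+     * <p>A minimal reader-side sketch, assuming {@code config} points at a group of avro events:
+     * <pre>{@code
+     * Serializer<Object> deserializer = SerializerFactory.avroGenericDeserializer(config, null);
+     * GenericRecord record = (GenericRecord) deserializer.deserialize(serializedBytes);
+     * }</pre>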
+     *
+     * Note: the returned serializer only implements {@link Serializer#deserialize(ByteBuffer)}.
+     * It does not implement {@link Serializer#serialize(Object)}.
+     *
+     * @param config Serializer Config used for instantiating a new serializer.
+     * @param schemaContainer Schema container that encapsulates an AvroSchema. It can be null to indicate that writer schema should
+     *                        be used for deserialization.
+     * @return A deserializer Implementation that can be used in {@link io.pravega.client.stream.EventStreamReader}.
+     */
+    public static Serializer avroGenericDeserializer(SerializerConfig config, @Nullable AvroSchema schemaContainer) {
+        return AvroSerializerFactory.genericDeserializer(config, schemaContainer);
+    }
+
+    /**
+     * A multiplexed Avro serializer that takes a map of schemas and validates them individually.
+     *
+     * @param config Serializer config.
+     * @param schemas map of avro schemas.
+     * @param Base Type of schemas.
+     * @return a Serializer which can serialize events of different types for which schemas are supplied.
+     */
+    public static Serializer avroMultiTypeSerializer(SerializerConfig config, Map, AvroSchema> schemas) {
+        return AvroSerializerFactory.multiTypeSerializer(config, schemas);
+    }
+
+    /**
+     * A multiplexed Avro Deserializer that takes a map of schemas and deserializes events into those events depending
+     * on the object type information in {@link EncodingInfo}.
+     *
+     * @param config Serializer config.
+     * @param schemas map of avro schemas.
+     * @param Base type of schemas.
+     * @return a Deserializer which can deserialize events of different types in the stream into typed objects.
+     */
+    public static Serializer avroMultiTypeDeserializer(
+            SerializerConfig config, Map, AvroSchema> schemas) {
+        return AvroSerializerFactory.multiTypeDeserializer(config, schemas);
+    }
+
+    /**
+     * A multiplexed Avro Deserializer that takes a map of schemas and deserializes events into those events depending
+     * on the object type information in {@link EncodingInfo}.
+     *
+     * @param config Serializer config.
+     * @param schemas map of avro schemas.
+     * @param Base type of schemas.
+     * @return a Deserializer which can deserialize events of different types in the stream into typed objects or a generic
+     *         object.
+     */
+    public static Serializer> avroTypedOrGenericDeserializer(
+            SerializerConfig config, Map, AvroSchema> schemas) {
+        return AvroSerializerFactory.typedOrGenericDeserializer(config, schemas);
+    }
+    // endregion
+
+    // region protobuf
+
+    /**
+     * Creates a typed protobuf serializer for the Schema. The serializer implementation returned from this method is
+     * responsible for interacting with schema registry service and ensures that only valid registered schemas can be used.
+     *
+     * Note: the returned serializer only implements {@link Serializer#serialize(Object)}.
+     * It does not implement {@link Serializer#deserialize(ByteBuffer)}.
+     *
+     * @param config Serializer Config used for instantiating a new serializer.
+     * @param schemaContainer Schema container that encapsulates a Protobuf Schema.
+     * @param Type of event.
+     * @return A Serializer Implementation that can be used in {@link io.pravega.client.stream.EventStreamWriter} or
+     * {@link io.pravega.client.stream.TransactionalEventStreamWriter}.
+     */
+    public static Serializer protobufSerializer(SerializerConfig config,
+                                                ProtobufSchema schemaContainer) {
+        return ProtobufSerializerFactory.serializer(config, schemaContainer);
+    }
+
+    /**
+     * Creates a typed protobuf deserializer for the Schema.
+     * The deserializer implementation returned from this method is
+     * responsible for interacting with schema registry service and validates the writer schema before using it.
+     *
+     * Note: the returned serializer only implements {@link Serializer#deserialize(ByteBuffer)}.
+     * It does not implement {@link Serializer#serialize(Object)}.
+     *
+     * @param config Serializer Config used for instantiating a new serializer.
+     * @param schemaContainer Schema container that encapsulates a ProtobufSchema
+     * @param Type of event. The typed event should be a protobuf generated class. For generic type use {@link #protobufGenericDeserializer}
+     * @return A deserializer Implementation that can be used in {@link io.pravega.client.stream.EventStreamReader}.
+     */
+    public static Serializer protobufDeserializer(SerializerConfig config,
+                                                  ProtobufSchema schemaContainer) {
+        return ProtobufSerializerFactory.deserializer(config, schemaContainer);
+    }
+
+    /**
+     * Creates a generic protobuf deserializer. It has the optional parameter for schema.
+     * If the schema is not supplied, the writer schema is used for deserialization into {@link DynamicMessage}.
+     *
+     * Note: the returned serializer only implements {@link Serializer#deserialize(ByteBuffer)}.
+     * It does not implement {@link Serializer#serialize(Object)}.
+     *
+     * @param config Serializer Config used for instantiating a new serializer.
+     * @param schema Schema container that encapsulates a ProtobufSchema.
+     * @return A deserializer Implementation that can be used in {@link io.pravega.client.stream.EventStreamReader}.
+     */
+    public static Serializer protobufGenericDeserializer(SerializerConfig config,
+                                                         @Nullable ProtobufSchema schema) {
+        return ProtobufSerializerFactory.genericDeserializer(config, schema);
+    }
+
+    /**
+     * A multiplexed Protobuf serializer that takes a map of schemas and validates them individually.
+     *
+     * @param config Serializer config.
+     * @param schemas map of protobuf schemas.
+     * @param Base Type of schemas.
+     * @return a Serializer which can serialize events of different types for which schemas are supplied.
+     */
+    public static Serializer protobufMultiTypeSerializer(
+            SerializerConfig config, Map, ProtobufSchema> schemas) {
+        return ProtobufSerializerFactory.multiTypeSerializer(config, schemas);
+    }
+
+    /**
+     * A multiplexed protobuf Deserializer that takes a map of schemas and deserializes events into those events depending
+     * on the object type information in {@link EncodingInfo}.
+     *
+     * @param config Serializer config.
+     * @param schemas map of protobuf schemas.
+     * @param Base type of schemas.
+     * @return a Deserializer which can deserialize events of different types in the stream into typed objects.
+     */
+    public static Serializer protobufMultiTypeDeserializer(
+            SerializerConfig config, Map, ProtobufSchema> schemas) {
+        return ProtobufSerializerFactory.multiTypeDeserializer(config, schemas);
+    }
+
+    /**
+     * A multiplexed protobuf Deserializer that takes a map of schemas and deserializes events into those events depending
+     * on the object type information in {@link EncodingInfo}.
+     *
+     * @param config Serializer config.
+     * @param schemas map of protobuf schemas.
+     * @param Base type of schemas.
+     * @return a Deserializer which can deserialize events of different types in the stream into typed objects or a
+     *         generic {@link DynamicMessage}.
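+     *         <p>A sketch of handling mixed events ({@code MyMessage} and the {@code schemas} map are illustrative):
+     *         <pre>{@code
+     *         Serializer<Either<MyMessage, DynamicMessage>> deserializer =
+     *                 SerializerFactory.protobufTypedOrGenericDeserializer(config, schemas);
+     *         Either<MyMessage, DynamicMessage> event = deserializer.deserialize(serializedBytes);
+     *         if (event.isLeft()) {
+     *             MyMessage typed = event.getLeft();         // matched one of the supplied schemas
+     *         } else {
+     *             DynamicMessage generic = event.getRight(); // fell back to the writer schema
+     *         }
+     *         }</pre>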
+     */
+    public static Serializer> protobufTypedOrGenericDeserializer(
+            SerializerConfig config, Map, ProtobufSchema> schemas) {
+        return ProtobufSerializerFactory.typedOrGenericDeserializer(config, schemas);
+    }
+    //endregion
+
+    // region json
+
+    /**
+     * Creates a typed json serializer for the Schema. The serializer implementation returned from this method is
+     * responsible for interacting with schema registry service and ensures that only valid registered schemas can be used.
+     *
+     * Note: the returned serializer only implements {@link Serializer#serialize(Object)}.
+     * It does not implement {@link Serializer#deserialize(ByteBuffer)}.
+     *
+     * @param config Serializer Config used for instantiating a new serializer.
+     * @param schemaContainer Schema container that encapsulates a Json Schema.
+     * @param Type of event.
+     * @return A Serializer Implementation that can be used in {@link io.pravega.client.stream.EventStreamWriter} or
+     * {@link io.pravega.client.stream.TransactionalEventStreamWriter}.
+     */
+    public static Serializer jsonSerializer(SerializerConfig config, JSONSchema schemaContainer) {
+        return JsonSerializerFactory.serializer(config, schemaContainer);
+    }
+
+    /**
+     * Creates a typed json deserializer for the Schema. The deserializer implementation returned from this method is
+     * responsible for interacting with schema registry service and validates the writer schema before using it.
+     *
+     * Note: the returned serializer only implements {@link Serializer#deserialize(ByteBuffer)}.
+     * It does not implement {@link Serializer#serialize(Object)}.
+     *
+     * @param config Serializer Config used for instantiating a new serializer.
+     * @param schemaContainer Schema container that encapsulates a JSONSchema
+     * @param Type of event. The typed event should be a Java POJO. For generic type use {@link #jsonGenericDeserializer}
+     * @return A deserializer Implementation that can be used in {@link io.pravega.client.stream.EventStreamReader}.
+     */
+    public static Serializer jsonDeserializer(SerializerConfig config, JSONSchema schemaContainer) {
+        return JsonSerializerFactory.deserializer(config, schemaContainer);
+    }
+
+    /**
+     * Creates a generic json deserializer.
+     *
+     * Note: the returned serializer only implements {@link Serializer#deserialize(ByteBuffer)}.
+     * It does not implement {@link Serializer#serialize(Object)}.
+     *
+     * @param config Serializer Config used for instantiating a new serializer.
+     * @return A deserializer Implementation that can be used in {@link io.pravega.client.stream.EventStreamReader}.
+     */
+    public static Serializer> jsonGenericDeserializer(SerializerConfig config) {
+        return JsonSerializerFactory.genericDeserializer(config);
+    }
+
+    /**
+     * Creates a generic json deserializer which deserializes bytes into a json string.
+     *
+     * Note: the returned serializer only implements {@link Serializer#deserialize(ByteBuffer)}.
+     * It does not implement {@link Serializer#serialize(Object)}.
+     *
+     * @param config Serializer Config used for instantiating a new serializer.
+     * @return A deserializer Implementation that can be used in {@link io.pravega.client.stream.EventStreamReader}.
+     */
+    public static Serializer jsonStringDeserializer(SerializerConfig config) {
+        return JsonSerializerFactory.jsonStringDeserializer(config);
+    }
+
+    /**
+     * A multiplexed Json serializer that takes a map of schemas and validates them individually.
+     *
+     * @param config Serializer config.
+     * @param schemas map of json schemas.
+     * @param Base Type of schemas.
+     * @return a Serializer which can serialize events of different types for which schemas are supplied.
+     */
+    public static Serializer jsonMultiTypeSerializer(
+            SerializerConfig config, Map, JSONSchema> schemas) {
+        return JsonSerializerFactory.multiTypeSerializer(config, schemas);
+    }
+
+    /**
+     * A multiplexed json Deserializer that takes a map of schemas and deserializes events into those events depending
+     * on the object type information in {@link EncodingInfo}.
+     *
+     * @param config Serializer config.
+     * @param schemas map of json schemas.
+     * @param Base type of schemas.
+     * @return a Deserializer which can deserialize events of different types in the stream into typed objects.
+     */
+    public static Serializer jsonMultiTypeDeserializer(
+            SerializerConfig config, Map, JSONSchema> schemas) {
+        return JsonSerializerFactory.multiTypeDeserializer(config, schemas);
+    }
+
+    /**
+     * A multiplexed json Deserializer that takes a map of schemas and deserializes events into those events depending
+     * on the object type information in {@link EncodingInfo}.
+     *
+     * @param config Serializer config.
+     * @param schemas map of json schemas.
+     * @param Base type of schemas.
+     * @return a Deserializer which can deserialize events of different types in the stream into typed objects or a
+     *         generic {@link WithSchema} of {@link JsonNode}.
+     */
+    public static Serializer>> jsonTypedOrGenericDeserializer(
+            SerializerConfig config, Map, JSONSchema> schemas) {
+        return JsonSerializerFactory.typedOrGenericDeserializer(config, schemas);
+    }
+    //endregion
+
+    // region custom
+
+    /**
+     * A serializer that uses user supplied implementation of {@link CustomSerializer} for serializing the objects.
+     * It also takes user supplied schema and registers/validates it against the registry.
+     *
+     * @param config Serializer config.
+     * @param schema Schema for the object to serialize
+     * @param serializer user supplied serializer
+     * @param Type of object to serialize
+     * @return Serializer that uses user supplied serialization function for serializing events.
+     */
+    public static Serializer customSerializer(SerializerConfig config, Schema schema, CustomSerializer serializer) {
+        return CustomSerializerFactory.serializer(config, schema, serializer);
+    }
+
+    /**
+     * A deserializer that uses user supplied implementation of {@link CustomDeserializer} for deserializing the data into
+     * typed java objects.
+     *
+     * @param config Serializer config.
+     * @param schema optional Schema for the object to deserialize
+     * @param deserializer user supplied deserializer
+     * @param Type of object to deserialize
+     * @return Deserializer that uses user supplied deserialization function for deserializing payload into typed events.
+     */
+    public static Serializer customDeserializer(SerializerConfig config, @Nullable Schema schema,
+                                                CustomDeserializer deserializer) {
+        return CustomSerializerFactory.deserializer(config, schema, deserializer);
+    }
+    // endregion
+
+    // region multiformat
+    /**
+     * A multi format serializer that takes objects with schemas for the three supported formats - avro, protobuf and json.
+     * It then serializes the object using the format specific serializer. The events are supplied using an encapsulating
+     * object called WithSchema which has both the event and the schema.
+     * It only serializes the events while ensuring that the corresponding schema was registered with the service.
+     * If {@link SerializerConfig#registerSchema} is set to true, it registers the schema before using it.
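+     * <p>A writer-side sketch ({@code myEvent} and its avro {@code avroSchema} are illustrative; any of the
+     * three formats can be wrapped the same way):
+     * <pre>{@code
+     * Serializer<WithSchema<Object>> serializer = SerializerFactory.serializerWithSchema(config);
+     * AvroSchema<Object> schema = AvroSchema.of(avroSchema); // from an org.apache.avro.Schema
+     * ByteBuffer serialized = serializer.serialize(WithSchema.avro(myEvent, schema));
+     * }</pre>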
+ * This serializer contacts schema registry once for every new schema that it encounters, and it fetches the + * encoding id for the schema and codec pair. + * + * @param config Serializer config + * @return A multi format serializer which serializes events from all three of Avro, Protobuf and json formats. + */ + public static Serializer> serializerWithSchema(SerializerConfig config) { + return MultiFormatSerializerFactory.serializer(config); + } + + /** + * A deserializer that can deserialize data where each event could be written with either of avro, protobuf or json + * serialization formats. It deserializes them into format specific generic objects. + * An event serialized with avro is deserialized into {@link GenericRecord} or {@link Object} with schema as {@link org.apache.avro.Schema}. + * An event serialized with protobuf is deserialized into {@link DynamicMessage} with schema as {@link FileDescriptorSet}. + * An event serialized with json is deserialized into a {@link JsonNode} with schema as {@link JsonSchema}. + * The object and schema are wrapped in {@link WithSchema} object. + * + * @param config serializer config + * @return a deserializer that can deserialize events serialized as protobuf, json or avro into java objects. + */ + public static Serializer> deserializerWithSchema(SerializerConfig config) { + return MultiFormatSerializerFactory.deserializerWithSchema(config); + } + + /** + * A deserializer that can read data where each event could be written with either of avro, protobuf or json + * serialization formats. + * An event serialized with avro is deserialized into {@link GenericRecord}. + * An event serialized with protobuf is deserialized into {@link DynamicMessage}. + * An event serialized with json is deserialized into {@link WithSchema} object of {@link JsonNode} and {@link JsonSchema}. + * + * @param config serializer config + * @return a deserializer that can deserialize events serialized as protobuf, json or avro into java objects. + */ + public static Serializer genericDeserializer(SerializerConfig config) { + return deserializeAsT(config, NO_TRANSFORM); + } + + /** + * This is a convenience serializer shortcut that calls {@link #deserializeAsT} with a transform to + * convert the object to JSON string. + * + * @param config serializer config + * @return a deserializer that can deserialize protobuf, json or avro events into java objects. + */ + public static Serializer deserializeAsJsonString(SerializerConfig config) { + return deserializeAsT(config, JSON_TRANSFORM); + } + + /** + * A deserializer that can read data where each event could be written with different serialization formats. + * Formats supported are protobuf, avro and json. + * An event serialized with avro is deserialized into {@link GenericRecord}. + * An event serialized with protobuf is deserialized into {@link DynamicMessage}. + * An event serialized with json is deserialized into {@link WithSchema} object of {@link JsonNode} and {@link JsonSchema}. + * + * This also takes a transform function which is applied on the deserialized object and should transform the object + * into the type T. + * + * @param config serializer config + * @param transform a transform function that transforms the deserialized object based on the serialization format + * into an object of type T. + * @param Type of object to get back from deserializer. + * @return a deserializer that can deserialize protobuf, json or avro events into java objects. 
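+     *         <p>For example, a transform that renders every event as a string tagged with its format
+     *         (illustrative):
+     *         <pre>{@code
+     *         Serializer<String> deserializer = SerializerFactory.deserializeAsT(config,
+     *                 (format, obj) -> format.name() + ":" + obj);
+     *         }</pre>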
+ */ + public static Serializer deserializeAsT(SerializerConfig config, + BiFunction transform) { + return MultiFormatSerializerFactory.deserializeAsT(config, transform); + } + // endregion +} diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactoryHelper.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactoryHelper.java new file mode 100644 index 000000000..c7816189a --- /dev/null +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactoryHelper.java @@ -0,0 +1,79 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.serializers; + +import com.google.common.base.Strings; +import io.pravega.client.ClientConfig; +import io.pravega.client.stream.impl.Credentials; +import io.pravega.schemaregistry.client.SchemaRegistryClient; +import io.pravega.schemaregistry.client.SchemaRegistryClientConfig; +import io.pravega.schemaregistry.client.SchemaRegistryClientFactory; +import io.pravega.schemaregistry.contract.data.CodecType; +import lombok.extern.slf4j.Slf4j; + +import java.util.List; +import java.util.stream.Collectors; + +@Slf4j +class SerializerFactoryHelper { + static SchemaRegistryClient initForSerializer(SerializerConfig config) { + SchemaRegistryClient schemaRegistryClient = getSchemaRegistryClient(config); + createGroup(schemaRegistryClient, config); + registerCodec(schemaRegistryClient, config); + return schemaRegistryClient; + } + + static SchemaRegistryClient initForDeserializer(SerializerConfig config) { + SchemaRegistryClient schemaRegistryClient = getSchemaRegistryClient(config); + createGroup(schemaRegistryClient, config); + failOnCodecMismatch(schemaRegistryClient, config); + return schemaRegistryClient; + } + + private static SchemaRegistryClient getSchemaRegistryClient(SerializerConfig config) { + if (config.getRegistryConfigOrClient().isLeft()) { + // if auth is enabled and creds are not supplied, reuse the credentials from pravega client config which may + // be loaded from system properties. 
+ SchemaRegistryClientConfig left = config.getRegistryConfigOrClient().getLeft(); + if (left.isAuthEnabled() && Strings.isNullOrEmpty(left.getAuthMethod())) { + Credentials creds = ClientConfig.builder().build().getCredentials(); + left = SchemaRegistryClientConfig.builder().schemaRegistryUri(left.getSchemaRegistryUri()).authEnabled(left.isAuthEnabled()) + .authMethod(creds.getAuthenticationType()).authToken(creds.getAuthenticationToken()) + .build(); + } + return SchemaRegistryClientFactory.withNamespace(config.getNamespace(), left); + } else { + return config.getRegistryConfigOrClient().getRight(); + } + } + + private static void createGroup(SchemaRegistryClient client, SerializerConfig config) { + if (config.isCreateGroup()) { + client.addGroup(config.getGroupId(), config.getGroupProperties()); + } + } + + private static void registerCodec(SchemaRegistryClient client, SerializerConfig config) { + if (config.isRegisterCodec()) { + client.addCodecType(config.getGroupId(), config.getEncoder().getCodecType()); + } + } + + private static void failOnCodecMismatch(SchemaRegistryClient client, SerializerConfig config) { + if (config.isFailOnCodecMismatch()) { + List codecTypesInGroup = client.getCodecTypes(config.getGroupId()).stream() + .map(CodecType::getName).collect(Collectors.toList()); + if (!config.getDecoders().getDecoderNames().containsAll(codecTypesInGroup)) { + log.warn("Not all CodecTypes are supported by reader. Required codecTypes = {}", codecTypesInGroup); + throw new RuntimeException(String.format("Need all codecTypes in %s", codecTypesInGroup.toString())); + } + } + } +} diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/WithSchema.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/WithSchema.java new file mode 100644 index 000000000..3787da2f1 --- /dev/null +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/WithSchema.java @@ -0,0 +1,248 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.serializers; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.base.Preconditions; +import com.google.protobuf.DescriptorProtos; +import com.google.protobuf.DynamicMessage; +import com.google.protobuf.GeneratedMessageV3; +import com.google.protobuf.InvalidProtocolBufferException; +import com.google.protobuf.util.JsonFormat; +import io.pravega.schemaregistry.contract.data.SchemaInfo; +import io.pravega.schemaregistry.contract.data.SerializationFormat; +import io.pravega.schemaregistry.schemas.AvroSchema; +import io.pravega.schemaregistry.schemas.JSONSchema; +import io.pravega.schemaregistry.schemas.ProtobufSchema; +import io.pravega.schemaregistry.schemas.Schema; +import lombok.AccessLevel; +import lombok.Getter; +import org.apache.avro.generic.IndexedRecord; + +import java.util.function.BiFunction; + +/** + * Container class for a deserialized object with its corresponding schema. + * + * @param Type of object. 
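+ *
+ * <p>A reader-side sketch, assuming {@code deserializer} came from
+ * {@link SerializerFactory#deserializerWithSchema} (names are illustrative):
+ * <pre>{@code
+ * WithSchema<Object> event = deserializer.deserialize(serializedBytes);
+ * if (event.hasAvroSchema()) {
+ *     org.apache.avro.Schema writerSchema = event.getAvroSchema();
+ * }
+ * String asJson = event.getJsonString();
+ * }</pre>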
+ */
+public class WithSchema {
+    public static final BiFunction JSON_TRANSFORM = WithSchema::toJsonString;
+
+    public static final BiFunction NO_TRANSFORM = (x, y) -> y;
+
+    private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
+    private static final JsonFormat.Printer PRINTER = JsonFormat.printer().preservingProtoFieldNames()
+                                                                .usingTypeRegistry(JsonFormat.TypeRegistry.newBuilder().build());
+
+    @Getter(AccessLevel.PACKAGE)
+    private final Schema schema;
+    @Getter
+    private final Object object;
+    private final BiFunction transform;
+
+    WithSchema(SchemaInfo schemaInfo, Object obj, BiFunction transform) {
+        this.object = obj;
+        this.transform = transform;
+        if (schemaInfo != null) {
+            this.schema = convertToSchema(schemaInfo);
+        } else {
+            this.schema = null;
+        }
+    }
+
+    private Schema convertToSchema(SchemaInfo schemaInfo) {
+        Schema schema;
+        switch (schemaInfo.getSerializationFormat()) {
+            case Avro:
+                schema = AvroSchema.from(schemaInfo);
+                break;
+            case Protobuf:
+                schema = ProtobufSchema.from(schemaInfo);
+                break;
+            case Json:
+                schema = JSONSchema.from(schemaInfo);
+                break;
+            case Custom:
+                schema = new Schema() {
+                    @Override
+                    public SchemaInfo getSchemaInfo() {
+                        return schemaInfo;
+                    }
+
+                    @Override
+                    public Class getTClass() {
+                        return Object.class;
+                    }
+                };
+                break;
+            default:
+                throw new IllegalArgumentException("Serialization format not supported");
+        }
+        return schema;
+    }
+
+    /**
+     * Check whether the schema is of type Avro.
+     *
+     * @return True if the schema is for avro, false otherwise.
+     */
+    public boolean hasAvroSchema() {
+        return schema instanceof AvroSchema;
+    }
+
+    /**
+     * Avro Schema for the underlying deserialized object. This is available if {@link WithSchema#hasAvroSchema()} returns true.
+     * This means the underlying object was serialized as avro.
+     *
+     * @return Avro {@link org.apache.avro.Schema} representing the schema for the object.
+     */
+    @SuppressWarnings("unchecked")
+    public org.apache.avro.Schema getAvroSchema() {
+        return ((AvroSchema) schema).getSchema();
+    }
+
+    /**
+     * Check whether the schema is of type Protobuf.
+     *
+     * @return True if the schema is for protobuf, false otherwise.
+     */
+    public boolean hasProtobufSchema() {
+        return schema instanceof ProtobufSchema;
+    }
+
+    /**
+     * Protobuf Schema for the underlying deserialized object. This is available if {@link WithSchema#hasProtobufSchema()} returns true.
+     * This means the underlying object was serialized as protobuf.
+     *
+     * @return Protobuf {@link com.google.protobuf.DescriptorProtos.FileDescriptorSet} representing the schema for the object.
+     */
+    @SuppressWarnings("unchecked")
+    public DescriptorProtos.FileDescriptorSet getProtobufSchema() {
+        return ((ProtobufSchema) schema).getFileDescriptorSet();
+    }
+
+    /**
+     * Check whether the schema is of type Json.
+     *
+     * @return True if the schema is for json, false otherwise.
+     */
+    public boolean hasJsonSchema() {
+        return schema instanceof JSONSchema;
+    }
+
+    /**
+     * Json Schema for the underlying deserialized object. This is available if {@link WithSchema#hasJsonSchema()} returns true.
+     * This means the underlying object was serialized as Json.
+     *
+     * @return Json {@link org.everit.json.schema.Schema} representing the schema for the object.
+     */
+    @SuppressWarnings("unchecked")
+    public org.everit.json.schema.Schema getJsonSchema() {
+        return ((JSONSchema) schema).getSchema();
+    }
+
+    /**
+     * Applies the transform on the deserialized object.
+     *
+     * @return Transformed object of type T.
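+     *         With {@link #JSON_TRANSFORM} this is the event rendered as a json string; with
+     *         {@link #NO_TRANSFORM} it is the deserialized object returned unchanged.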
+ */ + public T getTransformed() { + if (schema == null) { + throw new IllegalArgumentException("Need schema to be able to transform."); + } + return transform.apply(schema.getSchemaInfo().getSerializationFormat(), object); + } + + /** + * Applies JsonString transformation to convert the deserialized object into a json string. + * + * @return Json String for the object. + */ + public String getJsonString() { + if (schema == null) { + throw new IllegalArgumentException(); + } + return JSON_TRANSFORM.apply(schema.getSchemaInfo().getSerializationFormat(), object); + } + + private static String toJsonString(SerializationFormat format, Object deserialize) { + String jsonString; + try { + switch (format) { + case Avro: + if (deserialize instanceof IndexedRecord) { + jsonString = deserialize.toString(); + } else { + jsonString = OBJECT_MAPPER.writeValueAsString(deserialize); + } + break; + case Protobuf: + jsonString = PRINTER.print((DynamicMessage) deserialize); + break; + case Json: + if (deserialize instanceof WithSchema) { + jsonString = OBJECT_MAPPER.writeValueAsString(((WithSchema) deserialize).object); + } else { + jsonString = OBJECT_MAPPER.writeValueAsString(deserialize); + } + break; + default: + jsonString = OBJECT_MAPPER.writeValueAsString(deserialize); + } + } catch (InvalidProtocolBufferException | JsonProcessingException e) { + throw new IllegalArgumentException("Invalid deserialized object. Failed to convert to json string.", e); + } + return jsonString; + } + + /** + * Create WithSchema object for avro. + * + * @param object Object. + * @param avroSchema Avro Schema for object. + * @param Type of object. + * @return A WithSchema object which has Avro Schema and the corresponding object. + */ + public static WithSchema avro(T object, AvroSchema avroSchema) { + Preconditions.checkNotNull(object, "object cannot be null"); + Preconditions.checkNotNull(avroSchema, "schema cannot be null"); + return new WithSchema<>(avroSchema.getSchemaInfo(), object, (x, y) -> object); + } + + /** + * Create WithSchema object for protobuf. + * + * @param object Object. + * @param protobufSchema Protobuf Schema for object. + * @param Type of object. + * @return A WithSchema object which has Protobuf Schema and the corresponding object. + */ + public static WithSchema proto(T object, ProtobufSchema protobufSchema) { + Preconditions.checkNotNull(object, "object cannot be null"); + Preconditions.checkNotNull(protobufSchema, "schema cannot be null"); + return new WithSchema<>(protobufSchema.getSchemaInfo(), object, (x, y) -> object); + } + + /** + * Create WithSchema object for json. + * + * @param object Object. + * @param jsonSchema Json Schema for object. + * @param Type of object. + * @return A WithSchema object which has Json schema and the corresponding object. + */ + public static WithSchema json(T object, JSONSchema jsonSchema) { + Preconditions.checkNotNull(object, "object cannot be null"); + Preconditions.checkNotNull(jsonSchema, "schema cannot be null"); + return new WithSchema<>(jsonSchema.getSchemaInfo(), object, (x, y) -> object); + } +} diff --git a/serializers/src/test/java/io/pravega/schemaregistry/codec/CodecTest.java b/serializers/src/test/java/io/pravega/schemaregistry/codec/CodecTest.java new file mode 100644 index 000000000..6e86596b1 --- /dev/null +++ b/serializers/src/test/java/io/pravega/schemaregistry/codec/CodecTest.java @@ -0,0 +1,57 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.codec; + +import com.google.common.base.Charsets; +import com.google.common.collect.ImmutableMap; +import io.pravega.common.io.EnhancedByteArrayOutputStream; +import org.junit.Test; + +import java.io.IOException; +import java.nio.ByteBuffer; +import java.util.Arrays; + +import static org.junit.Assert.*; + +public class CodecTest { + @Test + public void testCodec() throws IOException { + byte[] testStringBytes = "this is a test string".getBytes(Charsets.UTF_8); + Codec snappy = Codecs.SnappyCompressor.getCodec(); + assertEquals(snappy.getCodecType(), Codecs.SnappyCompressor.getCodec().getCodecType()); + EnhancedByteArrayOutputStream byteArrayOutputStream = new EnhancedByteArrayOutputStream(); + snappy.encode(ByteBuffer.wrap(testStringBytes), byteArrayOutputStream); + ByteBuffer encoded = ByteBuffer.wrap(byteArrayOutputStream.getData().array(), 0, byteArrayOutputStream.getData().getLength()); + assertNotEquals(encoded.remaining(), testStringBytes.length); + ByteBuffer decoded = snappy.decode(encoded, ImmutableMap.of()); + assertTrue(Arrays.equals(decoded.array(), testStringBytes)); + + byteArrayOutputStream = new EnhancedByteArrayOutputStream(); + Codec gzip = Codecs.GzipCompressor.getCodec(); + assertEquals(gzip.getCodecType(), Codecs.GzipCompressor.getCodec().getCodecType()); + gzip.encode(ByteBuffer.wrap(testStringBytes), byteArrayOutputStream); + encoded = ByteBuffer.wrap(byteArrayOutputStream.getData().array(), 0, byteArrayOutputStream.getData().getLength()); + assertNotEquals(encoded.remaining(), testStringBytes.length); + decoded = gzip.decode(encoded, ImmutableMap.of()); + assertTrue(Arrays.equals(decoded.array(), testStringBytes)); + + byteArrayOutputStream = new EnhancedByteArrayOutputStream(); + Codec none = Codecs.None.getCodec(); + assertEquals(none.getCodecType(), Codecs.None.getCodec().getCodecType()); + none.encode(ByteBuffer.wrap(testStringBytes), byteArrayOutputStream); + encoded = ByteBuffer.wrap(byteArrayOutputStream.getData().array(), 0, byteArrayOutputStream.getData().getLength()); + assertEquals(encoded.remaining(), testStringBytes.length); + decoded = none.decode(encoded, ImmutableMap.of()); + + byte[] decodedArray = new byte[decoded.remaining()]; + decoded.get(decodedArray); + assertTrue(Arrays.equals(decodedArray, testStringBytes)); + } +} diff --git a/serializers/src/test/java/io/pravega/schemaregistry/schemas/SchemasTest.java b/serializers/src/test/java/io/pravega/schemaregistry/schemas/SchemasTest.java new file mode 100644 index 000000000..57bc2e7c8 --- /dev/null +++ b/serializers/src/test/java/io/pravega/schemaregistry/schemas/SchemasTest.java @@ -0,0 +1,128 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.schemas; + +import com.fasterxml.jackson.databind.JsonNode; +import com.google.protobuf.DescriptorProtos; +import com.google.protobuf.DynamicMessage; +import com.google.protobuf.GeneratedMessageV3; +import io.pravega.schemaregistry.contract.data.SerializationFormat; +import io.pravega.schemaregistry.testobjs.DerivedUser1; +import io.pravega.schemaregistry.testobjs.DerivedUser2; +import io.pravega.schemaregistry.testobjs.SchemaDefinitions; +import io.pravega.schemaregistry.testobjs.User; +import io.pravega.schemaregistry.testobjs.generated.ProtobufTest; +import io.pravega.schemaregistry.testobjs.generated.Test1; +import io.pravega.schemaregistry.testobjs.generated.Test2; +import org.apache.avro.specific.SpecificRecordBase; +import org.junit.Test; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; + +import static io.pravega.schemaregistry.testobjs.SchemaDefinitions.JSON_SCHEMA_STRING; +import static io.pravega.schemaregistry.testobjs.SchemaDefinitions.JSON_SCHEMA_STRING_DRAFT_4; +import static io.pravega.schemaregistry.testobjs.SchemaDefinitions.JSON_SCHEMA_STRING_DRAFT_7; +import static org.junit.Assert.*; + +public class SchemasTest { + @Test + public void testAvroSchema() { + AvroSchema schema = AvroSchema.of(SchemaDefinitions.SCHEMA1); + assertNotNull(schema.getSchema()); + assertEquals(schema.getSchemaInfo().getSerializationFormat(), SerializationFormat.Avro); + + AvroSchema schema2 = AvroSchema.of(User.class); + assertNotNull(schema2.getSchema()); + assertEquals(schema2.getSchemaInfo().getSerializationFormat(), SerializationFormat.Avro); + + AvroSchema schema3 = AvroSchema.of(Test1.class); + assertNotNull(schema3.getSchema()); + assertEquals(schema3.getSchemaInfo().getSerializationFormat(), SerializationFormat.Avro); + + AvroSchema schemabase1 = AvroSchema.ofSpecificRecord(Test1.class); + assertNotNull(schemabase1.getSchema()); + assertEquals(schemabase1.getSchemaInfo().getSerializationFormat(), SerializationFormat.Avro); + + AvroSchema schemabase2 = AvroSchema.ofSpecificRecord(Test2.class); + assertNotNull(schemabase2.getSchema()); + assertEquals(schemabase2.getSchemaInfo().getSerializationFormat(), SerializationFormat.Avro); + } + + @Test + public void testProtobufSchema() throws IOException { + ProtobufSchema sm1 = ProtobufSchema.of(ProtobufTest.Message1.class); + assertNotNull(sm1.getParser()); + assertNotNull(sm1.getFileDescriptorSet()); + assertEquals(sm1.getSchemaInfo().getSerializationFormat(), SerializationFormat.Protobuf); + + ProtobufSchema bm1 = ProtobufSchema.ofGeneratedMessageV3(ProtobufTest.Message1.class); + assertNotNull(bm1.getParser()); + assertNotNull(bm1.getFileDescriptorSet()); + assertEquals(bm1.getSchemaInfo().getSerializationFormat(), SerializationFormat.Protobuf); + + ProtobufSchema bm2 = ProtobufSchema.ofGeneratedMessageV3(ProtobufTest.Message2.class); + assertNotNull(bm2.getParser()); + assertNotNull(bm2.getFileDescriptorSet()); + assertEquals(bm2.getSchemaInfo().getSerializationFormat(), SerializationFormat.Protobuf); + + Path path = Paths.get("src/test/resources/proto/protobufTest.pb"); + byte[] schemaBytes = Files.readAllBytes(path); + DescriptorProtos.FileDescriptorSet descriptorSet = DescriptorProtos.FileDescriptorSet.parseFrom(schemaBytes); + + ProtobufSchema schema = ProtobufSchema.of(ProtobufTest.Message1.class.getName(), descriptorSet); + 
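+        // Built from a type name and a descriptor set rather than a generated class, so there is no
+        // typed parser: such a schema deserializes into DynamicMessage, hence the null parser below.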
assertNull(schema.getParser()); + assertNotNull(schema.getFileDescriptorSet()); + assertEquals(schema.getSchemaInfo().getSerializationFormat(), SerializationFormat.Protobuf); + + ProtobufSchema schema2 = ProtobufSchema.of(ProtobufTest.Message1.class, descriptorSet); + assertNotNull(schema2.getParser()); + assertNotNull(schema2.getFileDescriptorSet()); + assertEquals(schema2.getSchemaInfo().getSerializationFormat(), SerializationFormat.Protobuf); + + ProtobufSchema baseSchema1 = ProtobufSchema.ofGeneratedMessageV3(ProtobufTest.Message1.class, descriptorSet); + assertNotNull(baseSchema1.getParser()); + assertNotNull(baseSchema1.getFileDescriptorSet()); + assertEquals(baseSchema1.getSchemaInfo().getSerializationFormat(), SerializationFormat.Protobuf); + + ProtobufSchema baseSchema2 = ProtobufSchema.ofGeneratedMessageV3(ProtobufTest.Message2.class, descriptorSet); + assertNotNull(baseSchema2.getParser()); + assertNotNull(baseSchema2.getFileDescriptorSet()); + assertEquals(baseSchema2.getSchemaInfo().getSerializationFormat(), SerializationFormat.Protobuf); + } + + @Test + public void testJsonSchema() { + JSONSchema schema = JSONSchema.of(User.class); + assertNotNull(schema.getSchema()); + assertEquals(schema.getSchemaInfo().getSerializationFormat(), SerializationFormat.Json); + + JSONSchema schema2 = JSONSchema.of("Person", JSON_SCHEMA_STRING, String.class); + assertNotNull(schema2.getSchema()); + assertEquals(schema2.getSchemaInfo().getSerializationFormat(), SerializationFormat.Json); + + JSONSchema schema3 = JSONSchema.of("", JSON_SCHEMA_STRING_DRAFT_4, JsonNode.class); + assertNotNull(schema3.getSchema()); + assertEquals(schema3.getSchemaInfo().getSerializationFormat(), SerializationFormat.Json); + + JSONSchema schema4 = JSONSchema.of("", JSON_SCHEMA_STRING_DRAFT_7, JsonNode.class); + assertNotNull(schema4.getSchema()); + assertEquals(schema4.getSchemaInfo().getSerializationFormat(), SerializationFormat.Json); + + JSONSchema baseSchema1 = JSONSchema.ofBaseType(DerivedUser1.class, User.class); + assertNotNull(baseSchema1.getSchema()); + assertEquals(baseSchema1.getSchemaInfo().getSerializationFormat(), SerializationFormat.Json); + JSONSchema baseSchema2 = JSONSchema.ofBaseType(DerivedUser2.class, User.class); + assertNotNull(baseSchema2.getSchema()); + assertEquals(baseSchema2.getSchemaInfo().getSerializationFormat(), SerializationFormat.Json); + } +} diff --git a/serializers/src/test/java/io/pravega/schemaregistry/serializers/CacheTest.java b/serializers/src/test/java/io/pravega/schemaregistry/serializers/CacheTest.java new file mode 100644 index 000000000..313355dbd --- /dev/null +++ b/serializers/src/test/java/io/pravega/schemaregistry/serializers/CacheTest.java @@ -0,0 +1,63 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.serializers; + +import com.google.common.collect.ImmutableMap; +import io.pravega.schemaregistry.client.SchemaRegistryClient; +import io.pravega.schemaregistry.codec.Codecs; +import io.pravega.schemaregistry.contract.data.EncodingId; +import io.pravega.schemaregistry.contract.data.EncodingInfo; +import io.pravega.schemaregistry.contract.data.SchemaInfo; +import io.pravega.schemaregistry.contract.data.SerializationFormat; +import io.pravega.schemaregistry.contract.data.VersionInfo; +import org.junit.Test; + +import java.nio.ByteBuffer; +import java.util.concurrent.ExecutionException; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; + +public class CacheTest { + @Test + public void testCache() throws ExecutionException { + SchemaRegistryClient client = mock(SchemaRegistryClient.class); + String groupId = "groupId"; + EncodingId encodingId = new EncodingId(0); + EncodingInfo encodingInfo = new EncodingInfo(new VersionInfo("name", 0, 0), + new SchemaInfo("name", SerializationFormat.Avro, ByteBuffer.wrap(new byte[0]), ImmutableMap.of()), + Codecs.SnappyCompressor.getCodec().getCodecType()); + doAnswer(x -> encodingInfo).when(client).getEncodingInfo(eq(groupId), eq(encodingId)); + EncodingId encodingId2 = new EncodingId(1); + EncodingInfo encodingInfo2 = new EncodingInfo(new VersionInfo("name", 0, 1), + new SchemaInfo("name", SerializationFormat.Avro, ByteBuffer.wrap(new byte[0]), ImmutableMap.of()), + Codecs.SnappyCompressor.getCodec().getCodecType()); + doAnswer(x -> encodingInfo2).when(client).getEncodingInfo(eq(groupId), eq(encodingId2)); + EncodingId encodingId3 = new EncodingId(2); + EncodingInfo encodingInfo3 = new EncodingInfo(new VersionInfo("name", 0, 2), + new SchemaInfo("name", SerializationFormat.Avro, ByteBuffer.wrap(new byte[0]), ImmutableMap.of()), + Codecs.SnappyCompressor.getCodec().getCodecType()); + doAnswer(x -> encodingInfo3).when(client).getEncodingInfo(eq(groupId), eq(encodingId3)); + // create a cache with max size 2 + EncodingCache cache = new EncodingCache(groupId, client, 2); + assertEquals(cache.getMapForCache().size(), 0); + assertEquals(encodingInfo, cache.getGroupEncodingInfo(encodingId)); + assertEquals(cache.getMapForCache().size(), 1); + assertEquals(encodingInfo2, cache.getGroupEncodingInfo(encodingId2)); + assertEquals(cache.getMapForCache().size(), 2); + assertEquals(encodingInfo3, cache.getGroupEncodingInfo(encodingId3)); + assertEquals(cache.getMapForCache().size(), 2); + assertTrue(cache.getMapForCache().containsKey(encodingId2)); + assertTrue(cache.getMapForCache().containsKey(encodingId3)); + } +} diff --git a/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java b/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java new file mode 100644 index 000000000..8ee567058 --- /dev/null +++ b/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java @@ -0,0 +1,492 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.serializers; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.jsonFormatVisitors.JsonFormatTypes; +import com.fasterxml.jackson.databind.node.TextNode; +import com.fasterxml.jackson.module.jsonSchema.JsonSchema; +import com.google.common.base.Strings; +import com.google.common.collect.ImmutableMap; +import com.google.protobuf.DescriptorProtos; +import com.google.protobuf.DynamicMessage; +import com.google.protobuf.GeneratedMessageV3; +import io.pravega.client.stream.Serializer; +import io.pravega.schemaregistry.client.SchemaRegistryClient; +import io.pravega.schemaregistry.codec.Codecs; +import io.pravega.schemaregistry.common.Either; +import io.pravega.schemaregistry.contract.data.EncodingId; +import io.pravega.schemaregistry.contract.data.EncodingInfo; +import io.pravega.schemaregistry.contract.data.GroupProperties; +import io.pravega.schemaregistry.contract.data.SchemaInfo; +import io.pravega.schemaregistry.contract.data.SchemaWithVersion; +import io.pravega.schemaregistry.contract.data.SerializationFormat; +import io.pravega.schemaregistry.contract.data.VersionInfo; +import io.pravega.schemaregistry.schemas.AvroSchema; +import io.pravega.schemaregistry.schemas.JSONSchema; +import io.pravega.schemaregistry.schemas.ProtobufSchema; +import io.pravega.schemaregistry.testobjs.Address; +import io.pravega.schemaregistry.testobjs.DerivedUser1; +import io.pravega.schemaregistry.testobjs.DerivedUser2; +import io.pravega.schemaregistry.testobjs.SchemaDefinitions; +import io.pravega.schemaregistry.testobjs.generated.ProtobufTest; +import io.pravega.schemaregistry.testobjs.generated.Test1; +import io.pravega.schemaregistry.testobjs.generated.Test2; +import io.pravega.test.common.AssertExtensions; +import lombok.Data; +import lombok.NoArgsConstructor; +import lombok.SneakyThrows; +import org.apache.avro.generic.GenericData; +import org.apache.avro.generic.GenericRecord; +import org.apache.avro.specific.SpecificRecordBase; +import org.junit.Test; + +import java.io.IOException; +import java.nio.ByteBuffer; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.HashMap; +import java.util.Map; + +import static org.junit.Assert.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.*; + +public class SerializerTest { + @Test + public void testAvroSerializers() { + SchemaRegistryClient client = mock(SchemaRegistryClient.class); + + SerializerConfig config = SerializerConfig.builder().registryClient(client).groupId("groupId").build(); + AvroSchema schema1 = AvroSchema.of(Test1.class); + AvroSchema schema2 = AvroSchema.of(Test2.class); + VersionInfo versionInfo1 = new VersionInfo("name", 0, 0); + VersionInfo versionInfo2 = new VersionInfo("name", 1, 1); + doAnswer(x -> GroupProperties.builder().serializationFormat(SerializationFormat.Any).build()) + .when(client).getGroupProperties(anyString()); + doAnswer(x -> versionInfo1).when(client).getVersionForSchema(anyString(), eq(schema1.getSchemaInfo())); + doAnswer(x -> versionInfo2).when(client).getVersionForSchema(anyString(), eq(schema2.getSchemaInfo())); + doAnswer(x -> new 
+
+        AvroSchema<Object> of = AvroSchema.of(SchemaDefinitions.ENUM);
+        VersionInfo versionInfo3 = new VersionInfo(of.getSchema().getFullName(), 0, 2);
+        doAnswer(x -> versionInfo3).when(client).getVersionForSchema(anyString(), eq(of.getSchemaInfo()));
+        doAnswer(x -> new EncodingId(2)).when(client).getEncodingId(anyString(), eq(versionInfo3), any());
+        doAnswer(x -> new EncodingInfo(versionInfo3, of.getSchemaInfo(), Codecs.None.getCodec().getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(2)));
+
+        Serializer<Object> serializerStr = SerializerFactory.avroSerializer(config, of);
+        GenericData.EnumSymbol enumSymbol = new GenericData.EnumSymbol(of.getSchema(), "a");
+        ByteBuffer serialized1 = serializerStr.serialize(enumSymbol);
+
+        Serializer<Object> deserializer1 = SerializerFactory.avroDeserializer(config, of);
+        Object deserializedEnum = deserializer1.deserialize(serialized1);
+        assertEquals(deserializedEnum, enumSymbol);
+
+        Serializer<Test1> serializer = SerializerFactory.avroSerializer(config, schema1);
+        Test1 test1 = new Test1("name", 1);
+        ByteBuffer serialized = serializer.serialize(test1);
+
+        Serializer<Test1> deserializer = SerializerFactory.avroDeserializer(config, schema1);
+        Test1 deserialized = deserializer.deserialize(serialized);
+        assertEquals(deserialized, test1);
+
+        serialized = serializer.serialize(test1);
+        Serializer<Object> genericDeserializer = SerializerFactory.avroGenericDeserializer(config, null);
+        Object genericDeserialized = genericDeserializer.deserialize(serialized);
+        assertTrue(genericDeserialized instanceof GenericRecord);
+        assertEquals(((GenericRecord) genericDeserialized).get("name").toString(), "name");
+        assertEquals(((GenericRecord) genericDeserialized).get("field1"), 1);
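+
+        // The multi-type serializer below takes a map keyed by event class; at write time it picks the
+        // schema registered for the concrete class of the event, so a single stream can carry both
+        // Test1 and Test2 events.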
+        // multi type
+        Test2 test2 = new Test2("name", 1, "2");
+
+        AvroSchema<SpecificRecordBase> schema1Base = AvroSchema.ofSpecificRecord(Test1.class);
+        AvroSchema<SpecificRecordBase> schema2Base = AvroSchema.ofSpecificRecord(Test2.class);
+        Map<Class<? extends SpecificRecordBase>, AvroSchema<SpecificRecordBase>> map = new HashMap<>();
+        map.put(Test1.class, schema1Base);
+        map.put(Test2.class, schema2Base);
+        Serializer<SpecificRecordBase> multiSerializer = SerializerFactory.avroMultiTypeSerializer(config, map);
+        serialized = multiSerializer.serialize(test1);
+        Serializer<SpecificRecordBase> multiDeserializer = SerializerFactory.avroMultiTypeDeserializer(config, map);
+        SpecificRecordBase deserialized2 = multiDeserializer.deserialize(serialized);
+        assertEquals(deserialized2, test1);
+
+        serialized = multiSerializer.serialize(test2);
+        deserialized2 = multiDeserializer.deserialize(serialized);
+        assertEquals(deserialized2, test2);
+
+        Map<Class<? extends SpecificRecordBase>, AvroSchema<SpecificRecordBase>> map2 = new HashMap<>();
+        map2.put(Test1.class, schema1Base);
+        Serializer<Either<SpecificRecordBase, Object>> fallbackDeserializer = SerializerFactory.avroTypedOrGenericDeserializer(config, map2);
+
+        serialized = multiSerializer.serialize(test1);
+        Either<SpecificRecordBase, Object> fallback = fallbackDeserializer.deserialize(serialized);
+        assertTrue(fallback.isLeft());
+        assertEquals(fallback.getLeft(), test1);
+
+        serialized = multiSerializer.serialize(test2);
+
+        fallback = fallbackDeserializer.deserialize(serialized);
+        assertTrue(fallback.isRight());
+    }
+
+    @Test
+    @SneakyThrows
+    public void testAvroSerializersReflect() {
+        TestClass test1 = new TestClass("name");
+        AvroSchema<TestClass> schema1 = AvroSchema.of(TestClass.class);
+
+        SchemaRegistryClient client = mock(SchemaRegistryClient.class);
+
+        SerializerConfig config = SerializerConfig.builder().registryClient(client).groupId("groupId").build();
+
+        VersionInfo versionInfo1 = new VersionInfo("name", 0, 0);
+        doAnswer(x -> GroupProperties.builder().serializationFormat(SerializationFormat.Any).build())
+                .when(client).getGroupProperties(anyString());
+        doAnswer(x -> versionInfo1).when(client).getVersionForSchema(anyString(), eq(schema1.getSchemaInfo()));
+        doAnswer(x -> new EncodingId(0)).when(client).getEncodingId(anyString(), eq(versionInfo1), any());
+        doAnswer(x -> new EncodingInfo(versionInfo1, schema1.getSchemaInfo(), Codecs.None.getCodec().getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(0)));
+        doAnswer(x -> true).when(client).canReadUsing(anyString(), any());
+
+        Serializer<TestClass> serializer = SerializerFactory.avroSerializer(config, schema1);
+        ByteBuffer serialized = serializer.serialize(test1);
+
+        Serializer<TestClass> deserializer = SerializerFactory.avroDeserializer(config, schema1);
+        TestClass deserialized = deserializer.deserialize(serialized);
+        assertEquals(deserialized, test1);
+    }
+
+    @Test
+    public void testProtobufSerializers() throws IOException {
+        SchemaRegistryClient client = mock(SchemaRegistryClient.class);
+        SerializerConfig config = SerializerConfig.builder().registryClient(client).groupId("groupId").build();
+        Path path = Paths.get("src/test/resources/proto/protobufTest.pb");
+        byte[] schemaBytes = Files.readAllBytes(path);
+        DescriptorProtos.FileDescriptorSet descriptorSet = DescriptorProtos.FileDescriptorSet.parseFrom(schemaBytes);
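+        // protobufTest.pb (parsed above) is a serialized FileDescriptorSet; such a file is typically
+        // produced with something like `protoc --descriptor_set_out=protobufTest.pb --include_imports
+        // protobufTest.proto` (command shown for illustration only).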
+        ProtobufSchema<ProtobufTest.Message2> schema1 = ProtobufSchema.of(ProtobufTest.Message2.class, descriptorSet);
+        ProtobufSchema<ProtobufTest.Message3> schema2 = ProtobufSchema.of(ProtobufTest.Message3.class, descriptorSet);
+
+        VersionInfo versionInfo1 = new VersionInfo("name", 0, 0);
+        VersionInfo versionInfo2 = new VersionInfo("name", 1, 1);
+        doAnswer(x -> GroupProperties.builder().serializationFormat(SerializationFormat.Any).build())
+                .when(client).getGroupProperties(anyString());
+        doAnswer(x -> versionInfo1).when(client).getVersionForSchema(anyString(), eq(schema1.getSchemaInfo()));
+        doAnswer(x -> versionInfo2).when(client).getVersionForSchema(anyString(), eq(schema2.getSchemaInfo()));
+        doAnswer(x -> new EncodingId(0)).when(client).getEncodingId(anyString(), eq(versionInfo1), any());
+        doAnswer(x -> new EncodingId(1)).when(client).getEncodingId(anyString(), eq(versionInfo2), any());
+        doAnswer(x -> new EncodingInfo(versionInfo1, schema1.getSchemaInfo(), Codecs.None.getCodec().getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(0)));
+        doAnswer(x -> new EncodingInfo(versionInfo2, schema2.getSchemaInfo(), Codecs.None.getCodec().getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(1)));
+        doAnswer(x -> true).when(client).canReadUsing(anyString(), any());
+
+        Serializer<ProtobufTest.Message2> serializer = SerializerFactory.protobufSerializer(config, schema1);
+        ProtobufTest.Message2 message = ProtobufTest.Message2.newBuilder().setName("name").setField1(1).build();
+        ByteBuffer serialized = serializer.serialize(message);
+
+        Serializer<ProtobufTest.Message2> deserializer = SerializerFactory.protobufDeserializer(config, schema1);
+        ProtobufTest.Message2 deserialized = deserializer.deserialize(serialized);
+        assertEquals(deserialized, message);
+
+        serialized = serializer.serialize(message);
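+        // The generic protobuf deserializer below needs no compiled message classes: it returns a
+        // DynamicMessage built from the descriptor carried by the registered schema. Passing null here
+        // (presumably) lets it resolve the writer schema from each event's encoding header.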
+        Serializer<DynamicMessage> genericDeserializer = SerializerFactory.protobufGenericDeserializer(config, null);
+        DynamicMessage generic = genericDeserializer.deserialize(serialized);
+        assertEquals(generic.getAllFields().size(), 2);
+
+        // multi type
+        ProtobufTest.Message3 message2 = ProtobufTest.Message3.newBuilder().setName("name").setField1(1).setField2(2).build();
+
+        ProtobufSchema<GeneratedMessageV3> schema1Base = ProtobufSchema.ofGeneratedMessageV3(ProtobufTest.Message2.class, descriptorSet);
+        ProtobufSchema<GeneratedMessageV3> schema2Base = ProtobufSchema.ofGeneratedMessageV3(ProtobufTest.Message3.class, descriptorSet);
+        Map<Class<? extends GeneratedMessageV3>, ProtobufSchema<GeneratedMessageV3>> map = new HashMap<>();
+        map.put(ProtobufTest.Message2.class, schema1Base);
+        map.put(ProtobufTest.Message3.class, schema2Base);
+        Serializer<GeneratedMessageV3> multiSerializer = SerializerFactory.protobufMultiTypeSerializer(config, map);
+        serialized = multiSerializer.serialize(message);
+        Serializer<GeneratedMessageV3> multiDeserializer = SerializerFactory.protobufMultiTypeDeserializer(config, map);
+        GeneratedMessageV3 deserialized2 = multiDeserializer.deserialize(serialized);
+        assertEquals(deserialized2, message);
+
+        serialized = multiSerializer.serialize(message2);
+        deserialized2 = multiDeserializer.deserialize(serialized);
+        assertEquals(deserialized2, message2);
+
+        Map<Class<? extends GeneratedMessageV3>, ProtobufSchema<GeneratedMessageV3>> map2 = new HashMap<>();
+        map2.put(ProtobufTest.Message2.class, schema1Base);
+        Serializer<Either<GeneratedMessageV3, DynamicMessage>> fallbackDeserializer = SerializerFactory.protobufTypedOrGenericDeserializer(config, map2);
+        serialized = multiSerializer.serialize(message);
+        Either<GeneratedMessageV3, DynamicMessage> fallback = fallbackDeserializer.deserialize(serialized);
+        assertTrue(fallback.isLeft());
+        assertEquals(fallback.getLeft(), message);
+
+        serialized = multiSerializer.serialize(message2);
+
+        fallback = fallbackDeserializer.deserialize(serialized);
+        assertTrue(fallback.isRight());
+    }
+
+    @Test
+    public void testJsonSerializers() throws JsonProcessingException {
+        SchemaRegistryClient client = mock(SchemaRegistryClient.class);
+        SerializerConfig config = SerializerConfig.builder().registryClient(client).groupId("groupId").build();
+        JSONSchema<DerivedUser1> schema1 = JSONSchema.of(DerivedUser1.class);
+        JSONSchema<DerivedUser2> schema2 = JSONSchema.of(DerivedUser2.class);
+
+        VersionInfo versionInfo1 = new VersionInfo("name", 0, 0);
+        VersionInfo versionInfo2 = new VersionInfo("name", 1, 1);
+        doAnswer(x -> GroupProperties.builder().serializationFormat(SerializationFormat.Any).build())
+                .when(client).getGroupProperties(anyString());
+        doAnswer(x -> versionInfo1).when(client).getVersionForSchema(anyString(), eq(schema1.getSchemaInfo()));
+        doAnswer(x -> versionInfo2).when(client).getVersionForSchema(anyString(), eq(schema2.getSchemaInfo()));
+        doAnswer(x -> new EncodingId(0)).when(client).getEncodingId(anyString(), eq(versionInfo1), any());
+        doAnswer(x -> new EncodingId(1)).when(client).getEncodingId(anyString(), eq(versionInfo2), any());
+        doAnswer(x -> new EncodingInfo(versionInfo1, schema1.getSchemaInfo(), Codecs.None.getCodec().getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(0)));
+        doAnswer(x -> new EncodingInfo(versionInfo2, schema2.getSchemaInfo(), Codecs.None.getCodec().getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(1)));
+        doAnswer(x -> true).when(client).canReadUsing(anyString(), any());
+
+        Serializer<DerivedUser1> serializer = SerializerFactory.jsonSerializer(config, schema1);
+        DerivedUser1 user1 = new DerivedUser1("user", new Address("street", "city"), 2, "user1");
+        ByteBuffer serialized = serializer.serialize(user1);
+
+        Serializer<DerivedUser1> deserializer = SerializerFactory.jsonDeserializer(config, schema1);
+        DerivedUser1 deserialized = deserializer.deserialize(serialized);
+        assertEquals(deserialized, user1);
+
+        serialized = serializer.serialize(user1);
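+        // jsonGenericDeserializer returns a WithSchema<JsonNode> envelope, pairing the deserialized
+        // JsonNode with the writer schema that was used to encode it.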
+        Serializer<WithSchema<JsonNode>> genericDeserializer = SerializerFactory.jsonGenericDeserializer(config);
+        WithSchema<JsonNode> generic = genericDeserializer.deserialize(serialized);
+        assertEquals(((JSONSchema) generic.getSchema()).getSchema(), schema1.getSchema());
+        assertEquals(((JsonNode) generic.getObject()).size(), 4);
+
+        serialized = serializer.serialize(user1);
+        Serializer<String> stringDeserializer = SerializerFactory.jsonStringDeserializer(config);
+        String str = stringDeserializer.deserialize(serialized);
+        assertFalse(Strings.isNullOrEmpty(str));
+
+        String schemaString = "{\"type\": \"object\",\"title\": \"The external data schema\",\"properties\": {\"content\": {\"type\": \"string\"}}}";
+
+        JSONSchema<HashMap> myData = JSONSchema.of("MyData", schemaString, HashMap.class);
+        VersionInfo versionInfo3 = new VersionInfo("myData", 0, 2);
+        doAnswer(x -> versionInfo3).when(client).getVersionForSchema(anyString(), eq(myData.getSchemaInfo()));
+        doAnswer(x -> new EncodingId(2)).when(client).getEncodingId(anyString(), eq(versionInfo3), any());
+        doAnswer(x -> new EncodingInfo(versionInfo3, myData.getSchemaInfo(), Codecs.None.getCodec().getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(2)));
+
+        Serializer<HashMap> serializer2 = SerializerFactory.jsonSerializer(config, myData);
+        HashMap<String, String> jsonObject = new HashMap<>();
+        jsonObject.put("content", "mxx");
+
+        ByteBuffer s = serializer2.serialize(jsonObject);
+        str = stringDeserializer.deserialize(s);
+
+        String stringSchema = new ObjectMapper().writeValueAsString(JsonSchema.minimalForFormat(JsonFormatTypes.STRING));
+
+        JSONSchema<String> strSchema = JSONSchema.of("string", stringSchema, String.class);
+        VersionInfo versionInfo4 = new VersionInfo("myData", 0, 3);
+        doAnswer(x -> versionInfo4).when(client).getVersionForSchema(anyString(), eq(strSchema.getSchemaInfo()));
+        doAnswer(x -> new EncodingId(3)).when(client).getEncodingId(anyString(), eq(versionInfo4), any());
+        doAnswer(x -> new EncodingInfo(versionInfo4, strSchema.getSchemaInfo(), Codecs.None.getCodec().getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(3)));
+
+        Serializer<String> serializer3 = SerializerFactory.jsonSerializer(config, strSchema);
+        Serializer<String> deserializer3 = SerializerFactory.jsonDeserializer(config, strSchema);
+        Serializer<WithSchema<JsonNode>> generic3 = SerializerFactory.jsonGenericDeserializer(config);
+        String string = "a";
+        s = serializer3.serialize(string);
+        Object x = deserializer3.deserialize(s);
+        assertNotNull(x);
+        assertEquals(x, string);
+        s = serializer3.serialize(string);
+        Object jsonNode = generic3.deserialize(s);
+        assertTrue(((WithSchema) jsonNode).getObject() instanceof TextNode);
+        assertEquals(((TextNode) ((WithSchema) jsonNode).getObject()).textValue(), string);
+        // multi type
+        DerivedUser2 user2 = new DerivedUser2("user", new Address("street", "city"), 2, "user2");
+
+        JSONSchema<Object> schema1Base = JSONSchema.ofBaseType(DerivedUser1.class, Object.class);
+        JSONSchema<Object> schema2Base = JSONSchema.ofBaseType(DerivedUser2.class, Object.class);
+        Map<Class<?>, JSONSchema<Object>> map = new HashMap<>();
+        map.put(DerivedUser1.class, schema1Base);
+        map.put(DerivedUser2.class, schema2Base);
+        Serializer<Object> multiSerializer = SerializerFactory.jsonMultiTypeSerializer(config, map);
+        serialized = multiSerializer.serialize(user1);
+        Serializer<Object> multiDeserializer = SerializerFactory.jsonMultiTypeDeserializer(config, map);
+        Object deserialized2 = multiDeserializer.deserialize(serialized);
+        assertEquals(deserialized2, user1);
+
+        serialized = multiSerializer.serialize(user2);
+        deserialized2 = multiDeserializer.deserialize(serialized);
+        assertEquals(deserialized2, user2);
+
+        Map<Class<?>, JSONSchema<Object>> map2 = new HashMap<>();
+        map2.put(DerivedUser1.class, schema1Base);
+        Serializer<Either<Object, WithSchema<JsonNode>>> fallbackDeserializer = SerializerFactory.jsonTypedOrGenericDeserializer(config, map2);
+        serialized = multiSerializer.serialize(user1);
+        Either<Object, WithSchema<JsonNode>> fallback = fallbackDeserializer.deserialize(serialized);
+        assertTrue(fallback.isLeft());
+        assertEquals(fallback.getLeft(), user1);
+
+        serialized = multiSerializer.serialize(user2);
+
+        fallback = fallbackDeserializer.deserialize(serialized);
+        assertTrue(fallback.isRight());
+    }
+
+    @Test
+    public void testMultiformatDeserializers() throws IOException {
+        SchemaRegistryClient client = mock(SchemaRegistryClient.class);
+        Path path = Paths.get("src/test/resources/proto/protobufTest.pb");
+        byte[] schemaBytes = Files.readAllBytes(path);
+        DescriptorProtos.FileDescriptorSet descriptorSet = DescriptorProtos.FileDescriptorSet.parseFrom(schemaBytes);
+
+        SerializerConfig config = SerializerConfig.builder().registryClient(client).groupId("groupId").build();
+        AvroSchema<Test1> schema1 = AvroSchema.of(Test1.class);
+        ProtobufSchema<ProtobufTest.Message2> schema2 = ProtobufSchema.of(ProtobufTest.Message2.class, descriptorSet);
+        JSONSchema<DerivedUser1> schema3 = JSONSchema.of(DerivedUser1.class);
+
+        VersionInfo versionInfo1 = new VersionInfo("avro", 0, 0);
+        VersionInfo versionInfo2 = new VersionInfo("proto", 1, 1);
+        VersionInfo versionInfo3 = new VersionInfo("json", 2, 2);
+
+        doAnswer(x -> GroupProperties.builder().serializationFormat(SerializationFormat.Any).build())
+                .when(client).getGroupProperties(anyString());
+        doAnswer(x -> versionInfo1).when(client).getVersionForSchema(anyString(), eq(schema1.getSchemaInfo()));
+        doAnswer(x -> versionInfo2).when(client).getVersionForSchema(anyString(), eq(schema2.getSchemaInfo()));
+        doAnswer(x -> versionInfo3).when(client).getVersionForSchema(anyString(), eq(schema3.getSchemaInfo()));
+        doAnswer(x -> new EncodingId(0)).when(client).getEncodingId(anyString(), eq(versionInfo1), any());
+        doAnswer(x -> new EncodingId(1)).when(client).getEncodingId(anyString(), eq(versionInfo2), any());
+        doAnswer(x -> new EncodingId(2)).when(client).getEncodingId(anyString(), eq(versionInfo3), any());
+        doAnswer(x -> new EncodingInfo(versionInfo1, schema1.getSchemaInfo(), Codecs.None.getCodec().getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(0)));
+        doAnswer(x -> new EncodingInfo(versionInfo2, schema2.getSchemaInfo(), Codecs.None.getCodec().getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(1)));
+        doAnswer(x -> new EncodingInfo(versionInfo3, schema3.getSchemaInfo(), Codecs.None.getCodec().getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(2)));
+        doAnswer(x -> true).when(client).canReadUsing(anyString(), any());
+
+        Serializer<Test1> avroSerializer = SerializerFactory.avroSerializer(config, schema1);
+        Test1 test1 = new Test1("name", 1);
+        ByteBuffer serializedAvro = avroSerializer.serialize(test1);
+
+        Serializer<ProtobufTest.Message2> protobufSerializer = SerializerFactory.protobufSerializer(config, schema2);
+        ProtobufTest.Message2 message = ProtobufTest.Message2.newBuilder().setName("name").setField1(1).build();
+        ByteBuffer serializedProto = protobufSerializer.serialize(message);
+
+        Serializer<DerivedUser1> jsonSerializer = SerializerFactory.jsonSerializer(config, schema3);
+        DerivedUser1 user1 = new DerivedUser1("user", new Address("street", "city"), 2, "user1");
+        ByteBuffer serializedJson = jsonSerializer.serialize(user1);
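+
+        // SerializerFactory.genericDeserializer is format-agnostic: it (presumably) reads the encoding
+        // header, fetches the EncodingInfo, and dispatches on its SerializationFormat, which is why the
+        // single deserializer below can return GenericRecord, DynamicMessage, or WithSchema.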
+        Serializer<Object> deserializer = SerializerFactory.genericDeserializer(config);
+        Object deserialized = deserializer.deserialize(serializedAvro);
+        assertTrue(deserialized instanceof GenericRecord);
+        deserialized = deserializer.deserialize(serializedProto);
+        assertTrue(deserialized instanceof DynamicMessage);
+        deserialized = deserializer.deserialize(serializedJson);
+        assertTrue(deserialized instanceof WithSchema);
+
+        Serializer<String> jsonStringDeserializer = SerializerFactory.deserializeAsJsonString(config);
+        serializedAvro.position(0);
+        String jsonString = jsonStringDeserializer.deserialize(serializedAvro);
+        assertNotNull(jsonString);
+        serializedProto.position(0);
+        jsonString = jsonStringDeserializer.deserialize(serializedProto);
+        assertNotNull(jsonString);
+        serializedJson.position(0);
+        jsonString = jsonStringDeserializer.deserialize(serializedJson);
+        assertNotNull(jsonString);
+    }
+
+    @Test
+    public void testNoEncodingProto() throws IOException {
+        SchemaRegistryClient client = mock(SchemaRegistryClient.class);
+        SerializerConfig config = SerializerConfig.builder().registryClient(client).groupId("groupId")
+                .writeEncodingHeader(false).build();
+        Path path = Paths.get("src/test/resources/proto/protobufTest.pb");
+        byte[] schemaBytes = Files.readAllBytes(path);
+        DescriptorProtos.FileDescriptorSet descriptorSet = DescriptorProtos.FileDescriptorSet.parseFrom(schemaBytes);
+        ProtobufSchema<ProtobufTest.Message2> schema1 = ProtobufSchema.of(ProtobufTest.Message2.class, descriptorSet);
+
+        VersionInfo versionInfo1 = new VersionInfo("name", 0, 0);
+        doAnswer(x -> GroupProperties.builder().serializationFormat(SerializationFormat.Any)
+                .properties(ImmutableMap.of()).build())
+                .when(client).getGroupProperties(anyString());
+        doAnswer(x -> versionInfo1).when(client).getVersionForSchema(anyString(), eq(schema1.getSchemaInfo()));
+        doAnswer(x -> new SchemaWithVersion(schema1.getSchemaInfo(), versionInfo1)).when(client).getLatestSchemaVersion(anyString(), any());
+        doAnswer(x -> true).when(client).canReadUsing(anyString(), any());
+
+        Serializer<ProtobufTest.Message2> serializer = SerializerFactory.protobufSerializer(config, schema1);
+        verify(client, never()).getEncodingId(anyString(), any(), any());
+
+        ProtobufTest.Message2 message = ProtobufTest.Message2.newBuilder().setName("name").setField1(1).build();
+        ByteBuffer serialized = serializer.serialize(message);
+
+        Serializer<ProtobufTest.Message2> deserializer = SerializerFactory.protobufDeserializer(config, schema1);
+        verify(client, never()).getEncodingInfo(anyString(), any());
+
+        ProtobufTest.Message2 deserialized = deserializer.deserialize(serialized);
+        assertEquals(deserialized, message);
+
+        serialized = serializer.serialize(message);
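+        // With writeEncodingHeader(false) no encoding id precedes the payload, so schemas cannot be
+        // resolved per event; a generic deserializer therefore needs an explicit schema, and asking for
+        // one with a null schema is rejected below.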
+        AssertExtensions.assertThrows(IllegalArgumentException.class, () -> SerializerFactory.protobufGenericDeserializer(config, null));
+
+        SchemaInfo latestSchema = client.getLatestSchemaVersion("groupId", null).getSchemaInfo();
+        ProtobufSchema<DynamicMessage> schemaDynamic = ProtobufSchema.of(latestSchema.getType(), descriptorSet);
+        Serializer<DynamicMessage> genericDeserializer = SerializerFactory.protobufGenericDeserializer(config, schemaDynamic);
+
+        DynamicMessage generic = genericDeserializer.deserialize(serialized);
+        assertEquals(generic.getAllFields().size(), 2);
+    }
+
+    @Test
+    public void testNoEncodingJson() throws IOException {
+        SchemaRegistryClient client = mock(SchemaRegistryClient.class);
+        SerializerConfig config = SerializerConfig.builder().registryClient(client).groupId("groupId")
+                .writeEncodingHeader(false).build();
+        JSONSchema<DerivedUser1> schema1 = JSONSchema.of(DerivedUser1.class);
+
+        VersionInfo versionInfo1 = new VersionInfo("name", 0, 0);
+        doAnswer(x -> GroupProperties.builder().serializationFormat(SerializationFormat.Any)
+                .properties(ImmutableMap.of()).build())
+                .when(client).getGroupProperties(anyString());
+        doAnswer(x -> versionInfo1).when(client).getVersionForSchema(anyString(), eq(schema1.getSchemaInfo()));
+        doAnswer(x -> new SchemaWithVersion(schema1.getSchemaInfo(), versionInfo1)).when(client).getLatestSchemaVersion(anyString(), any());
+        doAnswer(x -> true).when(client).canReadUsing(anyString(), any());
+
+        Serializer<DerivedUser1> serializer = SerializerFactory.jsonSerializer(config, schema1);
+        verify(client, never()).getEncodingId(anyString(), any(), any());
+        DerivedUser1 user1 = new DerivedUser1("user", new Address("street", "city"), 2, "user1");
+        ByteBuffer serialized = serializer.serialize(user1);
+
+        Serializer<DerivedUser1> deserializer = SerializerFactory.jsonDeserializer(config, schema1);
+        verify(client, never()).getEncodingInfo(anyString(), any());
+        DerivedUser1 deserialized = deserializer.deserialize(serialized);
+        assertEquals(deserialized, user1);
+
+        serialized = serializer.serialize(user1);
+
+        Serializer<WithSchema<JsonNode>> genericDeserializer = SerializerFactory.jsonGenericDeserializer(config);
+
+        WithSchema<JsonNode> generic = genericDeserializer.deserialize(serialized);
+        assertNotNull(generic.getObject());
+        assertNull(generic.getSchema());
+    }
+
+    @Data
+    @NoArgsConstructor
+    public static class TestClass {
+        private String test;
+
+        public TestClass(String test) {
+            this.test = test;
+        }
+    }
+}
diff --git a/serializers/src/test/java/io/pravega/schemaregistry/testobjs/Address.java b/serializers/src/test/java/io/pravega/schemaregistry/testobjs/Address.java
new file mode 100644
index 000000000..fb5113a4d
--- /dev/null
+++ b/serializers/src/test/java/io/pravega/schemaregistry/testobjs/Address.java
@@ -0,0 +1,22 @@
+/**
+ * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ */
+package io.pravega.schemaregistry.testobjs;
+
+import lombok.AllArgsConstructor;
+import lombok.Data;
+import lombok.NoArgsConstructor;
+
+@Data
+@AllArgsConstructor
+@NoArgsConstructor
+public class Address {
+    private String streetAddress;
+    private String city;
+}
diff --git a/serializers/src/test/java/io/pravega/schemaregistry/testobjs/DerivedUser1.java b/serializers/src/test/java/io/pravega/schemaregistry/testobjs/DerivedUser1.java
new file mode 100644
index 000000000..bbf8651e2
--- /dev/null
+++ b/serializers/src/test/java/io/pravega/schemaregistry/testobjs/DerivedUser1.java
@@ -0,0 +1,28 @@
+/**
+ * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.testobjs; + +import lombok.Data; +import lombok.EqualsAndHashCode; +import lombok.Getter; +import lombok.NoArgsConstructor; + +@Data +@NoArgsConstructor +@EqualsAndHashCode(callSuper = true) +public class DerivedUser1 extends User { + @Getter + private String user1; + + public DerivedUser1(String name, Address address, int age, String user1) { + super(name, address, age); + this.user1 = user1; + } +} diff --git a/serializers/src/test/java/io/pravega/schemaregistry/testobjs/DerivedUser2.java b/serializers/src/test/java/io/pravega/schemaregistry/testobjs/DerivedUser2.java new file mode 100644 index 000000000..111caef30 --- /dev/null +++ b/serializers/src/test/java/io/pravega/schemaregistry/testobjs/DerivedUser2.java @@ -0,0 +1,28 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.testobjs; + +import lombok.Data; +import lombok.EqualsAndHashCode; +import lombok.Getter; +import lombok.NoArgsConstructor; + +@Data +@NoArgsConstructor +@EqualsAndHashCode(callSuper = true) +public class DerivedUser2 extends User { + @Getter + private String user2; + + public DerivedUser2(String name, Address address, int age, String user2) { + super(name, address, age); + this.user2 = user2; + } +} diff --git a/serializers/src/test/java/io/pravega/schemaregistry/testobjs/SchemaDefinitions.java b/serializers/src/test/java/io/pravega/schemaregistry/testobjs/SchemaDefinitions.java new file mode 100644 index 000000000..1feebf15f --- /dev/null +++ b/serializers/src/test/java/io/pravega/schemaregistry/testobjs/SchemaDefinitions.java @@ -0,0 +1,109 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.testobjs; + +import org.apache.avro.Schema; +import org.apache.avro.SchemaBuilder; + +public class SchemaDefinitions { + public static final Schema ENUM = SchemaBuilder + .enumeration("a").symbols("a", "b", "c"); + + public static final Schema SCHEMA1 = SchemaBuilder + .record("MyTest") + .fields() + .name("a") + .type(Schema.create(Schema.Type.STRING)) + .noDefault() + .endRecord(); + + public static final Schema SCHEMA2 = SchemaBuilder + .record("MyTest") + .fields() + .name("a") + .type(Schema.create(Schema.Type.STRING)) + .noDefault() + .name("b") + .type(Schema.create(Schema.Type.STRING)) + .withDefault("backwardPolicy compatible with schema1") + .endRecord(); + + public static final Schema SCHEMA3 = SchemaBuilder + .record("MyTest") + .fields() + .name("a") + .type(Schema.create(Schema.Type.STRING)) + .noDefault() + .name("b") + .type(Schema.create(Schema.Type.STRING)) + .noDefault() + .name("c") + .type(Schema.create(Schema.Type.STRING)) + .noDefault() + .endRecord(); + + public static final String JSON_SCHEMA_STRING = "{" + + "\"title\": \"Person\", " + + "\"type\": \"object\", " + + "\"properties\": { " + + "\"name\": {" + + "\"type\": \"string\"" + + "}," + + "\"age\": {" + + "\"type\": \"integer\", \"minimum\": 0" + + "}" + + "}" + + "}"; + + public static final String JSON_SCHEMA_STRING_DRAFT_4 = "{\n" + + " \"$schema\": \"http://json-schema.org/draft-04/schema#\",\n" + + " \"title\": \"User\",\n" + + " \"id\": \"UserV4\",\n" + + " \"type\": \"object\",\n" + + "\t\n" + + " \"properties\": {\n" + + "\t\n" + + " \"id\": {\n" + + " \"type\": \"integer\"\n" + + " },\n" + + "\t\t\n" + + " \"name\": {\n" + + " \"type\": \"string\"\n" + + " },\n" + + "\t\t\n" + + " \"age\": {\n" + + " \"type\": \"number\",\n" + + " \"minimum\": 0,\n" + + " \"exclusiveMinimum\": true\n" + + " }\n" + + " },\n" + + "\t\n" + + " \"required\": [\"id\", \"name\", \"age\"]\n" + + "}"; + + public static final String JSON_SCHEMA_STRING_DRAFT_7 = "{\n" + + " \"$id\": \"UserV7\",\n" + + " \"$schema\": \"http://json-schema.org/draft-07/schema#\",\n" + + " \"title\": \"User\",\n" + + " \"type\": \"object\",\n" + + " \"properties\": {\n" + + " \"firstName\": {\n" + + " \"type\": \"string\"\n" + + " },\n" + + " \"lastName\": {\n" + + " \"type\": \"string\"\n" + + " },\n" + + " \"age\": {\n" + + " \"type\": \"integer\",\n" + + " \"minimum\": 0\n" + + " }\n" + + " }\n" + + "}"; +} diff --git a/serializers/src/test/java/io/pravega/schemaregistry/testobjs/User.java b/serializers/src/test/java/io/pravega/schemaregistry/testobjs/User.java new file mode 100644 index 000000000..fb7129bf3 --- /dev/null +++ b/serializers/src/test/java/io/pravega/schemaregistry/testobjs/User.java @@ -0,0 +1,28 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.testobjs; + +import lombok.AllArgsConstructor; +import lombok.Data; +import lombok.Getter; +import lombok.NoArgsConstructor; + +@Data +@AllArgsConstructor +@NoArgsConstructor +public class User { + @Getter + private String name; + @Getter + private Address address; + @Getter + private int age; + +} diff --git a/serializers/src/test/java/io/pravega/schemaregistry/testobjs/generated/ProtobufTest.java b/serializers/src/test/java/io/pravega/schemaregistry/testobjs/generated/ProtobufTest.java new file mode 100644 index 000000000..96a34c85e --- /dev/null +++ b/serializers/src/test/java/io/pravega/schemaregistry/testobjs/generated/ProtobufTest.java @@ -0,0 +1,2836 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: protobufTest.proto + +package io.pravega.schemaregistry.testobjs.generated; + +public final class ProtobufTest { + private ProtobufTest() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistryLite registry) { + } + + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions( + (com.google.protobuf.ExtensionRegistryLite) registry); + } + public interface InternalMessageOrBuilder extends + // @@protoc_insertion_point(interface_extends:io.pravega.schemaregistry.testobjs.generated.InternalMessage) + com.google.protobuf.MessageOrBuilder { + + /** + * .io.pravega.schemaregistry.testobjs.generated.InternalMessage.Values value = 1; + * @return The enum numeric value on the wire for value. + */ + int getValueValue(); + /** + * .io.pravega.schemaregistry.testobjs.generated.InternalMessage.Values value = 1; + * @return The value. + */ + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.Values getValue(); + } + /** + * Protobuf type {@code io.pravega.schemaregistry.testobjs.generated.InternalMessage} + */ + public static final class InternalMessage extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:io.pravega.schemaregistry.testobjs.generated.InternalMessage) + InternalMessageOrBuilder { + private static final long serialVersionUID = 0L; + // Use InternalMessage.newBuilder() to construct. 
+ private InternalMessage(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private InternalMessage() { + value_ = 0; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new InternalMessage(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private InternalMessage( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 8: { + int rawValue = input.readEnum(); + + value_ = rawValue; + break; + } + default: { + if (!parseUnknownField( + input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_InternalMessage_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_InternalMessage_fieldAccessorTable + .ensureFieldAccessorsInitialized( + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.class, io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.Builder.class); + } + + /** + * Protobuf enum {@code io.pravega.schemaregistry.testobjs.generated.InternalMessage.Values} + */ + public enum Values + implements com.google.protobuf.ProtocolMessageEnum { + /** + * val1 = 0; + */ + val1(0), + /** + * val2 = 1; + */ + val2(1), + /** + * val3 = 2; + */ + val3(2), + /** + * val4 = 3; + */ + val4(3), + UNRECOGNIZED(-1), + ; + + /** + * val1 = 0; + */ + public static final int val1_VALUE = 0; + /** + * val2 = 1; + */ + public static final int val2_VALUE = 1; + /** + * val3 = 2; + */ + public static final int val3_VALUE = 2; + /** + * val4 = 3; + */ + public static final int val4_VALUE = 3; + + + public final int getNumber() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalArgumentException( + "Can't get the number of an unknown enum value."); + } + return value; + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated + public static Values valueOf(int value) { + return forNumber(value); + } + + /** + * @param value The numeric wire value of the corresponding enum entry. 
+ * @return The enum associated with the given numeric wire value. + */ + public static Values forNumber(int value) { + switch (value) { + case 0: return val1; + case 1: return val2; + case 2: return val3; + case 3: return val4; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static final com.google.protobuf.Internal.EnumLiteMap< + Values> internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public Values findValueByNumber(int number) { + return Values.forNumber(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + return getDescriptor().getValues().get(ordinal()); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.getDescriptor().getEnumTypes().get(0); + } + + private static final Values[] VALUES = values(); + + public static Values valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + if (desc.getIndex() == -1) { + return UNRECOGNIZED; + } + return VALUES[desc.getIndex()]; + } + + private final int value; + + private Values(int value) { + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:io.pravega.schemaregistry.testobjs.generated.InternalMessage.Values) + } + + public static final int VALUE_FIELD_NUMBER = 1; + private int value_; + /** + * .io.pravega.schemaregistry.testobjs.generated.InternalMessage.Values value = 1; + * @return The enum numeric value on the wire for value. + */ + public int getValueValue() { + return value_; + } + /** + * .io.pravega.schemaregistry.testobjs.generated.InternalMessage.Values value = 1; + * @return The value. + */ + public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.Values getValue() { + @SuppressWarnings("deprecation") + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.Values result = io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.Values.valueOf(value_); + return result == null ? 
io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.Values.UNRECOGNIZED : result; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (value_ != io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.Values.val1.getNumber()) { + output.writeEnum(1, value_); + } + unknownFields.writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (value_ != io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.Values.val1.getNumber()) { + size += com.google.protobuf.CodedOutputStream + .computeEnumSize(1, value_); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage)) { + return super.equals(obj); + } + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage other = (io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage) obj; + + if (value_ != other.value_) return false; + if (!unknownFields.equals(other.unknownFields)) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + VALUE_FIELD_NUMBER; + hash = (53 * hash) + value_; + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code io.pravega.schemaregistry.testobjs.generated.InternalMessage} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:io.pravega.schemaregistry.testobjs.generated.InternalMessage) + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessageOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_InternalMessage_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_InternalMessage_fieldAccessorTable + .ensureFieldAccessorsInitialized( + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.class, io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.Builder.class); + } + + // Construct using io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + @java.lang.Override + public Builder clear() { + super.clear(); + value_ = 0; + + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_InternalMessage_descriptor; + } + + @java.lang.Override + public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage getDefaultInstanceForType() { + return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.getDefaultInstance(); + } + + @java.lang.Override + public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage build() { + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage buildPartial() { + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage result = new io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage(this); + result.value_ = value_; + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return super.setField(field, value); + } + @java.lang.Override + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + 
@java.lang.Override + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, java.lang.Object value) { + return super.setRepeatedField(field, index, value); + } + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return super.addRepeatedField(field, value); + } + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage) { + return mergeFrom((io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage other) { + if (other == io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.getDefaultInstance()) return this; + if (other.value_ != 0) { + setValueValue(other.getValueValue()); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private int value_ = 0; + /** + * .io.pravega.schemaregistry.testobjs.generated.InternalMessage.Values value = 1; + * @return The enum numeric value on the wire for value. + */ + public int getValueValue() { + return value_; + } + /** + * .io.pravega.schemaregistry.testobjs.generated.InternalMessage.Values value = 1; + * @param value The enum numeric value on the wire for value to set. + * @return This builder for chaining. + */ + public Builder setValueValue(int value) { + value_ = value; + onChanged(); + return this; + } + /** + * .io.pravega.schemaregistry.testobjs.generated.InternalMessage.Values value = 1; + * @return The value. + */ + public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.Values getValue() { + @SuppressWarnings("deprecation") + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.Values result = io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.Values.valueOf(value_); + return result == null ? io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.Values.UNRECOGNIZED : result; + } + /** + * .io.pravega.schemaregistry.testobjs.generated.InternalMessage.Values value = 1; + * @param value The value to set. + * @return This builder for chaining. 
+ */ + public Builder setValue(io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.Values value) { + if (value == null) { + throw new NullPointerException(); + } + + value_ = value.getNumber(); + onChanged(); + return this; + } + /** + * .io.pravega.schemaregistry.testobjs.generated.InternalMessage.Values value = 1; + * @return This builder for chaining. + */ + public Builder clearValue() { + + value_ = 0; + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:io.pravega.schemaregistry.testobjs.generated.InternalMessage) + } + + // @@protoc_insertion_point(class_scope:io.pravega.schemaregistry.testobjs.generated.InternalMessage) + private static final io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage(); + } + + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public InternalMessage parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new InternalMessage(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface Message1OrBuilder extends + // @@protoc_insertion_point(interface_extends:io.pravega.schemaregistry.testobjs.generated.Message1) + com.google.protobuf.MessageOrBuilder { + + /** + * .io.pravega.schemaregistry.testobjs.generated.InternalMessage internal = 1; + * @return Whether the internal field is set. + */ + boolean hasInternal(); + /** + * .io.pravega.schemaregistry.testobjs.generated.InternalMessage internal = 1; + * @return The internal. + */ + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage getInternal(); + /** + * .io.pravega.schemaregistry.testobjs.generated.InternalMessage internal = 1; + */ + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessageOrBuilder getInternalOrBuilder(); + + /** + * string name = 2; + * @return The name. + */ + java.lang.String getName(); + /** + * string name = 2; + * @return The bytes for name. 
+ */ + com.google.protobuf.ByteString + getNameBytes(); + } + /** + * Protobuf type {@code io.pravega.schemaregistry.testobjs.generated.Message1} + */ + public static final class Message1 extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:io.pravega.schemaregistry.testobjs.generated.Message1) + Message1OrBuilder { + private static final long serialVersionUID = 0L; + // Use Message1.newBuilder() to construct. + private Message1(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private Message1() { + name_ = ""; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new Message1(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private Message1( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.Builder subBuilder = null; + if (internal_ != null) { + subBuilder = internal_.toBuilder(); + } + internal_ = input.readMessage(io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(internal_); + internal_ = subBuilder.buildPartial(); + } + + break; + } + case 18: { + java.lang.String s = input.readStringRequireUtf8(); + + name_ = s; + break; + } + default: { + if (!parseUnknownField( + input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_Message1_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_Message1_fieldAccessorTable + .ensureFieldAccessorsInitialized( + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1.class, io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1.Builder.class); + } + + public static final int INTERNAL_FIELD_NUMBER = 1; + private io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage internal_; + /** + * .io.pravega.schemaregistry.testobjs.generated.InternalMessage internal = 1; + * @return Whether the internal field is set. 
+ */ + public boolean hasInternal() { + return internal_ != null; + } + /** + * .io.pravega.schemaregistry.testobjs.generated.InternalMessage internal = 1; + * @return The internal. + */ + public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage getInternal() { + return internal_ == null ? io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.getDefaultInstance() : internal_; + } + /** + * .io.pravega.schemaregistry.testobjs.generated.InternalMessage internal = 1; + */ + public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessageOrBuilder getInternalOrBuilder() { + return getInternal(); + } + + public static final int NAME_FIELD_NUMBER = 2; + private volatile java.lang.Object name_; + /** + * string name = 2; + * @return The name. + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } + } + /** + * string name = 2; + * @return The bytes for name. + */ + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (internal_ != null) { + output.writeMessage(1, getInternal()); + } + if (!getNameBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, name_); + } + unknownFields.writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (internal_ != null) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, getInternal()); + } + if (!getNameBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, name_); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1)) { + return super.equals(obj); + } + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 other = (io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1) obj; + + if (hasInternal() != other.hasInternal()) return false; + if (hasInternal()) { + if (!getInternal() + .equals(other.getInternal())) return false; + } + if (!getName() + .equals(other.getName())) return false; + if (!unknownFields.equals(other.unknownFields)) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (hasInternal()) { + hash = (37 * hash) + INTERNAL_FIELD_NUMBER; + hash = (53 * 
hash) + getInternal().hashCode(); + } + hash = (37 * hash) + NAME_FIELD_NUMBER; + hash = (53 * hash) + getName().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, 
extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code io.pravega.schemaregistry.testobjs.generated.Message1} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:io.pravega.schemaregistry.testobjs.generated.Message1) + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1OrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_Message1_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_Message1_fieldAccessorTable + .ensureFieldAccessorsInitialized( + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1.class, io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1.Builder.class); + } + + // Construct using io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + @java.lang.Override + public Builder clear() { + super.clear(); + if (internalBuilder_ == null) { + internal_ = null; + } else { + internal_ = null; + internalBuilder_ = null; + } + name_ = ""; + + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_Message1_descriptor; + } + + @java.lang.Override + public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 getDefaultInstanceForType() { + return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1.getDefaultInstance(); + } + + @java.lang.Override + public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 build() { + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 buildPartial() { + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 result = new io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1(this); + if 
(internalBuilder_ == null) { + result.internal_ = internal_; + } else { + result.internal_ = internalBuilder_.build(); + } + result.name_ = name_; + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return super.setField(field, value); + } + @java.lang.Override + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + @java.lang.Override + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, java.lang.Object value) { + return super.setRepeatedField(field, index, value); + } + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return super.addRepeatedField(field, value); + } + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1) { + return mergeFrom((io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 other) { + if (other == io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1.getDefaultInstance()) return this; + if (other.hasInternal()) { + mergeInternal(other.getInternal()); + } + if (!other.getName().isEmpty()) { + name_ = other.name_; + onChanged(); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage internal_; + private com.google.protobuf.SingleFieldBuilderV3< + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage, io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.Builder, io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessageOrBuilder> internalBuilder_; + /** + * .io.pravega.schemaregistry.testobjs.generated.InternalMessage internal = 1; + * @return Whether the internal field is set. + */ + public boolean hasInternal() { + return internalBuilder_ != null || internal_ != null; + } + /** + * .io.pravega.schemaregistry.testobjs.generated.InternalMessage internal = 1; + * @return The internal. 
+ */ + public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage getInternal() { + if (internalBuilder_ == null) { + return internal_ == null ? io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.getDefaultInstance() : internal_; + } else { + return internalBuilder_.getMessage(); + } + } + /** + * .io.pravega.schemaregistry.testobjs.generated.InternalMessage internal = 1; + */ + public Builder setInternal(io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage value) { + if (internalBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + internal_ = value; + onChanged(); + } else { + internalBuilder_.setMessage(value); + } + + return this; + } + /** + * .io.pravega.schemaregistry.testobjs.generated.InternalMessage internal = 1; + */ + public Builder setInternal( + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.Builder builderForValue) { + if (internalBuilder_ == null) { + internal_ = builderForValue.build(); + onChanged(); + } else { + internalBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + * .io.pravega.schemaregistry.testobjs.generated.InternalMessage internal = 1; + */ + public Builder mergeInternal(io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage value) { + if (internalBuilder_ == null) { + if (internal_ != null) { + internal_ = + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.newBuilder(internal_).mergeFrom(value).buildPartial(); + } else { + internal_ = value; + } + onChanged(); + } else { + internalBuilder_.mergeFrom(value); + } + + return this; + } + /** + * .io.pravega.schemaregistry.testobjs.generated.InternalMessage internal = 1; + */ + public Builder clearInternal() { + if (internalBuilder_ == null) { + internal_ = null; + onChanged(); + } else { + internal_ = null; + internalBuilder_ = null; + } + + return this; + } + /** + * .io.pravega.schemaregistry.testobjs.generated.InternalMessage internal = 1; + */ + public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.Builder getInternalBuilder() { + + onChanged(); + return getInternalFieldBuilder().getBuilder(); + } + /** + * .io.pravega.schemaregistry.testobjs.generated.InternalMessage internal = 1; + */ + public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessageOrBuilder getInternalOrBuilder() { + if (internalBuilder_ != null) { + return internalBuilder_.getMessageOrBuilder(); + } else { + return internal_ == null ? 
+ io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.getDefaultInstance() : internal_; + } + } + /** + * .io.pravega.schemaregistry.testobjs.generated.InternalMessage internal = 1; + */ + private com.google.protobuf.SingleFieldBuilderV3< + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage, io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.Builder, io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessageOrBuilder> + getInternalFieldBuilder() { + if (internalBuilder_ == null) { + internalBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage, io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.Builder, io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessageOrBuilder>( + getInternal(), + getParentForChildren(), + isClean()); + internal_ = null; + } + return internalBuilder_; + } + + private java.lang.Object name_ = ""; + /** + * string name = 2; + * @return The name. + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * string name = 2; + * @return The bytes for name. + */ + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * string name = 2; + * @param value The name to set. + * @return This builder for chaining. + */ + public Builder setName( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + name_ = value; + onChanged(); + return this; + } + /** + * string name = 2; + * @return This builder for chaining. + */ + public Builder clearName() { + + name_ = getDefaultInstance().getName(); + onChanged(); + return this; + } + /** + * string name = 2; + * @param value The bytes for name to set. + * @return This builder for chaining. 
+       */
+      public Builder setNameBytes(
+          com.google.protobuf.ByteString value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        checkByteStringIsUtf8(value);
+        
+        name_ = value;
+        onChanged();
+        return this;
+      }
+      @java.lang.Override
+      public final Builder setUnknownFields(
+          final com.google.protobuf.UnknownFieldSet unknownFields) {
+        return super.setUnknownFields(unknownFields);
+      }
+
+      @java.lang.Override
+      public final Builder mergeUnknownFields(
+          final com.google.protobuf.UnknownFieldSet unknownFields) {
+        return super.mergeUnknownFields(unknownFields);
+      }
+
+
+      // @@protoc_insertion_point(builder_scope:io.pravega.schemaregistry.testobjs.generated.Message1)
+    }
+
+    // @@protoc_insertion_point(class_scope:io.pravega.schemaregistry.testobjs.generated.Message1)
+    private static final io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 DEFAULT_INSTANCE;
+    static {
+      DEFAULT_INSTANCE = new io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1();
+    }
+
+    public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 getDefaultInstance() {
+      return DEFAULT_INSTANCE;
+    }
+
+    private static final com.google.protobuf.Parser<Message1>
+        PARSER = new com.google.protobuf.AbstractParser<Message1>() {
+      @java.lang.Override
+      public Message1 parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new Message1(input, extensionRegistry);
+      }
+    };
+
+    public static com.google.protobuf.Parser<Message1> parser() {
+      return PARSER;
+    }
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<Message1> getParserForType() {
+      return PARSER;
+    }
+
+    @java.lang.Override
+    public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 getDefaultInstanceForType() {
+      return DEFAULT_INSTANCE;
+    }
+
+  }
+
+  public interface Message2OrBuilder extends
+      // @@protoc_insertion_point(interface_extends:io.pravega.schemaregistry.testobjs.generated.Message2)
+      com.google.protobuf.MessageOrBuilder {
+
+    /**
+     * <code>string name = 1;</code>
+     * @return The name.
+     */
+    java.lang.String getName();
+    /**
+     * <code>string name = 1;</code>
+     * @return The bytes for name.
+     */
+    com.google.protobuf.ByteString
+        getNameBytes();
+
+    /**
+     * <code>int32 field1 = 2;</code>
+     * @return The field1.
+     */
+    int getField1();
+  }
+  /**
+   * Protobuf type {@code io.pravega.schemaregistry.testobjs.generated.Message2}
+   */
+  public static final class Message2 extends
+      com.google.protobuf.GeneratedMessageV3 implements
+      // @@protoc_insertion_point(message_implements:io.pravega.schemaregistry.testobjs.generated.Message2)
+      Message2OrBuilder {
+    private static final long serialVersionUID = 0L;
+    // Use Message2.newBuilder() to construct.
+ private Message2(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private Message2() { + name_ = ""; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new Message2(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private Message2( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + java.lang.String s = input.readStringRequireUtf8(); + + name_ = s; + break; + } + case 16: { + + field1_ = input.readInt32(); + break; + } + default: { + if (!parseUnknownField( + input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_Message2_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_Message2_fieldAccessorTable + .ensureFieldAccessorsInitialized( + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2.class, io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2.Builder.class); + } + + public static final int NAME_FIELD_NUMBER = 1; + private volatile java.lang.Object name_; + /** + * string name = 1; + * @return The name. + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } + } + /** + * string name = 1; + * @return The bytes for name. + */ + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int FIELD1_FIELD_NUMBER = 2; + private int field1_; + /** + * int32 field1 = 2; + * @return The field1. 
+ */ + public int getField1() { + return field1_; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!getNameBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); + } + if (field1_ != 0) { + output.writeInt32(2, field1_); + } + unknownFields.writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!getNameBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); + } + if (field1_ != 0) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(2, field1_); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2)) { + return super.equals(obj); + } + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 other = (io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2) obj; + + if (!getName() + .equals(other.getName())) return false; + if (getField1() + != other.getField1()) return false; + if (!unknownFields.equals(other.unknownFields)) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + NAME_FIELD_NUMBER; + hash = (53 * hash) + getName().hashCode(); + hash = (37 * hash) + FIELD1_FIELD_NUMBER; + hash = (53 * hash) + getField1(); + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 parseFrom( + byte[] data, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code io.pravega.schemaregistry.testobjs.generated.Message2} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:io.pravega.schemaregistry.testobjs.generated.Message2) + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2OrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_Message2_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_Message2_fieldAccessorTable + .ensureFieldAccessorsInitialized( + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2.class, io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2.Builder.class); + } + + // Construct using io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + @java.lang.Override + public Builder clear() { + super.clear(); + name_ = ""; + + field1_ = 0; + + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_Message2_descriptor; + } + + @java.lang.Override + public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 getDefaultInstanceForType() { + return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2.getDefaultInstance(); + } + + @java.lang.Override + public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 build() { + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 buildPartial() { + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 result = new io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2(this); + result.name_ = name_; + result.field1_ = field1_; + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return super.setField(field, value); + } + @java.lang.Override + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + @java.lang.Override + public Builder clearOneof( + 
com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, java.lang.Object value) { + return super.setRepeatedField(field, index, value); + } + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return super.addRepeatedField(field, value); + } + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2) { + return mergeFrom((io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 other) { + if (other == io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2.getDefaultInstance()) return this; + if (!other.getName().isEmpty()) { + name_ = other.name_; + onChanged(); + } + if (other.getField1() != 0) { + setField1(other.getField1()); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private java.lang.Object name_ = ""; + /** + * string name = 1; + * @return The name. + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * string name = 1; + * @return The bytes for name. + */ + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * string name = 1; + * @param value The name to set. + * @return This builder for chaining. + */ + public Builder setName( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + name_ = value; + onChanged(); + return this; + } + /** + * string name = 1; + * @return This builder for chaining. + */ + public Builder clearName() { + + name_ = getDefaultInstance().getName(); + onChanged(); + return this; + } + /** + * string name = 1; + * @param value The bytes for name to set. + * @return This builder for chaining. 
+       */
+      public Builder setNameBytes(
+          com.google.protobuf.ByteString value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        checkByteStringIsUtf8(value);
+        
+        name_ = value;
+        onChanged();
+        return this;
+      }
+
+      private int field1_ ;
+      /**
+       * <code>int32 field1 = 2;</code>
+       * @return The field1.
+       */
+      public int getField1() {
+        return field1_;
+      }
+      /**
+       * <code>int32 field1 = 2;</code>
+       * @param value The field1 to set.
+       * @return This builder for chaining.
+       */
+      public Builder setField1(int value) {
+        
+        field1_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>int32 field1 = 2;</code>
+       * @return This builder for chaining.
+       */
+      public Builder clearField1() {
+        
+        field1_ = 0;
+        onChanged();
+        return this;
+      }
+      @java.lang.Override
+      public final Builder setUnknownFields(
+          final com.google.protobuf.UnknownFieldSet unknownFields) {
+        return super.setUnknownFields(unknownFields);
+      }
+
+      @java.lang.Override
+      public final Builder mergeUnknownFields(
+          final com.google.protobuf.UnknownFieldSet unknownFields) {
+        return super.mergeUnknownFields(unknownFields);
+      }
+
+
+      // @@protoc_insertion_point(builder_scope:io.pravega.schemaregistry.testobjs.generated.Message2)
+    }
+
+    // @@protoc_insertion_point(class_scope:io.pravega.schemaregistry.testobjs.generated.Message2)
+    private static final io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 DEFAULT_INSTANCE;
+    static {
+      DEFAULT_INSTANCE = new io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2();
+    }
+
+    public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 getDefaultInstance() {
+      return DEFAULT_INSTANCE;
+    }
+
+    private static final com.google.protobuf.Parser<Message2>
+        PARSER = new com.google.protobuf.AbstractParser<Message2>() {
+      @java.lang.Override
+      public Message2 parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new Message2(input, extensionRegistry);
+      }
+    };
+
+    public static com.google.protobuf.Parser<Message2> parser() {
+      return PARSER;
+    }
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<Message2> getParserForType() {
+      return PARSER;
+    }
+
+    @java.lang.Override
+    public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 getDefaultInstanceForType() {
+      return DEFAULT_INSTANCE;
+    }
+
+  }
+
+  public interface Message3OrBuilder extends
+      // @@protoc_insertion_point(interface_extends:io.pravega.schemaregistry.testobjs.generated.Message3)
+      com.google.protobuf.MessageOrBuilder {
+
+    /**
+     * <code>string name = 1;</code>
+     * @return The name.
+     */
+    java.lang.String getName();
+    /**
+     * <code>string name = 1;</code>
+     * @return The bytes for name.
+     */
+    com.google.protobuf.ByteString
+        getNameBytes();
+
+    /**
+     * <code>int32 field1 = 2;</code>
+     * @return The field1.
+     */
+    int getField1();
+
+    /**
+     * <code>int32 field2 = 3;</code>
+     * @return The field2.
+     */
+    int getField2();
+  }
+  /**
+   * Protobuf type {@code io.pravega.schemaregistry.testobjs.generated.Message3}
+   */
+  public static final class Message3 extends
+      com.google.protobuf.GeneratedMessageV3 implements
+      // @@protoc_insertion_point(message_implements:io.pravega.schemaregistry.testobjs.generated.Message3)
+      Message3OrBuilder {
+    private static final long serialVersionUID = 0L;
+    // Use Message3.newBuilder() to construct.
+ private Message3(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private Message3() { + name_ = ""; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new Message3(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private Message3( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + java.lang.String s = input.readStringRequireUtf8(); + + name_ = s; + break; + } + case 16: { + + field1_ = input.readInt32(); + break; + } + case 24: { + + field2_ = input.readInt32(); + break; + } + default: { + if (!parseUnknownField( + input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_Message3_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_Message3_fieldAccessorTable + .ensureFieldAccessorsInitialized( + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3.class, io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3.Builder.class); + } + + public static final int NAME_FIELD_NUMBER = 1; + private volatile java.lang.Object name_; + /** + * string name = 1; + * @return The name. + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } + } + /** + * string name = 1; + * @return The bytes for name. + */ + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int FIELD1_FIELD_NUMBER = 2; + private int field1_; + /** + * int32 field1 = 2; + * @return The field1. + */ + public int getField1() { + return field1_; + } + + public static final int FIELD2_FIELD_NUMBER = 3; + private int field2_; + /** + * int32 field2 = 3; + * @return The field2. 
+ */ + public int getField2() { + return field2_; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!getNameBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); + } + if (field1_ != 0) { + output.writeInt32(2, field1_); + } + if (field2_ != 0) { + output.writeInt32(3, field2_); + } + unknownFields.writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!getNameBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); + } + if (field1_ != 0) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(2, field1_); + } + if (field2_ != 0) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(3, field2_); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3)) { + return super.equals(obj); + } + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 other = (io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3) obj; + + if (!getName() + .equals(other.getName())) return false; + if (getField1() + != other.getField1()) return false; + if (getField2() + != other.getField2()) return false; + if (!unknownFields.equals(other.unknownFields)) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + NAME_FIELD_NUMBER; + hash = (53 * hash) + getName().hashCode(); + hash = (37 * hash) + FIELD1_FIELD_NUMBER; + hash = (53 * hash) + getField1(); + hash = (37 * hash) + FIELD2_FIELD_NUMBER; + hash = (53 * hash) + getField2(); + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static 
io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code io.pravega.schemaregistry.testobjs.generated.Message3} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:io.pravega.schemaregistry.testobjs.generated.Message3) + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3OrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_Message3_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_Message3_fieldAccessorTable + .ensureFieldAccessorsInitialized( + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3.class, io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3.Builder.class); + } + + // Construct using io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + @java.lang.Override + public Builder clear() { + super.clear(); + name_ = ""; + + field1_ = 0; + + field2_ = 0; + + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_Message3_descriptor; + } + + @java.lang.Override + public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 getDefaultInstanceForType() { + return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3.getDefaultInstance(); + } + + @java.lang.Override + public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 build() { + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 buildPartial() { + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 result = new io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3(this); + result.name_ = name_; + result.field1_ = field1_; + result.field2_ = field2_; + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return super.setField(field, value); + } + @java.lang.Override + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + @java.lang.Override + public Builder 
clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, java.lang.Object value) { + return super.setRepeatedField(field, index, value); + } + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return super.addRepeatedField(field, value); + } + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3) { + return mergeFrom((io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 other) { + if (other == io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3.getDefaultInstance()) return this; + if (!other.getName().isEmpty()) { + name_ = other.name_; + onChanged(); + } + if (other.getField1() != 0) { + setField1(other.getField1()); + } + if (other.getField2() != 0) { + setField2(other.getField2()); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private java.lang.Object name_ = ""; + /** + * string name = 1; + * @return The name. + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * string name = 1; + * @return The bytes for name. + */ + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * string name = 1; + * @param value The name to set. + * @return This builder for chaining. + */ + public Builder setName( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + name_ = value; + onChanged(); + return this; + } + /** + * string name = 1; + * @return This builder for chaining. + */ + public Builder clearName() { + + name_ = getDefaultInstance().getName(); + onChanged(); + return this; + } + /** + * string name = 1; + * @param value The bytes for name to set. + * @return This builder for chaining. 
+ */ + public Builder setNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + name_ = value; + onChanged(); + return this; + } + + private int field1_ ; + /** + * int32 field1 = 2; + * @return The field1. + */ + public int getField1() { + return field1_; + } + /** + * int32 field1 = 2; + * @param value The field1 to set. + * @return This builder for chaining. + */ + public Builder setField1(int value) { + + field1_ = value; + onChanged(); + return this; + } + /** + * int32 field1 = 2; + * @return This builder for chaining. + */ + public Builder clearField1() { + + field1_ = 0; + onChanged(); + return this; + } + + private int field2_ ; + /** + * int32 field2 = 3; + * @return The field2. + */ + public int getField2() { + return field2_; + } + /** + * int32 field2 = 3; + * @param value The field2 to set. + * @return This builder for chaining. + */ + public Builder setField2(int value) { + + field2_ = value; + onChanged(); + return this; + } + /** + * int32 field2 = 3; + * @return This builder for chaining. + */ + public Builder clearField2() { + + field2_ = 0; + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:io.pravega.schemaregistry.testobjs.generated.Message3) + } + + // @@protoc_insertion_point(class_scope:io.pravega.schemaregistry.testobjs.generated.Message3) + private static final io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3(); + } + + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser<Message3> + PARSER = new com.google.protobuf.AbstractParser<Message3>() { + @java.lang.Override + public Message3 parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new Message3(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser<Message3> parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser<Message3> getParserForType() { + return PARSER; + } + + @java.lang.Override + public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_io_pravega_schemaregistry_testobjs_generated_InternalMessage_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_io_pravega_schemaregistry_testobjs_generated_InternalMessage_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_io_pravega_schemaregistry_testobjs_generated_Message1_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_io_pravega_schemaregistry_testobjs_generated_Message1_fieldAccessorTable; + private static 
final com.google.protobuf.Descriptors.Descriptor + internal_static_io_pravega_schemaregistry_testobjs_generated_Message2_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_io_pravega_schemaregistry_testobjs_generated_Message2_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_io_pravega_schemaregistry_testobjs_generated_Message3_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_io_pravega_schemaregistry_testobjs_generated_Message3_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n\022protobufTest.proto\022,io.pravega.schemar" + + "egistry.testobjs.generated\"\230\001\n\017InternalM" + + "essage\022S\n\005value\030\001 \001(\0162D.io.pravega.schem" + + "aregistry.testobjs.generated.InternalMes" + + "sage.Values\"0\n\006Values\022\010\n\004val1\020\000\022\010\n\004val2\020" + + "\001\022\010\n\004val3\020\002\022\010\n\004val4\020\003\"i\n\010Message1\022O\n\010int" + + "ernal\030\001 \001(\0132=.io.pravega.schemaregistry." + + "testobjs.generated.InternalMessage\022\014\n\004na" + + "me\030\002 \001(\t\"(\n\010Message2\022\014\n\004name\030\001 \001(\t\022\016\n\006fi" + + "eld1\030\002 \001(\005\"8\n\010Message3\022\014\n\004name\030\001 \001(\t\022\016\n\006" + + "field1\030\002 \001(\005\022\016\n\006field2\030\003 \001(\005b\006proto3" + }; + descriptor = com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + }); + internal_static_io_pravega_schemaregistry_testobjs_generated_InternalMessage_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_io_pravega_schemaregistry_testobjs_generated_InternalMessage_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_io_pravega_schemaregistry_testobjs_generated_InternalMessage_descriptor, + new java.lang.String[] { "Value", }); + internal_static_io_pravega_schemaregistry_testobjs_generated_Message1_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_io_pravega_schemaregistry_testobjs_generated_Message1_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_io_pravega_schemaregistry_testobjs_generated_Message1_descriptor, + new java.lang.String[] { "Internal", "Name", }); + internal_static_io_pravega_schemaregistry_testobjs_generated_Message2_descriptor = + getDescriptor().getMessageTypes().get(2); + internal_static_io_pravega_schemaregistry_testobjs_generated_Message2_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_io_pravega_schemaregistry_testobjs_generated_Message2_descriptor, + new java.lang.String[] { "Name", "Field1", }); + internal_static_io_pravega_schemaregistry_testobjs_generated_Message3_descriptor = + getDescriptor().getMessageTypes().get(3); + internal_static_io_pravega_schemaregistry_testobjs_generated_Message3_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_io_pravega_schemaregistry_testobjs_generated_Message3_descriptor, + new java.lang.String[] { "Name", "Field1", "Field2", }); + } + + // 
@@protoc_insertion_point(outer_class_scope) +} diff --git a/serializers/src/test/java/io/pravega/schemaregistry/testobjs/generated/Test1.java b/serializers/src/test/java/io/pravega/schemaregistry/testobjs/generated/Test1.java new file mode 100644 index 000000000..3cbe85040 --- /dev/null +++ b/serializers/src/test/java/io/pravega/schemaregistry/testobjs/generated/Test1.java @@ -0,0 +1,389 @@ +/** + * Autogenerated by Avro + * + * DO NOT EDIT DIRECTLY + */ +package io.pravega.schemaregistry.testobjs.generated; + +import org.apache.avro.generic.GenericArray; +import org.apache.avro.specific.SpecificData; +import org.apache.avro.util.Utf8; +import org.apache.avro.message.BinaryMessageEncoder; +import org.apache.avro.message.BinaryMessageDecoder; +import org.apache.avro.message.SchemaStore; + +@org.apache.avro.specific.AvroGenerated +public class Test1 extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord { + private static final long serialVersionUID = -7987201165438288421L; + public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"Test1\",\"namespace\":\"io.pravega.schemaregistry.testobjs.generated\",\"fields\":[{\"name\":\"name\",\"type\":\"string\"},{\"name\":\"field1\",\"type\":\"int\"}]}"); + public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; } + + private static SpecificData MODEL$ = new SpecificData(); + + private static final BinaryMessageEncoder<Test1> ENCODER = + new BinaryMessageEncoder<Test1>(MODEL$, SCHEMA$); + + private static final BinaryMessageDecoder<Test1> DECODER = + new BinaryMessageDecoder<Test1>(MODEL$, SCHEMA$); + + /** + * Return the BinaryMessageEncoder instance used by this class. + * @return the message encoder used by this class + */ + public static BinaryMessageEncoder<Test1> getEncoder() { + return ENCODER; + } + + /** + * Return the BinaryMessageDecoder instance used by this class. + * @return the message decoder used by this class + */ + public static BinaryMessageDecoder<Test1> getDecoder() { + return DECODER; + } + + /** + * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link SchemaStore}. + * @param resolver a {@link SchemaStore} used to find schemas by fingerprint + * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore + */ + public static BinaryMessageDecoder<Test1> createDecoder(SchemaStore resolver) { + return new BinaryMessageDecoder<Test1>(MODEL$, SCHEMA$, resolver); + } + + /** + * Serializes this Test1 to a ByteBuffer. + * @return a buffer holding the serialized data for this instance + * @throws java.io.IOException if this instance could not be serialized + */ + public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException { + return ENCODER.encode(this); + } + + /** + * Deserializes a Test1 from a ByteBuffer. + * @param b a byte buffer holding serialized data for an instance of this class + * @return a Test1 instance decoded from the given buffer + * @throws java.io.IOException if the given bytes could not be deserialized into an instance of this class + */ + public static Test1 fromByteBuffer( + java.nio.ByteBuffer b) throws java.io.IOException { + return DECODER.decode(b); + } + + private CharSequence name; + private int field1; + + /** + * Default constructor. Note that this does not initialize fields + * to their default values from the schema. If that is desired then + * one should use newBuilder(). + */ + public Test1() {} + + /** + * All-args constructor. 
+ * @param name The new value for name + * @param field1 The new value for field1 + */ + public Test1(CharSequence name, Integer field1) { + this.name = name; + this.field1 = field1; + } + + public SpecificData getSpecificData() { return MODEL$; } + public org.apache.avro.Schema getSchema() { return SCHEMA$; } + // Used by DatumWriter. Applications should not call. + public Object get(int field$) { + switch (field$) { + case 0: return name; + case 1: return field1; + default: throw new org.apache.avro.AvroRuntimeException("Bad index"); + } + } + + // Used by DatumReader. Applications should not call. + @SuppressWarnings(value="unchecked") + public void put(int field$, Object value$) { + switch (field$) { + case 0: name = (CharSequence)value$; break; + case 1: field1 = (Integer)value$; break; + default: throw new org.apache.avro.AvroRuntimeException("Bad index"); + } + } + + /** + * Gets the value of the 'name' field. + * @return The value of the 'name' field. + */ + public CharSequence getName() { + return name; + } + + + /** + * Sets the value of the 'name' field. + * @param value the value to set. + */ + public void setName(CharSequence value) { + this.name = value; + } + + /** + * Gets the value of the 'field1' field. + * @return The value of the 'field1' field. + */ + public int getField1() { + return field1; + } + + + /** + * Sets the value of the 'field1' field. + * @param value the value to set. + */ + public void setField1(int value) { + this.field1 = value; + } + + /** + * Creates a new Test1 RecordBuilder. + * @return A new Test1 RecordBuilder + */ + public static io.pravega.schemaregistry.testobjs.generated.Test1.Builder newBuilder() { + return new io.pravega.schemaregistry.testobjs.generated.Test1.Builder(); + } + + /** + * Creates a new Test1 RecordBuilder by copying an existing Builder. + * @param other The existing builder to copy. + * @return A new Test1 RecordBuilder + */ + public static io.pravega.schemaregistry.testobjs.generated.Test1.Builder newBuilder(io.pravega.schemaregistry.testobjs.generated.Test1.Builder other) { + if (other == null) { + return new io.pravega.schemaregistry.testobjs.generated.Test1.Builder(); + } else { + return new io.pravega.schemaregistry.testobjs.generated.Test1.Builder(other); + } + } + + /** + * Creates a new Test1 RecordBuilder by copying an existing Test1 instance. + * @param other The existing instance to copy. + * @return A new Test1 RecordBuilder + */ + public static io.pravega.schemaregistry.testobjs.generated.Test1.Builder newBuilder(io.pravega.schemaregistry.testobjs.generated.Test1 other) { + if (other == null) { + return new io.pravega.schemaregistry.testobjs.generated.Test1.Builder(); + } else { + return new io.pravega.schemaregistry.testobjs.generated.Test1.Builder(other); + } + } + + /** + * RecordBuilder for Test1 instances. + */ + public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private CharSequence name; + private int field1; + + /** Creates a new Builder */ + private Builder() { + super(SCHEMA$); + } + + /** + * Creates a Builder by copying an existing Builder. + * @param other The existing Builder to copy. 
+ */ + private Builder(io.pravega.schemaregistry.testobjs.generated.Test1.Builder other) { + super(other); + if (isValidValue(fields()[0], other.name)) { + this.name = data().deepCopy(fields()[0].schema(), other.name); + fieldSetFlags()[0] = other.fieldSetFlags()[0]; + } + if (isValidValue(fields()[1], other.field1)) { + this.field1 = data().deepCopy(fields()[1].schema(), other.field1); + fieldSetFlags()[1] = other.fieldSetFlags()[1]; + } + } + + /** + * Creates a Builder by copying an existing Test1 instance + * @param other The existing instance to copy. + */ + private Builder(io.pravega.schemaregistry.testobjs.generated.Test1 other) { + super(SCHEMA$); + if (isValidValue(fields()[0], other.name)) { + this.name = data().deepCopy(fields()[0].schema(), other.name); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.field1)) { + this.field1 = data().deepCopy(fields()[1].schema(), other.field1); + fieldSetFlags()[1] = true; + } + } + + /** + * Gets the value of the 'name' field. + * @return The value. + */ + public CharSequence getName() { + return name; + } + + + /** + * Sets the value of the 'name' field. + * @param value The value of 'name'. + * @return This builder. + */ + public io.pravega.schemaregistry.testobjs.generated.Test1.Builder setName(CharSequence value) { + validate(fields()[0], value); + this.name = value; + fieldSetFlags()[0] = true; + return this; + } + + /** + * Checks whether the 'name' field has been set. + * @return True if the 'name' field has been set, false otherwise. + */ + public boolean hasName() { + return fieldSetFlags()[0]; + } + + + /** + * Clears the value of the 'name' field. + * @return This builder. + */ + public io.pravega.schemaregistry.testobjs.generated.Test1.Builder clearName() { + name = null; + fieldSetFlags()[0] = false; + return this; + } + + /** + * Gets the value of the 'field1' field. + * @return The value. + */ + public int getField1() { + return field1; + } + + + /** + * Sets the value of the 'field1' field. + * @param value The value of 'field1'. + * @return This builder. + */ + public io.pravega.schemaregistry.testobjs.generated.Test1.Builder setField1(int value) { + validate(fields()[1], value); + this.field1 = value; + fieldSetFlags()[1] = true; + return this; + } + + /** + * Checks whether the 'field1' field has been set. + * @return True if the 'field1' field has been set, false otherwise. + */ + public boolean hasField1() { + return fieldSetFlags()[1]; + } + + + /** + * Clears the value of the 'field1' field. + * @return This builder. + */ + public io.pravega.schemaregistry.testobjs.generated.Test1.Builder clearField1() { + fieldSetFlags()[1] = false; + return this; + } + + @Override + @SuppressWarnings("unchecked") + public Test1 build() { + try { + Test1 record = new Test1(); + record.name = fieldSetFlags()[0] ? this.name : (CharSequence) defaultValue(fields()[0]); + record.field1 = fieldSetFlags()[1] ? 
this.field1 : (Integer) defaultValue(fields()[1]); + return record; + } catch (org.apache.avro.AvroMissingFieldException e) { + throw e; + } catch (Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } + + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumWriter<Test1> + WRITER$ = (org.apache.avro.io.DatumWriter<Test1>)MODEL$.createDatumWriter(SCHEMA$); + + @Override public void writeExternal(java.io.ObjectOutput out) + throws java.io.IOException { + WRITER$.write(this, SpecificData.getEncoder(out)); + } + + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumReader<Test1> + READER$ = (org.apache.avro.io.DatumReader<Test1>)MODEL$.createDatumReader(SCHEMA$); + + @Override public void readExternal(java.io.ObjectInput in) + throws java.io.IOException { + READER$.read(this, SpecificData.getDecoder(in)); + } + + @Override protected boolean hasCustomCoders() { return true; } + + @Override public void customEncode(org.apache.avro.io.Encoder out) + throws java.io.IOException + { + out.writeString(this.name); + + out.writeInt(this.field1); + + } + + @Override public void customDecode(org.apache.avro.io.ResolvingDecoder in) + throws java.io.IOException + { + org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff(); + if (fieldOrder == null) { + this.name = in.readString(this.name instanceof Utf8 ? (Utf8)this.name : null); + + this.field1 = in.readInt(); + + } else { + for (int i = 0; i < 2; i++) { + switch (fieldOrder[i].pos()) { + case 0: + this.name = in.readString(this.name instanceof Utf8 ? (Utf8)this.name : null); + break; + + case 1: + this.field1 = in.readInt(); + break; + + default: + throw new java.io.IOException("Corrupt ResolvingDecoder."); + } + } + } + } +} + + + + + + + + + + diff --git a/serializers/src/test/java/io/pravega/schemaregistry/testobjs/generated/Test2.java b/serializers/src/test/java/io/pravega/schemaregistry/testobjs/generated/Test2.java new file mode 100644 index 000000000..3695a01a3 --- /dev/null +++ b/serializers/src/test/java/io/pravega/schemaregistry/testobjs/generated/Test2.java @@ -0,0 +1,469 @@ +/** + * Autogenerated by Avro + * + * DO NOT EDIT DIRECTLY + */ +package io.pravega.schemaregistry.testobjs.generated; + +import org.apache.avro.generic.GenericArray; +import org.apache.avro.specific.SpecificData; +import org.apache.avro.util.Utf8; +import org.apache.avro.message.BinaryMessageEncoder; +import org.apache.avro.message.BinaryMessageDecoder; +import org.apache.avro.message.SchemaStore; + +@org.apache.avro.specific.AvroGenerated +public class Test2 extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord { + private static final long serialVersionUID = -8157678982198772485L; + public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"Test2\",\"namespace\":\"io.pravega.schemaregistry.testobjs.generated\",\"fields\":[{\"name\":\"name\",\"type\":\"string\"},{\"name\":\"field1\",\"type\":\"int\"},{\"name\":\"field2\",\"type\":\"string\"}]}"); + public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; } + + private static SpecificData MODEL$ = new SpecificData(); + + private static final BinaryMessageEncoder<Test2> ENCODER = + new BinaryMessageEncoder<Test2>(MODEL$, SCHEMA$); + + private static final BinaryMessageDecoder<Test2> DECODER = + new BinaryMessageDecoder<Test2>(MODEL$, SCHEMA$); + + /** + * Return the BinaryMessageEncoder instance used by this class. 
+ * @return the message encoder used by this class + */ + public static BinaryMessageEncoder<Test2> getEncoder() { + return ENCODER; + } + + /** + * Return the BinaryMessageDecoder instance used by this class. + * @return the message decoder used by this class + */ + public static BinaryMessageDecoder<Test2> getDecoder() { + return DECODER; + } + + /** + * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link SchemaStore}. + * @param resolver a {@link SchemaStore} used to find schemas by fingerprint + * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore + */ + public static BinaryMessageDecoder<Test2> createDecoder(SchemaStore resolver) { + return new BinaryMessageDecoder<Test2>(MODEL$, SCHEMA$, resolver); + } + + /** + * Serializes this Test2 to a ByteBuffer. + * @return a buffer holding the serialized data for this instance + * @throws java.io.IOException if this instance could not be serialized + */ + public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException { + return ENCODER.encode(this); + } + + /** + * Deserializes a Test2 from a ByteBuffer. + * @param b a byte buffer holding serialized data for an instance of this class + * @return a Test2 instance decoded from the given buffer + * @throws java.io.IOException if the given bytes could not be deserialized into an instance of this class + */ + public static Test2 fromByteBuffer( + java.nio.ByteBuffer b) throws java.io.IOException { + return DECODER.decode(b); + } + + private CharSequence name; + private int field1; + private CharSequence field2; + + /** + * Default constructor. Note that this does not initialize fields + * to their default values from the schema. If that is desired then + * one should use newBuilder(). + */ + public Test2() {} + + /** + * All-args constructor. + * @param name The new value for name + * @param field1 The new value for field1 + * @param field2 The new value for field2 + */ + public Test2(CharSequence name, Integer field1, CharSequence field2) { + this.name = name; + this.field1 = field1; + this.field2 = field2; + } + + public SpecificData getSpecificData() { return MODEL$; } + public org.apache.avro.Schema getSchema() { return SCHEMA$; } + // Used by DatumWriter. Applications should not call. + public Object get(int field$) { + switch (field$) { + case 0: return name; + case 1: return field1; + case 2: return field2; + default: throw new org.apache.avro.AvroRuntimeException("Bad index"); + } + } + + // Used by DatumReader. Applications should not call. + @SuppressWarnings(value="unchecked") + public void put(int field$, Object value$) { + switch (field$) { + case 0: name = (CharSequence)value$; break; + case 1: field1 = (Integer)value$; break; + case 2: field2 = (CharSequence)value$; break; + default: throw new org.apache.avro.AvroRuntimeException("Bad index"); + } + } + + /** + * Gets the value of the 'name' field. + * @return The value of the 'name' field. + */ + public CharSequence getName() { + return name; + } + + + /** + * Sets the value of the 'name' field. + * @param value the value to set. + */ + public void setName(CharSequence value) { + this.name = value; + } + + /** + * Gets the value of the 'field1' field. + * @return The value of the 'field1' field. + */ + public int getField1() { + return field1; + } + + + /** + * Sets the value of the 'field1' field. + * @param value the value to set. + */ + public void setField1(int value) { + this.field1 = value; + } + + /** + * Gets the value of the 'field2' field. 
+ * @return The value of the 'field2' field. + */ + public CharSequence getField2() { + return field2; + } + + + /** + * Sets the value of the 'field2' field. + * @param value the value to set. + */ + public void setField2(CharSequence value) { + this.field2 = value; + } + + /** + * Creates a new Test2 RecordBuilder. + * @return A new Test2 RecordBuilder + */ + public static io.pravega.schemaregistry.testobjs.generated.Test2.Builder newBuilder() { + return new io.pravega.schemaregistry.testobjs.generated.Test2.Builder(); + } + + /** + * Creates a new Test2 RecordBuilder by copying an existing Builder. + * @param other The existing builder to copy. + * @return A new Test2 RecordBuilder + */ + public static io.pravega.schemaregistry.testobjs.generated.Test2.Builder newBuilder(io.pravega.schemaregistry.testobjs.generated.Test2.Builder other) { + if (other == null) { + return new io.pravega.schemaregistry.testobjs.generated.Test2.Builder(); + } else { + return new io.pravega.schemaregistry.testobjs.generated.Test2.Builder(other); + } + } + + /** + * Creates a new Test2 RecordBuilder by copying an existing Test2 instance. + * @param other The existing instance to copy. + * @return A new Test2 RecordBuilder + */ + public static io.pravega.schemaregistry.testobjs.generated.Test2.Builder newBuilder(io.pravega.schemaregistry.testobjs.generated.Test2 other) { + if (other == null) { + return new io.pravega.schemaregistry.testobjs.generated.Test2.Builder(); + } else { + return new io.pravega.schemaregistry.testobjs.generated.Test2.Builder(other); + } + } + + /** + * RecordBuilder for Test2 instances. + */ + public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private CharSequence name; + private int field1; + private CharSequence field2; + + /** Creates a new Builder */ + private Builder() { + super(SCHEMA$); + } + + /** + * Creates a Builder by copying an existing Builder. + * @param other The existing Builder to copy. + */ + private Builder(io.pravega.schemaregistry.testobjs.generated.Test2.Builder other) { + super(other); + if (isValidValue(fields()[0], other.name)) { + this.name = data().deepCopy(fields()[0].schema(), other.name); + fieldSetFlags()[0] = other.fieldSetFlags()[0]; + } + if (isValidValue(fields()[1], other.field1)) { + this.field1 = data().deepCopy(fields()[1].schema(), other.field1); + fieldSetFlags()[1] = other.fieldSetFlags()[1]; + } + if (isValidValue(fields()[2], other.field2)) { + this.field2 = data().deepCopy(fields()[2].schema(), other.field2); + fieldSetFlags()[2] = other.fieldSetFlags()[2]; + } + } + + /** + * Creates a Builder by copying an existing Test2 instance + * @param other The existing instance to copy. + */ + private Builder(io.pravega.schemaregistry.testobjs.generated.Test2 other) { + super(SCHEMA$); + if (isValidValue(fields()[0], other.name)) { + this.name = data().deepCopy(fields()[0].schema(), other.name); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.field1)) { + this.field1 = data().deepCopy(fields()[1].schema(), other.field1); + fieldSetFlags()[1] = true; + } + if (isValidValue(fields()[2], other.field2)) { + this.field2 = data().deepCopy(fields()[2].schema(), other.field2); + fieldSetFlags()[2] = true; + } + } + + /** + * Gets the value of the 'name' field. + * @return The value. + */ + public CharSequence getName() { + return name; + } + + + /** + * Sets the value of the 'name' field. + * @param value The value of 'name'. 
+ * @return This builder. + */ + public io.pravega.schemaregistry.testobjs.generated.Test2.Builder setName(CharSequence value) { + validate(fields()[0], value); + this.name = value; + fieldSetFlags()[0] = true; + return this; + } + + /** + * Checks whether the 'name' field has been set. + * @return True if the 'name' field has been set, false otherwise. + */ + public boolean hasName() { + return fieldSetFlags()[0]; + } + + + /** + * Clears the value of the 'name' field. + * @return This builder. + */ + public io.pravega.schemaregistry.testobjs.generated.Test2.Builder clearName() { + name = null; + fieldSetFlags()[0] = false; + return this; + } + + /** + * Gets the value of the 'field1' field. + * @return The value. + */ + public int getField1() { + return field1; + } + + + /** + * Sets the value of the 'field1' field. + * @param value The value of 'field1'. + * @return This builder. + */ + public io.pravega.schemaregistry.testobjs.generated.Test2.Builder setField1(int value) { + validate(fields()[1], value); + this.field1 = value; + fieldSetFlags()[1] = true; + return this; + } + + /** + * Checks whether the 'field1' field has been set. + * @return True if the 'field1' field has been set, false otherwise. + */ + public boolean hasField1() { + return fieldSetFlags()[1]; + } + + + /** + * Clears the value of the 'field1' field. + * @return This builder. + */ + public io.pravega.schemaregistry.testobjs.generated.Test2.Builder clearField1() { + fieldSetFlags()[1] = false; + return this; + } + + /** + * Gets the value of the 'field2' field. + * @return The value. + */ + public CharSequence getField2() { + return field2; + } + + + /** + * Sets the value of the 'field2' field. + * @param value The value of 'field2'. + * @return This builder. + */ + public io.pravega.schemaregistry.testobjs.generated.Test2.Builder setField2(CharSequence value) { + validate(fields()[2], value); + this.field2 = value; + fieldSetFlags()[2] = true; + return this; + } + + /** + * Checks whether the 'field2' field has been set. + * @return True if the 'field2' field has been set, false otherwise. + */ + public boolean hasField2() { + return fieldSetFlags()[2]; + } + + + /** + * Clears the value of the 'field2' field. + * @return This builder. + */ + public io.pravega.schemaregistry.testobjs.generated.Test2.Builder clearField2() { + field2 = null; + fieldSetFlags()[2] = false; + return this; + } + + @Override + @SuppressWarnings("unchecked") + public Test2 build() { + try { + Test2 record = new Test2(); + record.name = fieldSetFlags()[0] ? this.name : (CharSequence) defaultValue(fields()[0]); + record.field1 = fieldSetFlags()[1] ? this.field1 : (Integer) defaultValue(fields()[1]); + record.field2 = fieldSetFlags()[2] ? 
this.field2 : (CharSequence) defaultValue(fields()[2]); + return record; + } catch (org.apache.avro.AvroMissingFieldException e) { + throw e; + } catch (Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } + + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumWriter<Test2> + WRITER$ = (org.apache.avro.io.DatumWriter<Test2>)MODEL$.createDatumWriter(SCHEMA$); + + @Override public void writeExternal(java.io.ObjectOutput out) + throws java.io.IOException { + WRITER$.write(this, SpecificData.getEncoder(out)); + } + + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumReader<Test2> + READER$ = (org.apache.avro.io.DatumReader<Test2>)MODEL$.createDatumReader(SCHEMA$); + + @Override public void readExternal(java.io.ObjectInput in) + throws java.io.IOException { + READER$.read(this, SpecificData.getDecoder(in)); + } + + @Override protected boolean hasCustomCoders() { return true; } + + @Override public void customEncode(org.apache.avro.io.Encoder out) + throws java.io.IOException + { + out.writeString(this.name); + + out.writeInt(this.field1); + + out.writeString(this.field2); + + } + + @Override public void customDecode(org.apache.avro.io.ResolvingDecoder in) + throws java.io.IOException + { + org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff(); + if (fieldOrder == null) { + this.name = in.readString(this.name instanceof Utf8 ? (Utf8)this.name : null); + + this.field1 = in.readInt(); + + this.field2 = in.readString(this.field2 instanceof Utf8 ? (Utf8)this.field2 : null); + + } else { + for (int i = 0; i < 3; i++) { + switch (fieldOrder[i].pos()) { + case 0: + this.name = in.readString(this.name instanceof Utf8 ? (Utf8)this.name : null); + break; + + case 1: + this.field1 = in.readInt(); + break; + + case 2: + this.field2 = in.readString(this.field2 instanceof Utf8 ? 
(Utf8)this.field2 : null); + break; + + default: + throw new java.io.IOException("Corrupt ResolvingDecoder."); + } + } + } + } +} + + + + + + + + + + diff --git a/serializers/src/test/java/io/pravega/schemaregistry/testobjs/generated/Test3.java b/serializers/src/test/java/io/pravega/schemaregistry/testobjs/generated/Test3.java new file mode 100644 index 000000000..32a122681 --- /dev/null +++ b/serializers/src/test/java/io/pravega/schemaregistry/testobjs/generated/Test3.java @@ -0,0 +1,549 @@ +/** + * Autogenerated by Avro + * + * DO NOT EDIT DIRECTLY + */ +package io.pravega.schemaregistry.testobjs.generated; + +import org.apache.avro.generic.GenericArray; +import org.apache.avro.specific.SpecificData; +import org.apache.avro.util.Utf8; +import org.apache.avro.message.BinaryMessageEncoder; +import org.apache.avro.message.BinaryMessageDecoder; +import org.apache.avro.message.SchemaStore; + +@org.apache.avro.specific.AvroGenerated +public class Test3 extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord { + private static final long serialVersionUID = 910195546659301614L; + public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"Test3\",\"namespace\":\"io.pravega.schemaregistry.testobjs.generated\",\"fields\":[{\"name\":\"name\",\"type\":\"string\"},{\"name\":\"field1\",\"type\":\"int\"},{\"name\":\"field2\",\"type\":\"string\"},{\"name\":\"field3\",\"type\":\"string\"}]}"); + public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; } + + private static SpecificData MODEL$ = new SpecificData(); + + private static final BinaryMessageEncoder<Test3> ENCODER = + new BinaryMessageEncoder<Test3>(MODEL$, SCHEMA$); + + private static final BinaryMessageDecoder<Test3> DECODER = + new BinaryMessageDecoder<Test3>(MODEL$, SCHEMA$); + + /** + * Return the BinaryMessageEncoder instance used by this class. + * @return the message encoder used by this class + */ + public static BinaryMessageEncoder<Test3> getEncoder() { + return ENCODER; + } + + /** + * Return the BinaryMessageDecoder instance used by this class. + * @return the message decoder used by this class + */ + public static BinaryMessageDecoder<Test3> getDecoder() { + return DECODER; + } + + /** + * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link SchemaStore}. + * @param resolver a {@link SchemaStore} used to find schemas by fingerprint + * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore + */ + public static BinaryMessageDecoder<Test3> createDecoder(SchemaStore resolver) { + return new BinaryMessageDecoder<Test3>(MODEL$, SCHEMA$, resolver); + } + + /** + * Serializes this Test3 to a ByteBuffer. + * @return a buffer holding the serialized data for this instance + * @throws java.io.IOException if this instance could not be serialized + */ + public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException { + return ENCODER.encode(this); + } + + /** + * Deserializes a Test3 from a ByteBuffer. 
+ * @param b a byte buffer holding serialized data for an instance of this class + * @return a Test3 instance decoded from the given buffer + * @throws java.io.IOException if the given bytes could not be deserialized into an instance of this class + */ + public static Test3 fromByteBuffer( + java.nio.ByteBuffer b) throws java.io.IOException { + return DECODER.decode(b); + } + + private CharSequence name; + private int field1; + private CharSequence field2; + private CharSequence field3; + + /** + * Default constructor. Note that this does not initialize fields + * to their default values from the schema. If that is desired then + * one should use newBuilder(). + */ + public Test3() {} + + /** + * All-args constructor. + * @param name The new value for name + * @param field1 The new value for field1 + * @param field2 The new value for field2 + * @param field3 The new value for field3 + */ + public Test3(CharSequence name, Integer field1, CharSequence field2, CharSequence field3) { + this.name = name; + this.field1 = field1; + this.field2 = field2; + this.field3 = field3; + } + + public SpecificData getSpecificData() { return MODEL$; } + public org.apache.avro.Schema getSchema() { return SCHEMA$; } + // Used by DatumWriter. Applications should not call. + public Object get(int field$) { + switch (field$) { + case 0: return name; + case 1: return field1; + case 2: return field2; + case 3: return field3; + default: throw new org.apache.avro.AvroRuntimeException("Bad index"); + } + } + + // Used by DatumReader. Applications should not call. + @SuppressWarnings(value="unchecked") + public void put(int field$, Object value$) { + switch (field$) { + case 0: name = (CharSequence)value$; break; + case 1: field1 = (Integer)value$; break; + case 2: field2 = (CharSequence)value$; break; + case 3: field3 = (CharSequence)value$; break; + default: throw new org.apache.avro.AvroRuntimeException("Bad index"); + } + } + + /** + * Gets the value of the 'name' field. + * @return The value of the 'name' field. + */ + public CharSequence getName() { + return name; + } + + + /** + * Sets the value of the 'name' field. + * @param value the value to set. + */ + public void setName(CharSequence value) { + this.name = value; + } + + /** + * Gets the value of the 'field1' field. + * @return The value of the 'field1' field. + */ + public int getField1() { + return field1; + } + + + /** + * Sets the value of the 'field1' field. + * @param value the value to set. + */ + public void setField1(int value) { + this.field1 = value; + } + + /** + * Gets the value of the 'field2' field. + * @return The value of the 'field2' field. + */ + public CharSequence getField2() { + return field2; + } + + + /** + * Sets the value of the 'field2' field. + * @param value the value to set. + */ + public void setField2(CharSequence value) { + this.field2 = value; + } + + /** + * Gets the value of the 'field3' field. + * @return The value of the 'field3' field. + */ + public CharSequence getField3() { + return field3; + } + + + /** + * Sets the value of the 'field3' field. + * @param value the value to set. + */ + public void setField3(CharSequence value) { + this.field3 = value; + } + + /** + * Creates a new Test3 RecordBuilder. + * @return A new Test3 RecordBuilder + */ + public static io.pravega.schemaregistry.testobjs.generated.Test3.Builder newBuilder() { + return new io.pravega.schemaregistry.testobjs.generated.Test3.Builder(); + } + + /** + * Creates a new Test3 RecordBuilder by copying an existing Builder. 
+ * @param other The existing builder to copy. + * @return A new Test3 RecordBuilder + */ + public static io.pravega.schemaregistry.testobjs.generated.Test3.Builder newBuilder(io.pravega.schemaregistry.testobjs.generated.Test3.Builder other) { + if (other == null) { + return new io.pravega.schemaregistry.testobjs.generated.Test3.Builder(); + } else { + return new io.pravega.schemaregistry.testobjs.generated.Test3.Builder(other); + } + } + + /** + * Creates a new Test3 RecordBuilder by copying an existing Test3 instance. + * @param other The existing instance to copy. + * @return A new Test3 RecordBuilder + */ + public static io.pravega.schemaregistry.testobjs.generated.Test3.Builder newBuilder(io.pravega.schemaregistry.testobjs.generated.Test3 other) { + if (other == null) { + return new io.pravega.schemaregistry.testobjs.generated.Test3.Builder(); + } else { + return new io.pravega.schemaregistry.testobjs.generated.Test3.Builder(other); + } + } + + /** + * RecordBuilder for Test3 instances. + */ + public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private CharSequence name; + private int field1; + private CharSequence field2; + private CharSequence field3; + + /** Creates a new Builder */ + private Builder() { + super(SCHEMA$); + } + + /** + * Creates a Builder by copying an existing Builder. + * @param other The existing Builder to copy. + */ + private Builder(io.pravega.schemaregistry.testobjs.generated.Test3.Builder other) { + super(other); + if (isValidValue(fields()[0], other.name)) { + this.name = data().deepCopy(fields()[0].schema(), other.name); + fieldSetFlags()[0] = other.fieldSetFlags()[0]; + } + if (isValidValue(fields()[1], other.field1)) { + this.field1 = data().deepCopy(fields()[1].schema(), other.field1); + fieldSetFlags()[1] = other.fieldSetFlags()[1]; + } + if (isValidValue(fields()[2], other.field2)) { + this.field2 = data().deepCopy(fields()[2].schema(), other.field2); + fieldSetFlags()[2] = other.fieldSetFlags()[2]; + } + if (isValidValue(fields()[3], other.field3)) { + this.field3 = data().deepCopy(fields()[3].schema(), other.field3); + fieldSetFlags()[3] = other.fieldSetFlags()[3]; + } + } + + /** + * Creates a Builder by copying an existing Test3 instance + * @param other The existing instance to copy. + */ + private Builder(io.pravega.schemaregistry.testobjs.generated.Test3 other) { + super(SCHEMA$); + if (isValidValue(fields()[0], other.name)) { + this.name = data().deepCopy(fields()[0].schema(), other.name); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.field1)) { + this.field1 = data().deepCopy(fields()[1].schema(), other.field1); + fieldSetFlags()[1] = true; + } + if (isValidValue(fields()[2], other.field2)) { + this.field2 = data().deepCopy(fields()[2].schema(), other.field2); + fieldSetFlags()[2] = true; + } + if (isValidValue(fields()[3], other.field3)) { + this.field3 = data().deepCopy(fields()[3].schema(), other.field3); + fieldSetFlags()[3] = true; + } + } + + /** + * Gets the value of the 'name' field. + * @return The value. + */ + public CharSequence getName() { + return name; + } + + + /** + * Sets the value of the 'name' field. + * @param value The value of 'name'. + * @return This builder. 
+ */ + public io.pravega.schemaregistry.testobjs.generated.Test3.Builder setName(CharSequence value) { + validate(fields()[0], value); + this.name = value; + fieldSetFlags()[0] = true; + return this; + } + + /** + * Checks whether the 'name' field has been set. + * @return True if the 'name' field has been set, false otherwise. + */ + public boolean hasName() { + return fieldSetFlags()[0]; + } + + + /** + * Clears the value of the 'name' field. + * @return This builder. + */ + public io.pravega.schemaregistry.testobjs.generated.Test3.Builder clearName() { + name = null; + fieldSetFlags()[0] = false; + return this; + } + + /** + * Gets the value of the 'field1' field. + * @return The value. + */ + public int getField1() { + return field1; + } + + + /** + * Sets the value of the 'field1' field. + * @param value The value of 'field1'. + * @return This builder. + */ + public io.pravega.schemaregistry.testobjs.generated.Test3.Builder setField1(int value) { + validate(fields()[1], value); + this.field1 = value; + fieldSetFlags()[1] = true; + return this; + } + + /** + * Checks whether the 'field1' field has been set. + * @return True if the 'field1' field has been set, false otherwise. + */ + public boolean hasField1() { + return fieldSetFlags()[1]; + } + + + /** + * Clears the value of the 'field1' field. + * @return This builder. + */ + public io.pravega.schemaregistry.testobjs.generated.Test3.Builder clearField1() { + fieldSetFlags()[1] = false; + return this; + } + + /** + * Gets the value of the 'field2' field. + * @return The value. + */ + public CharSequence getField2() { + return field2; + } + + + /** + * Sets the value of the 'field2' field. + * @param value The value of 'field2'. + * @return This builder. + */ + public io.pravega.schemaregistry.testobjs.generated.Test3.Builder setField2(CharSequence value) { + validate(fields()[2], value); + this.field2 = value; + fieldSetFlags()[2] = true; + return this; + } + + /** + * Checks whether the 'field2' field has been set. + * @return True if the 'field2' field has been set, false otherwise. + */ + public boolean hasField2() { + return fieldSetFlags()[2]; + } + + + /** + * Clears the value of the 'field2' field. + * @return This builder. + */ + public io.pravega.schemaregistry.testobjs.generated.Test3.Builder clearField2() { + field2 = null; + fieldSetFlags()[2] = false; + return this; + } + + /** + * Gets the value of the 'field3' field. + * @return The value. + */ + public CharSequence getField3() { + return field3; + } + + + /** + * Sets the value of the 'field3' field. + * @param value The value of 'field3'. + * @return This builder. + */ + public io.pravega.schemaregistry.testobjs.generated.Test3.Builder setField3(CharSequence value) { + validate(fields()[3], value); + this.field3 = value; + fieldSetFlags()[3] = true; + return this; + } + + /** + * Checks whether the 'field3' field has been set. + * @return True if the 'field3' field has been set, false otherwise. + */ + public boolean hasField3() { + return fieldSetFlags()[3]; + } + + + /** + * Clears the value of the 'field3' field. + * @return This builder. + */ + public io.pravega.schemaregistry.testobjs.generated.Test3.Builder clearField3() { + field3 = null; + fieldSetFlags()[3] = false; + return this; + } + + @Override + @SuppressWarnings("unchecked") + public Test3 build() { + try { + Test3 record = new Test3(); + record.name = fieldSetFlags()[0] ? this.name : (CharSequence) defaultValue(fields()[0]); + record.field1 = fieldSetFlags()[1] ? 
this.field1 : (Integer) defaultValue(fields()[1]); + record.field2 = fieldSetFlags()[2] ? this.field2 : (CharSequence) defaultValue(fields()[2]); + record.field3 = fieldSetFlags()[3] ? this.field3 : (CharSequence) defaultValue(fields()[3]); + return record; + } catch (org.apache.avro.AvroMissingFieldException e) { + throw e; + } catch (Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } + + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumWriter<Test3> + WRITER$ = (org.apache.avro.io.DatumWriter<Test3>)MODEL$.createDatumWriter(SCHEMA$); + + @Override public void writeExternal(java.io.ObjectOutput out) + throws java.io.IOException { + WRITER$.write(this, SpecificData.getEncoder(out)); + } + + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumReader<Test3> + READER$ = (org.apache.avro.io.DatumReader<Test3>)MODEL$.createDatumReader(SCHEMA$); + + @Override public void readExternal(java.io.ObjectInput in) + throws java.io.IOException { + READER$.read(this, SpecificData.getDecoder(in)); + } + + @Override protected boolean hasCustomCoders() { return true; } + + @Override public void customEncode(org.apache.avro.io.Encoder out) + throws java.io.IOException + { + out.writeString(this.name); + + out.writeInt(this.field1); + + out.writeString(this.field2); + + out.writeString(this.field3); + + } + + @Override public void customDecode(org.apache.avro.io.ResolvingDecoder in) + throws java.io.IOException + { + org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff(); + if (fieldOrder == null) { + this.name = in.readString(this.name instanceof Utf8 ? (Utf8)this.name : null); + + this.field1 = in.readInt(); + + this.field2 = in.readString(this.field2 instanceof Utf8 ? (Utf8)this.field2 : null); + + this.field3 = in.readString(this.field3 instanceof Utf8 ? (Utf8)this.field3 : null); + + } else { + for (int i = 0; i < 4; i++) { + switch (fieldOrder[i].pos()) { + case 0: + this.name = in.readString(this.name instanceof Utf8 ? (Utf8)this.name : null); + break; + + case 1: + this.field1 = in.readInt(); + break; + + case 2: + this.field2 = in.readString(this.field2 instanceof Utf8 ? (Utf8)this.field2 : null); + break; + + case 3: + this.field3 = in.readString(this.field3 instanceof Utf8 ? 
(Utf8)this.field3 : null); + break; + + default: + throw new java.io.IOException("Corrupt ResolvingDecoder."); + } + } + } + } +} + + + + + + + + + + diff --git a/serializers/src/test/resources/avro/avroTest1.avsc b/serializers/src/test/resources/avro/avroTest1.avsc new file mode 100644 index 000000000..a7d5e71ea --- /dev/null +++ b/serializers/src/test/resources/avro/avroTest1.avsc @@ -0,0 +1,9 @@ +{ + "namespace": "io.pravega.schemaregistry.testobjs.generated", + "type": "record", + "name": "Type1", + "fields": [ + {"name": "a", "type": "string"}, + {"name": "b", "type": "int"} + ] +} \ No newline at end of file diff --git a/serializers/src/test/resources/avro/avroTest2.avsc b/serializers/src/test/resources/avro/avroTest2.avsc new file mode 100644 index 000000000..f0ebc9c52 --- /dev/null +++ b/serializers/src/test/resources/avro/avroTest2.avsc @@ -0,0 +1,10 @@ +{ + "namespace": "io.pravega.schemaregistry.testobjs.generated", + "type": "record", + "name": "Type2", + "fields": [ + {"name": "c", "type": "string"}, + {"name": "d", "type": "int"}, + {"name": "e", "type": "string"} + ] +} \ No newline at end of file diff --git a/serializers/src/test/resources/avro/avroTest3.avsc b/serializers/src/test/resources/avro/avroTest3.avsc new file mode 100644 index 000000000..ed07a0543 --- /dev/null +++ b/serializers/src/test/resources/avro/avroTest3.avsc @@ -0,0 +1,11 @@ +{ + "namespace": "io.pravega.schemaregistry.testobjs.generated", + "type": "record", + "name": "Type3", + "fields": [ + {"name": "f", "type": "string"}, + {"name": "g", "type": "int"}, + {"name": "h", "type": "string"}, + {"name": "i", "type": "string"} + ] +} \ No newline at end of file diff --git a/serializers/src/test/resources/proto/protobufTest.pb b/serializers/src/test/resources/proto/protobufTest.pb new file mode 100644 index 000000000..b0226bf03 Binary files /dev/null and b/serializers/src/test/resources/proto/protobufTest.pb differ diff --git a/serializers/src/test/resources/proto/protobufTest.proto b/serializers/src/test/resources/proto/protobufTest.proto new file mode 100644 index 000000000..4edfcae2b --- /dev/null +++ b/serializers/src/test/resources/proto/protobufTest.proto @@ -0,0 +1,28 @@ +syntax = "proto3"; +package io.pravega.schemaregistry.testobjs.generated; + +message InternalMessage { + enum Values { + val1 = 0; + val2 = 1; + val3 = 2; + val4 = 3; + } + Values value = 1; +} + +message Message1 { + InternalMessage internal = 1; + string name = 2; +} + +message Message2 { + string name = 1; + int32 field1 = 2; +} + +message Message3 { + string name = 1; + int32 field1 = 2; + int32 field2 = 3; +} diff --git a/settings.gradle b/settings.gradle index 8e05c594e..39686cd59 100644 --- a/settings.gradle +++ b/settings.gradle @@ -13,6 +13,7 @@ rootProject.name = 'schema-registry' include 'client', 'common', 'contract', + 'serializers', 'server', 'auth'
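
For reviewers who want to sanity-check the generated test objects above, here is a minimal round-trip sketch. It is a usage illustration, not part of the diff: the class name GeneratedTestObjsRoundTrip is hypothetical, it assumes the generated Test1 and ProtobufTest.Message3 classes are on the classpath, and it uses only methods visible in the generated sources (newBuilder, toByteBuffer/fromByteBuffer on the Avro side) plus protobuf's standard toByteArray/parseFrom.

    import java.nio.ByteBuffer;

    import io.pravega.schemaregistry.testobjs.generated.ProtobufTest;
    import io.pravega.schemaregistry.testobjs.generated.Test1;

    public class GeneratedTestObjsRoundTrip {
        public static void main(String[] args) throws Exception {
            // Avro: build a Test1 record, encode it with the generated
            // single-object encoder, then decode it back.
            Test1 avroRecord = Test1.newBuilder()
                    .setName("test")
                    .setField1(1)
                    .build();
            ByteBuffer encoded = avroRecord.toByteBuffer();
            Test1 decoded = Test1.fromByteBuffer(encoded);
            System.out.println("avro round trip: " + decoded.getName() + "/" + decoded.getField1());

            // Protobuf: build a Message3, serialize it to bytes, then parse it back.
            ProtobufTest.Message3 message = ProtobufTest.Message3.newBuilder()
                    .setName("test")
                    .setField1(1)
                    .setField2(2)
                    .build();
            ProtobufTest.Message3 parsed =
                    ProtobufTest.Message3.parseFrom(message.toByteArray());
            System.out.println("protobuf round trip: " + parsed.getName() + "/" + parsed.getField2());
        }
    }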